dirstate: document dirstatemap interface...
Mark Thomas
r35077:a0520226 default
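The change below only replaces the _map docstring so it points readers at the dirstatemap class documentation instead of restating the entry format. For orientation, here is a minimal sketch (not part of this commit; the helper name summarize_dirstate is made up) of how the rest of this file consumes the dirstatemap interface, where each entry is a dirstatetuple of (state, mode, size, mtime):

    # Illustrative sketch only -- summarize_dirstate is a hypothetical helper,
    # not part of Mercurial. It relies only on calls that appear in dirstate.py
    # below: parents(), copymap, nonnormalset and iteritems().
    def summarize_dirstate(dmap):
        p1, p2 = dmap.parents()             # working-directory parent nodes
        copies = dict(dmap.copymap)         # copy destination -> source
        nonnormal = len(dmap.nonnormalset)  # entries needing a status re-check
        states = {}
        for f, e in dmap.iteritems():       # e is (state, mode, size, mtime)
            states[e[0]] = states.get(e[0], 0) + 1
        return {'parents': (p1, p2),
                'copies': copies,
                'nonnormal': nonnormal,
                'states': states}           # keys drawn from 'n', 'a', 'r', 'm'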
@@ -1,1427 +1,1464 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 - '''Return the dirstate contents as a map from filename to
131 - (state, mode, size, time).'''
130 + """Return the dirstate contents (see documentation for dirstatemap)."""
132 self._map = dirstatemap(self._ui, self._opener, self._root)
131 self._map = dirstatemap(self._ui, self._opener, self._root)
133 return self._map
132 return self._map
134
133
135 @property
134 @property
136 def _sparsematcher(self):
135 def _sparsematcher(self):
137 """The matcher for the sparse checkout.
136 """The matcher for the sparse checkout.
138
137
139 The working directory may not include every file from a manifest. The
138 The working directory may not include every file from a manifest. The
140 matcher obtained by this property will match a path if it is to be
139 matcher obtained by this property will match a path if it is to be
141 included in the working directory.
140 included in the working directory.
142 """
141 """
143 # TODO there is potential to cache this property. For now, the matcher
142 # TODO there is potential to cache this property. For now, the matcher
144 # is resolved on every access. (But the called function does use a
143 # is resolved on every access. (But the called function does use a
145 # cache to keep the lookup fast.)
144 # cache to keep the lookup fast.)
146 return self._sparsematchfn()
145 return self._sparsematchfn()
147
146
148 @repocache('branch')
147 @repocache('branch')
149 def _branch(self):
148 def _branch(self):
150 try:
149 try:
151 return self._opener.read("branch").strip() or "default"
150 return self._opener.read("branch").strip() or "default"
152 except IOError as inst:
151 except IOError as inst:
153 if inst.errno != errno.ENOENT:
152 if inst.errno != errno.ENOENT:
154 raise
153 raise
155 return "default"
154 return "default"
156
155
157 @property
156 @property
158 def _pl(self):
157 def _pl(self):
159 return self._map.parents()
158 return self._map.parents()
160
159
161 def dirs(self):
160 def dirs(self):
162 return self._map.dirs
161 return self._map.dirs
163
162
164 @rootcache('.hgignore')
163 @rootcache('.hgignore')
165 def _ignore(self):
164 def _ignore(self):
166 files = self._ignorefiles()
165 files = self._ignorefiles()
167 if not files:
166 if not files:
168 return matchmod.never(self._root, '')
167 return matchmod.never(self._root, '')
169
168
170 pats = ['include:%s' % f for f in files]
169 pats = ['include:%s' % f for f in files]
171 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
172
171
173 @propertycache
172 @propertycache
174 def _slash(self):
173 def _slash(self):
175 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
176
175
177 @propertycache
176 @propertycache
178 def _checklink(self):
177 def _checklink(self):
179 return util.checklink(self._root)
178 return util.checklink(self._root)
180
179
181 @propertycache
180 @propertycache
182 def _checkexec(self):
181 def _checkexec(self):
183 return util.checkexec(self._root)
182 return util.checkexec(self._root)
184
183
185 @propertycache
184 @propertycache
186 def _checkcase(self):
185 def _checkcase(self):
187 return not util.fscasesensitive(self._join('.hg'))
186 return not util.fscasesensitive(self._join('.hg'))
188
187
189 def _join(self, f):
188 def _join(self, f):
190 # much faster than os.path.join()
189 # much faster than os.path.join()
191 # it's safe because f is always a relative path
190 # it's safe because f is always a relative path
192 return self._rootdir + f
191 return self._rootdir + f
193
192
194 def flagfunc(self, buildfallback):
193 def flagfunc(self, buildfallback):
195 if self._checklink and self._checkexec:
194 if self._checklink and self._checkexec:
196 def f(x):
195 def f(x):
197 try:
196 try:
198 st = os.lstat(self._join(x))
197 st = os.lstat(self._join(x))
199 if util.statislink(st):
198 if util.statislink(st):
200 return 'l'
199 return 'l'
201 if util.statisexec(st):
200 if util.statisexec(st):
202 return 'x'
201 return 'x'
203 except OSError:
202 except OSError:
204 pass
203 pass
205 return ''
204 return ''
206 return f
205 return f
207
206
208 fallback = buildfallback()
207 fallback = buildfallback()
209 if self._checklink:
208 if self._checklink:
210 def f(x):
209 def f(x):
211 if os.path.islink(self._join(x)):
210 if os.path.islink(self._join(x)):
212 return 'l'
211 return 'l'
213 if 'x' in fallback(x):
212 if 'x' in fallback(x):
214 return 'x'
213 return 'x'
215 return ''
214 return ''
216 return f
215 return f
217 if self._checkexec:
216 if self._checkexec:
218 def f(x):
217 def f(x):
219 if 'l' in fallback(x):
218 if 'l' in fallback(x):
220 return 'l'
219 return 'l'
221 if util.isexec(self._join(x)):
220 if util.isexec(self._join(x)):
222 return 'x'
221 return 'x'
223 return ''
222 return ''
224 return f
223 return f
225 else:
224 else:
226 return fallback
225 return fallback
227
226
228 @propertycache
227 @propertycache
229 def _cwd(self):
228 def _cwd(self):
230 # internal config: ui.forcecwd
229 # internal config: ui.forcecwd
231 forcecwd = self._ui.config('ui', 'forcecwd')
230 forcecwd = self._ui.config('ui', 'forcecwd')
232 if forcecwd:
231 if forcecwd:
233 return forcecwd
232 return forcecwd
234 return pycompat.getcwd()
233 return pycompat.getcwd()
235
234
236 def getcwd(self):
235 def getcwd(self):
237 '''Return the path from which a canonical path is calculated.
236 '''Return the path from which a canonical path is calculated.
238
237
239 This path should be used to resolve file patterns or to convert
238 This path should be used to resolve file patterns or to convert
240 canonical paths back to file paths for display. It shouldn't be
239 canonical paths back to file paths for display. It shouldn't be
241 used to get real file paths. Use vfs functions instead.
240 used to get real file paths. Use vfs functions instead.
242 '''
241 '''
243 cwd = self._cwd
242 cwd = self._cwd
244 if cwd == self._root:
243 if cwd == self._root:
245 return ''
244 return ''
246 # self._root ends with a path separator if self._root is '/' or 'C:\'
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
247 rootsep = self._root
246 rootsep = self._root
248 if not util.endswithsep(rootsep):
247 if not util.endswithsep(rootsep):
249 rootsep += pycompat.ossep
248 rootsep += pycompat.ossep
250 if cwd.startswith(rootsep):
249 if cwd.startswith(rootsep):
251 return cwd[len(rootsep):]
250 return cwd[len(rootsep):]
252 else:
251 else:
253 # we're outside the repo. return an absolute path.
252 # we're outside the repo. return an absolute path.
254 return cwd
253 return cwd
255
254
256 def pathto(self, f, cwd=None):
255 def pathto(self, f, cwd=None):
257 if cwd is None:
256 if cwd is None:
258 cwd = self.getcwd()
257 cwd = self.getcwd()
259 path = util.pathto(self._root, cwd, f)
258 path = util.pathto(self._root, cwd, f)
260 if self._slash:
259 if self._slash:
261 return util.pconvert(path)
260 return util.pconvert(path)
262 return path
261 return path
263
262
264 def __getitem__(self, key):
263 def __getitem__(self, key):
265 '''Return the current state of key (a filename) in the dirstate.
264 '''Return the current state of key (a filename) in the dirstate.
266
265
267 States are:
266 States are:
268 n normal
267 n normal
269 m needs merging
268 m needs merging
270 r marked for removal
269 r marked for removal
271 a marked for addition
270 a marked for addition
272 ? not tracked
271 ? not tracked
273 '''
272 '''
274 return self._map.get(key, ("?",))[0]
273 return self._map.get(key, ("?",))[0]
275
274
276 def __contains__(self, key):
275 def __contains__(self, key):
277 return key in self._map
276 return key in self._map
278
277
279 def __iter__(self):
278 def __iter__(self):
280 return iter(sorted(self._map))
279 return iter(sorted(self._map))
281
280
282 def items(self):
281 def items(self):
283 return self._map.iteritems()
282 return self._map.iteritems()
284
283
285 iteritems = items
284 iteritems = items
286
285
287 def parents(self):
286 def parents(self):
288 return [self._validate(p) for p in self._pl]
287 return [self._validate(p) for p in self._pl]
289
288
290 def p1(self):
289 def p1(self):
291 return self._validate(self._pl[0])
290 return self._validate(self._pl[0])
292
291
293 def p2(self):
292 def p2(self):
294 return self._validate(self._pl[1])
293 return self._validate(self._pl[1])
295
294
296 def branch(self):
295 def branch(self):
297 return encoding.tolocal(self._branch)
296 return encoding.tolocal(self._branch)
298
297
299 def setparents(self, p1, p2=nullid):
298 def setparents(self, p1, p2=nullid):
300 """Set dirstate parents to p1 and p2.
299 """Set dirstate parents to p1 and p2.
301
300
302 When moving from two parents to one, 'm' merged entries are
301 When moving from two parents to one, 'm' merged entries are
303 adjusted to normal and previous copy records are discarded and
302 adjusted to normal and previous copy records are discarded and
304 returned by the call.
303 returned by the call.
305
304
306 See localrepo.setparents()
305 See localrepo.setparents()
307 """
306 """
308 if self._parentwriters == 0:
307 if self._parentwriters == 0:
309 raise ValueError("cannot set dirstate parent without "
308 raise ValueError("cannot set dirstate parent without "
310 "calling dirstate.beginparentchange")
309 "calling dirstate.beginparentchange")
311
310
312 self._dirty = True
311 self._dirty = True
313 oldp2 = self._pl[1]
312 oldp2 = self._pl[1]
314 if self._origpl is None:
313 if self._origpl is None:
315 self._origpl = self._pl
314 self._origpl = self._pl
316 self._map.setparents(p1, p2)
315 self._map.setparents(p1, p2)
317 copies = {}
316 copies = {}
318 if oldp2 != nullid and p2 == nullid:
317 if oldp2 != nullid and p2 == nullid:
319 candidatefiles = self._map.nonnormalset.union(
318 candidatefiles = self._map.nonnormalset.union(
320 self._map.otherparentset)
319 self._map.otherparentset)
321 for f in candidatefiles:
320 for f in candidatefiles:
322 s = self._map.get(f)
321 s = self._map.get(f)
323 if s is None:
322 if s is None:
324 continue
323 continue
325
324
326 # Discard 'm' markers when moving away from a merge state
325 # Discard 'm' markers when moving away from a merge state
327 if s[0] == 'm':
326 if s[0] == 'm':
328 source = self._map.copymap.get(f)
327 source = self._map.copymap.get(f)
329 if source:
328 if source:
330 copies[f] = source
329 copies[f] = source
331 self.normallookup(f)
330 self.normallookup(f)
332 # Also fix up otherparent markers
331 # Also fix up otherparent markers
333 elif s[0] == 'n' and s[2] == -2:
332 elif s[0] == 'n' and s[2] == -2:
334 source = self._map.copymap.get(f)
333 source = self._map.copymap.get(f)
335 if source:
334 if source:
336 copies[f] = source
335 copies[f] = source
337 self.add(f)
336 self.add(f)
338 return copies
337 return copies
339
338
340 def setbranch(self, branch):
339 def setbranch(self, branch):
341 self._branch = encoding.fromlocal(branch)
340 self._branch = encoding.fromlocal(branch)
342 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
343 try:
342 try:
344 f.write(self._branch + '\n')
343 f.write(self._branch + '\n')
345 f.close()
344 f.close()
346
345
347 # make sure filecache has the correct stat info for _branch after
346 # make sure filecache has the correct stat info for _branch after
348 # replacing the underlying file
347 # replacing the underlying file
349 ce = self._filecache['_branch']
348 ce = self._filecache['_branch']
350 if ce:
349 if ce:
351 ce.refresh()
350 ce.refresh()
352 except: # re-raises
351 except: # re-raises
353 f.discard()
352 f.discard()
354 raise
353 raise
355
354
356 def invalidate(self):
355 def invalidate(self):
357 '''Causes the next access to reread the dirstate.
356 '''Causes the next access to reread the dirstate.
358
357
359 This is different from localrepo.invalidatedirstate() because it always
358 This is different from localrepo.invalidatedirstate() because it always
360 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
361 check whether the dirstate has changed before rereading it.'''
360 check whether the dirstate has changed before rereading it.'''
362
361
363 for a in ("_map", "_branch", "_ignore"):
362 for a in ("_map", "_branch", "_ignore"):
364 if a in self.__dict__:
363 if a in self.__dict__:
365 delattr(self, a)
364 delattr(self, a)
366 self._lastnormaltime = 0
365 self._lastnormaltime = 0
367 self._dirty = False
366 self._dirty = False
368 self._updatedfiles.clear()
367 self._updatedfiles.clear()
369 self._parentwriters = 0
368 self._parentwriters = 0
370 self._origpl = None
369 self._origpl = None
371
370
372 def copy(self, source, dest):
371 def copy(self, source, dest):
373 """Mark dest as a copy of source. Unmark dest if source is None."""
372 """Mark dest as a copy of source. Unmark dest if source is None."""
374 if source == dest:
373 if source == dest:
375 return
374 return
376 self._dirty = True
375 self._dirty = True
377 if source is not None:
376 if source is not None:
378 self._map.copymap[dest] = source
377 self._map.copymap[dest] = source
379 self._updatedfiles.add(source)
378 self._updatedfiles.add(source)
380 self._updatedfiles.add(dest)
379 self._updatedfiles.add(dest)
381 elif self._map.copymap.pop(dest, None):
380 elif self._map.copymap.pop(dest, None):
382 self._updatedfiles.add(dest)
381 self._updatedfiles.add(dest)
383
382
384 def copied(self, file):
383 def copied(self, file):
385 return self._map.copymap.get(file, None)
384 return self._map.copymap.get(file, None)
386
385
387 def copies(self):
386 def copies(self):
388 return self._map.copymap
387 return self._map.copymap
389
388
390 def _droppath(self, f):
389 def _droppath(self, f):
391 if self[f] not in "?r" and "dirs" in self._map.__dict__:
390 if self[f] not in "?r" and "dirs" in self._map.__dict__:
392 self._map.dirs.delpath(f)
391 self._map.dirs.delpath(f)
393
392
394 if "filefoldmap" in self._map.__dict__:
393 if "filefoldmap" in self._map.__dict__:
395 normed = util.normcase(f)
394 normed = util.normcase(f)
396 if normed in self._map.filefoldmap:
395 if normed in self._map.filefoldmap:
397 del self._map.filefoldmap[normed]
396 del self._map.filefoldmap[normed]
398
397
399 self._updatedfiles.add(f)
398 self._updatedfiles.add(f)
400
399
401 def _addpath(self, f, state, mode, size, mtime):
400 def _addpath(self, f, state, mode, size, mtime):
402 oldstate = self[f]
401 oldstate = self[f]
403 if state == 'a' or oldstate == 'r':
402 if state == 'a' or oldstate == 'r':
404 scmutil.checkfilename(f)
403 scmutil.checkfilename(f)
405 if f in self._map.dirs:
404 if f in self._map.dirs:
406 raise error.Abort(_('directory %r already in dirstate') % f)
405 raise error.Abort(_('directory %r already in dirstate') % f)
407 # shadows
406 # shadows
408 for d in util.finddirs(f):
407 for d in util.finddirs(f):
409 if d in self._map.dirs:
408 if d in self._map.dirs:
410 break
409 break
411 entry = self._map.get(d)
410 entry = self._map.get(d)
412 if entry is not None and entry[0] != 'r':
411 if entry is not None and entry[0] != 'r':
413 raise error.Abort(
412 raise error.Abort(
414 _('file %r in dirstate clashes with %r') % (d, f))
413 _('file %r in dirstate clashes with %r') % (d, f))
415 if oldstate in "?r" and "dirs" in self._map.__dict__:
414 if oldstate in "?r" and "dirs" in self._map.__dict__:
416 self._map.dirs.addpath(f)
415 self._map.dirs.addpath(f)
417 self._dirty = True
416 self._dirty = True
418 self._updatedfiles.add(f)
417 self._updatedfiles.add(f)
419 self._map[f] = dirstatetuple(state, mode, size, mtime)
418 self._map[f] = dirstatetuple(state, mode, size, mtime)
420 if state != 'n' or mtime == -1:
419 if state != 'n' or mtime == -1:
421 self._map.nonnormalset.add(f)
420 self._map.nonnormalset.add(f)
422 if size == -2:
421 if size == -2:
423 self._map.otherparentset.add(f)
422 self._map.otherparentset.add(f)
424
423
425 def normal(self, f):
424 def normal(self, f):
426 '''Mark a file normal and clean.'''
425 '''Mark a file normal and clean.'''
427 s = os.lstat(self._join(f))
426 s = os.lstat(self._join(f))
428 mtime = s.st_mtime
427 mtime = s.st_mtime
429 self._addpath(f, 'n', s.st_mode,
428 self._addpath(f, 'n', s.st_mode,
430 s.st_size & _rangemask, mtime & _rangemask)
429 s.st_size & _rangemask, mtime & _rangemask)
431 self._map.copymap.pop(f, None)
430 self._map.copymap.pop(f, None)
432 if f in self._map.nonnormalset:
431 if f in self._map.nonnormalset:
433 self._map.nonnormalset.remove(f)
432 self._map.nonnormalset.remove(f)
434 if mtime > self._lastnormaltime:
433 if mtime > self._lastnormaltime:
435 # Remember the most recent modification timeslot for status(),
434 # Remember the most recent modification timeslot for status(),
436 # to make sure we won't miss future size-preserving file content
435 # to make sure we won't miss future size-preserving file content
437 # modifications that happen within the same timeslot.
436 # modifications that happen within the same timeslot.
438 self._lastnormaltime = mtime
437 self._lastnormaltime = mtime
439
438
440 def normallookup(self, f):
439 def normallookup(self, f):
441 '''Mark a file normal, but possibly dirty.'''
440 '''Mark a file normal, but possibly dirty.'''
442 if self._pl[1] != nullid:
441 if self._pl[1] != nullid:
443 # if there is a merge going on and the file was either
442 # if there is a merge going on and the file was either
444 # in state 'm' (-1) or coming from other parent (-2) before
443 # in state 'm' (-1) or coming from other parent (-2) before
445 # being removed, restore that state.
444 # being removed, restore that state.
446 entry = self._map.get(f)
445 entry = self._map.get(f)
447 if entry is not None:
446 if entry is not None:
448 if entry[0] == 'r' and entry[2] in (-1, -2):
447 if entry[0] == 'r' and entry[2] in (-1, -2):
449 source = self._map.copymap.get(f)
448 source = self._map.copymap.get(f)
450 if entry[2] == -1:
449 if entry[2] == -1:
451 self.merge(f)
450 self.merge(f)
452 elif entry[2] == -2:
451 elif entry[2] == -2:
453 self.otherparent(f)
452 self.otherparent(f)
454 if source:
453 if source:
455 self.copy(source, f)
454 self.copy(source, f)
456 return
455 return
457 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
456 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
458 return
457 return
459 self._addpath(f, 'n', 0, -1, -1)
458 self._addpath(f, 'n', 0, -1, -1)
460 self._map.copymap.pop(f, None)
459 self._map.copymap.pop(f, None)
461
460
462 def otherparent(self, f):
461 def otherparent(self, f):
463 '''Mark as coming from the other parent, always dirty.'''
462 '''Mark as coming from the other parent, always dirty.'''
464 if self._pl[1] == nullid:
463 if self._pl[1] == nullid:
465 raise error.Abort(_("setting %r to other parent "
464 raise error.Abort(_("setting %r to other parent "
466 "only allowed in merges") % f)
465 "only allowed in merges") % f)
467 if f in self and self[f] == 'n':
466 if f in self and self[f] == 'n':
468 # merge-like
467 # merge-like
469 self._addpath(f, 'm', 0, -2, -1)
468 self._addpath(f, 'm', 0, -2, -1)
470 else:
469 else:
471 # add-like
470 # add-like
472 self._addpath(f, 'n', 0, -2, -1)
471 self._addpath(f, 'n', 0, -2, -1)
473 self._map.copymap.pop(f, None)
472 self._map.copymap.pop(f, None)
474
473
475 def add(self, f):
474 def add(self, f):
476 '''Mark a file added.'''
475 '''Mark a file added.'''
477 self._addpath(f, 'a', 0, -1, -1)
476 self._addpath(f, 'a', 0, -1, -1)
478 self._map.copymap.pop(f, None)
477 self._map.copymap.pop(f, None)
479
478
480 def remove(self, f):
479 def remove(self, f):
481 '''Mark a file removed.'''
480 '''Mark a file removed.'''
482 self._dirty = True
481 self._dirty = True
483 self._droppath(f)
482 self._droppath(f)
484 size = 0
483 size = 0
485 if self._pl[1] != nullid:
484 if self._pl[1] != nullid:
486 entry = self._map.get(f)
485 entry = self._map.get(f)
487 if entry is not None:
486 if entry is not None:
488 # backup the previous state
487 # backup the previous state
489 if entry[0] == 'm': # merge
488 if entry[0] == 'm': # merge
490 size = -1
489 size = -1
491 elif entry[0] == 'n' and entry[2] == -2: # other parent
490 elif entry[0] == 'n' and entry[2] == -2: # other parent
492 size = -2
491 size = -2
493 self._map.otherparentset.add(f)
492 self._map.otherparentset.add(f)
494 self._map[f] = dirstatetuple('r', 0, size, 0)
493 self._map[f] = dirstatetuple('r', 0, size, 0)
495 self._map.nonnormalset.add(f)
494 self._map.nonnormalset.add(f)
496 if size == 0:
495 if size == 0:
497 self._map.copymap.pop(f, None)
496 self._map.copymap.pop(f, None)
498
497
499 def merge(self, f):
498 def merge(self, f):
500 '''Mark a file merged.'''
499 '''Mark a file merged.'''
501 if self._pl[1] == nullid:
500 if self._pl[1] == nullid:
502 return self.normallookup(f)
501 return self.normallookup(f)
503 return self.otherparent(f)
502 return self.otherparent(f)
504
503
505 def drop(self, f):
504 def drop(self, f):
506 '''Drop a file from the dirstate'''
505 '''Drop a file from the dirstate'''
507 if f in self._map:
506 if f in self._map:
508 self._dirty = True
507 self._dirty = True
509 self._droppath(f)
508 self._droppath(f)
510 del self._map[f]
509 del self._map[f]
511 if f in self._map.nonnormalset:
510 if f in self._map.nonnormalset:
512 self._map.nonnormalset.remove(f)
511 self._map.nonnormalset.remove(f)
513 self._map.copymap.pop(f, None)
512 self._map.copymap.pop(f, None)
514
513
515 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
514 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
516 if exists is None:
515 if exists is None:
517 exists = os.path.lexists(os.path.join(self._root, path))
516 exists = os.path.lexists(os.path.join(self._root, path))
518 if not exists:
517 if not exists:
519 # Maybe a path component exists
518 # Maybe a path component exists
520 if not ignoremissing and '/' in path:
519 if not ignoremissing and '/' in path:
521 d, f = path.rsplit('/', 1)
520 d, f = path.rsplit('/', 1)
522 d = self._normalize(d, False, ignoremissing, None)
521 d = self._normalize(d, False, ignoremissing, None)
523 folded = d + "/" + f
522 folded = d + "/" + f
524 else:
523 else:
525 # No path components, preserve original case
524 # No path components, preserve original case
526 folded = path
525 folded = path
527 else:
526 else:
528 # recursively normalize leading directory components
527 # recursively normalize leading directory components
529 # against dirstate
528 # against dirstate
530 if '/' in normed:
529 if '/' in normed:
531 d, f = normed.rsplit('/', 1)
530 d, f = normed.rsplit('/', 1)
532 d = self._normalize(d, False, ignoremissing, True)
531 d = self._normalize(d, False, ignoremissing, True)
533 r = self._root + "/" + d
532 r = self._root + "/" + d
534 folded = d + "/" + util.fspath(f, r)
533 folded = d + "/" + util.fspath(f, r)
535 else:
534 else:
536 folded = util.fspath(normed, self._root)
535 folded = util.fspath(normed, self._root)
537 storemap[normed] = folded
536 storemap[normed] = folded
538
537
539 return folded
538 return folded
540
539
541 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
540 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
542 normed = util.normcase(path)
541 normed = util.normcase(path)
543 folded = self._map.filefoldmap.get(normed, None)
542 folded = self._map.filefoldmap.get(normed, None)
544 if folded is None:
543 if folded is None:
545 if isknown:
544 if isknown:
546 folded = path
545 folded = path
547 else:
546 else:
548 folded = self._discoverpath(path, normed, ignoremissing, exists,
547 folded = self._discoverpath(path, normed, ignoremissing, exists,
549 self._map.filefoldmap)
548 self._map.filefoldmap)
550 return folded
549 return folded
551
550
552 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
551 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
553 normed = util.normcase(path)
552 normed = util.normcase(path)
554 folded = self._map.filefoldmap.get(normed, None)
553 folded = self._map.filefoldmap.get(normed, None)
555 if folded is None:
554 if folded is None:
556 folded = self._map.dirfoldmap.get(normed, None)
555 folded = self._map.dirfoldmap.get(normed, None)
557 if folded is None:
556 if folded is None:
558 if isknown:
557 if isknown:
559 folded = path
558 folded = path
560 else:
559 else:
561 # store discovered result in dirfoldmap so that future
560 # store discovered result in dirfoldmap so that future
562 # normalizefile calls don't start matching directories
561 # normalizefile calls don't start matching directories
563 folded = self._discoverpath(path, normed, ignoremissing, exists,
562 folded = self._discoverpath(path, normed, ignoremissing, exists,
564 self._map.dirfoldmap)
563 self._map.dirfoldmap)
565 return folded
564 return folded
566
565
567 def normalize(self, path, isknown=False, ignoremissing=False):
566 def normalize(self, path, isknown=False, ignoremissing=False):
568 '''
567 '''
569 normalize the case of a pathname when on a casefolding filesystem
568 normalize the case of a pathname when on a casefolding filesystem
570
569
571 isknown specifies whether the filename came from walking the
570 isknown specifies whether the filename came from walking the
572 disk, to avoid extra filesystem access.
571 disk, to avoid extra filesystem access.
573
572
574 If ignoremissing is True, missing paths are returned
573 If ignoremissing is True, missing paths are returned
575 unchanged. Otherwise, we try harder to normalize possibly
574 unchanged. Otherwise, we try harder to normalize possibly
576 existing path components.
575 existing path components.
577
576
578 The normalized case is determined based on the following precedence:
577 The normalized case is determined based on the following precedence:
579
578
580 - version of name already stored in the dirstate
579 - version of name already stored in the dirstate
581 - version of name stored on disk
580 - version of name stored on disk
582 - version provided via command arguments
581 - version provided via command arguments
583 '''
582 '''
584
583
585 if self._checkcase:
584 if self._checkcase:
586 return self._normalize(path, isknown, ignoremissing)
585 return self._normalize(path, isknown, ignoremissing)
587 return path
586 return path
588
587
589 def clear(self):
588 def clear(self):
590 self._map.clear()
589 self._map.clear()
591 self._lastnormaltime = 0
590 self._lastnormaltime = 0
592 self._updatedfiles.clear()
591 self._updatedfiles.clear()
593 self._dirty = True
592 self._dirty = True
594
593
595 def rebuild(self, parent, allfiles, changedfiles=None):
594 def rebuild(self, parent, allfiles, changedfiles=None):
596 if changedfiles is None:
595 if changedfiles is None:
597 # Rebuild entire dirstate
596 # Rebuild entire dirstate
598 changedfiles = allfiles
597 changedfiles = allfiles
599 lastnormaltime = self._lastnormaltime
598 lastnormaltime = self._lastnormaltime
600 self.clear()
599 self.clear()
601 self._lastnormaltime = lastnormaltime
600 self._lastnormaltime = lastnormaltime
602
601
603 if self._origpl is None:
602 if self._origpl is None:
604 self._origpl = self._pl
603 self._origpl = self._pl
605 self._map.setparents(parent, nullid)
604 self._map.setparents(parent, nullid)
606 for f in changedfiles:
605 for f in changedfiles:
607 if f in allfiles:
606 if f in allfiles:
608 self.normallookup(f)
607 self.normallookup(f)
609 else:
608 else:
610 self.drop(f)
609 self.drop(f)
611
610
612 self._dirty = True
611 self._dirty = True
613
612
614 def identity(self):
613 def identity(self):
615 '''Return identity of dirstate itself to detect changes in storage
614 '''Return identity of dirstate itself to detect changes in storage
616
615
617 If identity of previous dirstate is equal to this, writing
616 If identity of previous dirstate is equal to this, writing
618 changes based on the former dirstate out can keep consistency.
617 changes based on the former dirstate out can keep consistency.
619 '''
618 '''
620 return self._map.identity
619 return self._map.identity
621
620
622 def write(self, tr):
621 def write(self, tr):
623 if not self._dirty:
622 if not self._dirty:
624 return
623 return
625
624
626 filename = self._filename
625 filename = self._filename
627 if tr:
626 if tr:
628 # 'dirstate.write()' is not only for writing in-memory
627 # 'dirstate.write()' is not only for writing in-memory
629 # changes out, but also for dropping ambiguous timestamp.
628 # changes out, but also for dropping ambiguous timestamp.
630 # delayed writing re-raise "ambiguous timestamp issue".
629 # delayed writing re-raise "ambiguous timestamp issue".
631 # See also the wiki page below for detail:
630 # See also the wiki page below for detail:
632 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
631 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
633
632
634 # emulate dropping timestamp in 'parsers.pack_dirstate'
633 # emulate dropping timestamp in 'parsers.pack_dirstate'
635 now = _getfsnow(self._opener)
634 now = _getfsnow(self._opener)
636 dmap = self._map
635 dmap = self._map
637 for f in self._updatedfiles:
636 for f in self._updatedfiles:
638 e = dmap.get(f)
637 e = dmap.get(f)
639 if e is not None and e[0] == 'n' and e[3] == now:
638 if e is not None and e[0] == 'n' and e[3] == now:
640 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
639 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
641 self._map.nonnormalset.add(f)
640 self._map.nonnormalset.add(f)
642
641
643 # emulate that all 'dirstate.normal' results are written out
642 # emulate that all 'dirstate.normal' results are written out
644 self._lastnormaltime = 0
643 self._lastnormaltime = 0
645 self._updatedfiles.clear()
644 self._updatedfiles.clear()
646
645
647 # delay writing in-memory changes out
646 # delay writing in-memory changes out
648 tr.addfilegenerator('dirstate', (self._filename,),
647 tr.addfilegenerator('dirstate', (self._filename,),
649 self._writedirstate, location='plain')
648 self._writedirstate, location='plain')
650 return
649 return
651
650
652 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
651 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
653 self._writedirstate(st)
652 self._writedirstate(st)
654
653
655 def addparentchangecallback(self, category, callback):
654 def addparentchangecallback(self, category, callback):
656 """add a callback to be called when the wd parents are changed
655 """add a callback to be called when the wd parents are changed
657
656
658 Callback will be called with the following arguments:
657 Callback will be called with the following arguments:
659 dirstate, (oldp1, oldp2), (newp1, newp2)
658 dirstate, (oldp1, oldp2), (newp1, newp2)
660
659
661 Category is a unique identifier to allow overwriting an old callback
660 Category is a unique identifier to allow overwriting an old callback
662 with a newer callback.
661 with a newer callback.
663 """
662 """
664 self._plchangecallbacks[category] = callback
663 self._plchangecallbacks[category] = callback
665
664
666 def _writedirstate(self, st):
665 def _writedirstate(self, st):
667 # notify callbacks about parents change
666 # notify callbacks about parents change
668 if self._origpl is not None and self._origpl != self._pl:
667 if self._origpl is not None and self._origpl != self._pl:
669 for c, callback in sorted(self._plchangecallbacks.iteritems()):
668 for c, callback in sorted(self._plchangecallbacks.iteritems()):
670 callback(self, self._origpl, self._pl)
669 callback(self, self._origpl, self._pl)
671 self._origpl = None
670 self._origpl = None
672 # use the modification time of the newly created temporary file as the
671 # use the modification time of the newly created temporary file as the
673 # filesystem's notion of 'now'
672 # filesystem's notion of 'now'
674 now = util.fstat(st).st_mtime & _rangemask
673 now = util.fstat(st).st_mtime & _rangemask
675
674
676 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
675 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
677 # timestamp of each entries in dirstate, because of 'now > mtime'
676 # timestamp of each entries in dirstate, because of 'now > mtime'
678 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
677 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
679 if delaywrite > 0:
678 if delaywrite > 0:
680 # do we have any files to delay for?
679 # do we have any files to delay for?
681 for f, e in self._map.iteritems():
680 for f, e in self._map.iteritems():
682 if e[0] == 'n' and e[3] == now:
681 if e[0] == 'n' and e[3] == now:
683 import time # to avoid useless import
682 import time # to avoid useless import
684 # rather than sleep n seconds, sleep until the next
683 # rather than sleep n seconds, sleep until the next
685 # multiple of n seconds
684 # multiple of n seconds
686 clock = time.time()
685 clock = time.time()
687 start = int(clock) - (int(clock) % delaywrite)
686 start = int(clock) - (int(clock) % delaywrite)
688 end = start + delaywrite
687 end = start + delaywrite
689 time.sleep(end - clock)
688 time.sleep(end - clock)
690 now = end # trust our estimate that the end is near now
689 now = end # trust our estimate that the end is near now
691 break
690 break
692
691
693 self._map.write(st, now)
692 self._map.write(st, now)
694 self._lastnormaltime = 0
693 self._lastnormaltime = 0
695 self._dirty = False
694 self._dirty = False
696
695
697 def _dirignore(self, f):
696 def _dirignore(self, f):
698 if f == '.':
697 if f == '.':
699 return False
698 return False
700 if self._ignore(f):
699 if self._ignore(f):
701 return True
700 return True
702 for p in util.finddirs(f):
701 for p in util.finddirs(f):
703 if self._ignore(p):
702 if self._ignore(p):
704 return True
703 return True
705 return False
704 return False
706
705
707 def _ignorefiles(self):
706 def _ignorefiles(self):
708 files = []
707 files = []
709 if os.path.exists(self._join('.hgignore')):
708 if os.path.exists(self._join('.hgignore')):
710 files.append(self._join('.hgignore'))
709 files.append(self._join('.hgignore'))
711 for name, path in self._ui.configitems("ui"):
710 for name, path in self._ui.configitems("ui"):
712 if name == 'ignore' or name.startswith('ignore.'):
711 if name == 'ignore' or name.startswith('ignore.'):
713 # we need to use os.path.join here rather than self._join
712 # we need to use os.path.join here rather than self._join
714 # because path is arbitrary and user-specified
713 # because path is arbitrary and user-specified
715 files.append(os.path.join(self._rootdir, util.expandpath(path)))
714 files.append(os.path.join(self._rootdir, util.expandpath(path)))
716 return files
715 return files
717
716
718 def _ignorefileandline(self, f):
717 def _ignorefileandline(self, f):
719 files = collections.deque(self._ignorefiles())
718 files = collections.deque(self._ignorefiles())
720 visited = set()
719 visited = set()
721 while files:
720 while files:
722 i = files.popleft()
721 i = files.popleft()
723 patterns = matchmod.readpatternfile(i, self._ui.warn,
722 patterns = matchmod.readpatternfile(i, self._ui.warn,
724 sourceinfo=True)
723 sourceinfo=True)
725 for pattern, lineno, line in patterns:
724 for pattern, lineno, line in patterns:
726 kind, p = matchmod._patsplit(pattern, 'glob')
725 kind, p = matchmod._patsplit(pattern, 'glob')
727 if kind == "subinclude":
726 if kind == "subinclude":
728 if p not in visited:
727 if p not in visited:
729 files.append(p)
728 files.append(p)
730 continue
729 continue
731 m = matchmod.match(self._root, '', [], [pattern],
730 m = matchmod.match(self._root, '', [], [pattern],
732 warn=self._ui.warn)
731 warn=self._ui.warn)
733 if m(f):
732 if m(f):
734 return (i, lineno, line)
733 return (i, lineno, line)
735 visited.add(i)
734 visited.add(i)
736 return (None, -1, "")
735 return (None, -1, "")
737
736
738 def _walkexplicit(self, match, subrepos):
737 def _walkexplicit(self, match, subrepos):
739 '''Get stat data about the files explicitly specified by match.
738 '''Get stat data about the files explicitly specified by match.
740
739
741 Return a triple (results, dirsfound, dirsnotfound).
740 Return a triple (results, dirsfound, dirsnotfound).
742 - results is a mapping from filename to stat result. It also contains
741 - results is a mapping from filename to stat result. It also contains
743 listings mapping subrepos and .hg to None.
742 listings mapping subrepos and .hg to None.
744 - dirsfound is a list of files found to be directories.
743 - dirsfound is a list of files found to be directories.
745 - dirsnotfound is a list of files that the dirstate thinks are
744 - dirsnotfound is a list of files that the dirstate thinks are
746 directories and that were not found.'''
745 directories and that were not found.'''
747
746
748 def badtype(mode):
747 def badtype(mode):
749 kind = _('unknown')
748 kind = _('unknown')
750 if stat.S_ISCHR(mode):
749 if stat.S_ISCHR(mode):
751 kind = _('character device')
750 kind = _('character device')
752 elif stat.S_ISBLK(mode):
751 elif stat.S_ISBLK(mode):
753 kind = _('block device')
752 kind = _('block device')
754 elif stat.S_ISFIFO(mode):
753 elif stat.S_ISFIFO(mode):
755 kind = _('fifo')
754 kind = _('fifo')
756 elif stat.S_ISSOCK(mode):
755 elif stat.S_ISSOCK(mode):
757 kind = _('socket')
756 kind = _('socket')
758 elif stat.S_ISDIR(mode):
757 elif stat.S_ISDIR(mode):
759 kind = _('directory')
758 kind = _('directory')
760 return _('unsupported file type (type is %s)') % kind
759 return _('unsupported file type (type is %s)') % kind
761
760
762 matchedir = match.explicitdir
761 matchedir = match.explicitdir
763 badfn = match.bad
762 badfn = match.bad
764 dmap = self._map
763 dmap = self._map
765 lstat = os.lstat
764 lstat = os.lstat
766 getkind = stat.S_IFMT
765 getkind = stat.S_IFMT
767 dirkind = stat.S_IFDIR
766 dirkind = stat.S_IFDIR
768 regkind = stat.S_IFREG
767 regkind = stat.S_IFREG
769 lnkkind = stat.S_IFLNK
768 lnkkind = stat.S_IFLNK
770 join = self._join
769 join = self._join
771 dirsfound = []
770 dirsfound = []
772 foundadd = dirsfound.append
771 foundadd = dirsfound.append
773 dirsnotfound = []
772 dirsnotfound = []
774 notfoundadd = dirsnotfound.append
773 notfoundadd = dirsnotfound.append
775
774
776 if not match.isexact() and self._checkcase:
775 if not match.isexact() and self._checkcase:
777 normalize = self._normalize
776 normalize = self._normalize
778 else:
777 else:
779 normalize = None
778 normalize = None
780
779
781 files = sorted(match.files())
780 files = sorted(match.files())
782 subrepos.sort()
781 subrepos.sort()
783 i, j = 0, 0
782 i, j = 0, 0
784 while i < len(files) and j < len(subrepos):
783 while i < len(files) and j < len(subrepos):
785 subpath = subrepos[j] + "/"
784 subpath = subrepos[j] + "/"
786 if files[i] < subpath:
785 if files[i] < subpath:
787 i += 1
786 i += 1
788 continue
787 continue
789 while i < len(files) and files[i].startswith(subpath):
788 while i < len(files) and files[i].startswith(subpath):
790 del files[i]
789 del files[i]
791 j += 1
790 j += 1
792
791
793 if not files or '.' in files:
792 if not files or '.' in files:
794 files = ['.']
793 files = ['.']
795 results = dict.fromkeys(subrepos)
794 results = dict.fromkeys(subrepos)
796 results['.hg'] = None
795 results['.hg'] = None
797
796
798 alldirs = None
797 alldirs = None
799 for ff in files:
798 for ff in files:
800 # constructing the foldmap is expensive, so don't do it for the
799 # constructing the foldmap is expensive, so don't do it for the
801 # common case where files is ['.']
800 # common case where files is ['.']
802 if normalize and ff != '.':
801 if normalize and ff != '.':
803 nf = normalize(ff, False, True)
802 nf = normalize(ff, False, True)
804 else:
803 else:
805 nf = ff
804 nf = ff
806 if nf in results:
805 if nf in results:
807 continue
806 continue
808
807
809 try:
808 try:
810 st = lstat(join(nf))
809 st = lstat(join(nf))
811 kind = getkind(st.st_mode)
810 kind = getkind(st.st_mode)
812 if kind == dirkind:
811 if kind == dirkind:
813 if nf in dmap:
812 if nf in dmap:
814 # file replaced by dir on disk but still in dirstate
813 # file replaced by dir on disk but still in dirstate
815 results[nf] = None
814 results[nf] = None
816 if matchedir:
815 if matchedir:
817 matchedir(nf)
816 matchedir(nf)
818 foundadd((nf, ff))
817 foundadd((nf, ff))
819 elif kind == regkind or kind == lnkkind:
818 elif kind == regkind or kind == lnkkind:
820 results[nf] = st
819 results[nf] = st
821 else:
820 else:
822 badfn(ff, badtype(kind))
821 badfn(ff, badtype(kind))
823 if nf in dmap:
822 if nf in dmap:
824 results[nf] = None
823 results[nf] = None
825 except OSError as inst: # nf not found on disk - it is dirstate only
824 except OSError as inst: # nf not found on disk - it is dirstate only
826 if nf in dmap: # does it exactly match a missing file?
825 if nf in dmap: # does it exactly match a missing file?
827 results[nf] = None
826 results[nf] = None
828 else: # does it match a missing directory?
827 else: # does it match a missing directory?
829 if alldirs is None:
828 if alldirs is None:
830 alldirs = util.dirs(dmap._map)
829 alldirs = util.dirs(dmap._map)
831 if nf in alldirs:
830 if nf in alldirs:
832 if matchedir:
831 if matchedir:
833 matchedir(nf)
832 matchedir(nf)
834 notfoundadd(nf)
833 notfoundadd(nf)
835 else:
834 else:
836 badfn(ff, encoding.strtolocal(inst.strerror))
835 badfn(ff, encoding.strtolocal(inst.strerror))
837
836
838 # Case insensitive filesystems cannot rely on lstat() failing to detect
837 # Case insensitive filesystems cannot rely on lstat() failing to detect
839 # a case-only rename. Prune the stat object for any file that does not
838 # a case-only rename. Prune the stat object for any file that does not
840 # match the case in the filesystem, if there are multiple files that
839 # match the case in the filesystem, if there are multiple files that
841 # normalize to the same path.
840 # normalize to the same path.
842 if match.isexact() and self._checkcase:
841 if match.isexact() and self._checkcase:
843 normed = {}
842 normed = {}
844
843
845 for f, st in results.iteritems():
844 for f, st in results.iteritems():
846 if st is None:
845 if st is None:
847 continue
846 continue
848
847
849 nc = util.normcase(f)
848 nc = util.normcase(f)
850 paths = normed.get(nc)
849 paths = normed.get(nc)
851
850
852 if paths is None:
851 if paths is None:
853 paths = set()
852 paths = set()
854 normed[nc] = paths
853 normed[nc] = paths
855
854
856 paths.add(f)
855 paths.add(f)
857
856
858 for norm, paths in normed.iteritems():
857 for norm, paths in normed.iteritems():
859 if len(paths) > 1:
858 if len(paths) > 1:
860 for path in paths:
859 for path in paths:
861 folded = self._discoverpath(path, norm, True, None,
860 folded = self._discoverpath(path, norm, True, None,
862 self._map.dirfoldmap)
861 self._map.dirfoldmap)
863 if path != folded:
862 if path != folded:
864 results[path] = None
863 results[path] = None
865
864
866 return results, dirsfound, dirsnotfound
865 return results, dirsfound, dirsnotfound
867
866
868 def walk(self, match, subrepos, unknown, ignored, full=True):
867 def walk(self, match, subrepos, unknown, ignored, full=True):
869 '''
868 '''
870 Walk recursively through the directory tree, finding all files
869 Walk recursively through the directory tree, finding all files
871 matched by match.
870 matched by match.
872
871
873 If full is False, maybe skip some known-clean files.
872 If full is False, maybe skip some known-clean files.
874
873
875 Return a dict mapping filename to stat-like object (either
874 Return a dict mapping filename to stat-like object (either
876 mercurial.osutil.stat instance or return value of os.stat()).
875 mercurial.osutil.stat instance or return value of os.stat()).
877
876
878 '''
877 '''
879 # full is a flag that extensions that hook into walk can use -- this
878 # full is a flag that extensions that hook into walk can use -- this
880 # implementation doesn't use it at all. This satisfies the contract
879 # implementation doesn't use it at all. This satisfies the contract
881 # because we only guarantee a "maybe".
880 # because we only guarantee a "maybe".
882
881
883 if ignored:
882 if ignored:
884 ignore = util.never
883 ignore = util.never
885 dirignore = util.never
884 dirignore = util.never
886 elif unknown:
885 elif unknown:
887 ignore = self._ignore
886 ignore = self._ignore
888 dirignore = self._dirignore
887 dirignore = self._dirignore
889 else:
888 else:
890 # if not unknown and not ignored, drop dir recursion and step 2
889 # if not unknown and not ignored, drop dir recursion and step 2
891 ignore = util.always
890 ignore = util.always
892 dirignore = util.always
891 dirignore = util.always
893
892
894 matchfn = match.matchfn
893 matchfn = match.matchfn
895 matchalways = match.always()
894 matchalways = match.always()
896 matchtdir = match.traversedir
895 matchtdir = match.traversedir
897 dmap = self._map
896 dmap = self._map
898 listdir = util.listdir
897 listdir = util.listdir
899 lstat = os.lstat
898 lstat = os.lstat
900 dirkind = stat.S_IFDIR
899 dirkind = stat.S_IFDIR
901 regkind = stat.S_IFREG
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                if not match.visitdir(nd):
                    continue
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn, b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory,
                        # which we report as missing in this case
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
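
The traversal in step 2 is deliberately iterative: directories are pushed onto a worklist and popped until the list drains, which avoids recursion limits on deep trees. The following standalone sketch (standard library only; walk_worklist is an illustrative name, not part of dirstate) shows the same worklist pattern in isolation.

import os
import stat

def walk_worklist(root, skip='.hg'):
    # Pop a directory, lstat its entries, push subdirectories back onto the
    # worklist and record files -- the same shape as traverse() above.
    results = {}
    work = ['']
    while work:
        nd = work.pop()
        for name in sorted(os.listdir(os.path.join(root, nd))):
            if nd == '' and name == skip:
                continue
            nf = nd and (nd + '/' + name) or name
            st = os.lstat(os.path.join(root, nf))
            if stat.S_ISDIR(st.st_mode):
                work.append(nf)
            else:
                results[nf] = st
    return results
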
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if not dcontains(fn):
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))
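
The "unsure" bucket only means the cheap stat-based check was inconclusive; callers settle it by comparing file contents against the parent revision. A minimal sketch of that step follows, with read_working and read_parent as assumed callables (they are not dirstate APIs).

def resolve_unsure(unsure, read_working, read_parent):
    # Files listed here have an unchanged size but a different mtime, so only
    # a content comparison can decide between 'modified' and 'clean'.
    modified, clean = [], []
    for fn in unsure:
        if read_working(fn) != read_parent(fn):
            modified.append(fn)
        else:
            clean.append(fn)
    return modified, clean
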
    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files
            # is much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just
            # return that
            return list(files)
        return [f for f in dmap if match(f)]

    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                             checkambig=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(self._opener.join(filename),
                      self._opener.join(backupname), hardlink=True)

    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        o = self._opener
        if util.samefile(o.join(backupname), o.join(filename)):
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)

    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        self._opener.unlink(backupname)
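
Taken together, savebackup, restorebackup and clearbackup support a snapshot-and-rollback pattern around operations that may rewrite the working copy. A hypothetical wrapper (illustrative only, not part of dirstate) might look like this:

def with_dirstate_backup(dirstate, tr, backupname, operation):
    # Snapshot the dirstate, run the risky operation, then either discard the
    # snapshot on success or roll the dirstate back on failure.
    dirstate.savebackup(tr, backupname)
    try:
        operation()
    except Exception:
        dirstate.restorebackup(tr, backupname)
        raise
    else:
        dirstate.clearbackup(tr, backupname)
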
class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is accessed by treating the dirstate as a
      dict.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `dirs` is a set-like object containing all the directories that contain
      files in the dirstate, excluding any files that are marked as removed.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.

    Once instantiated, the nonnormalset, otherparentset, dirs, filefoldmap and
    dirfoldmap views must be maintained by the caller.
    """

    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        self.copymap = {}
        self._map
        return self.copymap

    def clear(self):
        self._map.clear()
        self.copymap.clear()
        self.setparents(nullid, nullid)
        util.clearcachedproperty(self, "dirs")
        util.clearcachedproperty(self, "filefoldmap")
        util.clearcachedproperty(self, "dirfoldmap")
        util.clearcachedproperty(self, "nonnormalset")
        util.clearcachedproperty(self, "otherparentset")

    def iteritems(self):
        return self._map.iteritems()

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __setitem__(self, key, value):
        self._map[key] = value

    def __getitem__(self, key):
        return self._map[key]

    def __delitem__(self, key):
        del self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map
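
As the class docstring above describes, dirstatemap behaves like a dict from filename to a (state, mode, size, mtime) tuple, with copies tracked separately in copymap. An illustrative interaction follows; the filenames and values are made up, and ui, opener and root are assumed to come from a repository.

dmap = dirstatemap(ui, opener, root)
dmap['a/b.txt'] = ('n', 0o644, 1234, 1510000000)   # tracked, clean entry
dmap.copymap['copy.txt'] = 'original.txt'          # copy.txt was copied from original.txt
state, mode, size, mtime = dmap['a/b.txt']
assert 'a/b.txt' in dmap
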
    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent
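
Which entries land in which derived set follows directly from the tuple fields checked in the pure-Python fallback above. Some illustrative, made-up entries:

entries = {
    'clean.txt':  ('n', 0o644, 12, 1510000000),  # normal -> in neither set
    'lookup.txt': ('n', 0o644, 12, -1),          # mtime -1 -> nonnormalset
    'added.txt':  ('a', 0, -1, -1),              # added -> nonnormalset
    'merged.txt': ('n', 0o644, -2, -1),          # size -2 -> nonnormalset and otherparentset
}
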
    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        f = {}
        normcase = util.normcase
        for name, s in self._map.iteritems():
            if s[0] != 'r':
                f[normcase(name)] = name
        f['.'] = '.' # prevents useless util.fspath() invocation
        return f
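
On case-insensitive filesystems this map lets a lookup recover the spelling actually recorded in the dirstate. A hypothetical lookup (dmap and the filenames are illustrative):

foldmap = dmap.filefoldmap
tracked_name = foldmap.get(util.normcase('README.TXT'))
# -> 'README.txt' if that is how the file is recorded, else None
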
    @propertycache
    def dirs(self):
        """Returns a set-like object containing all the directories in the
        current dirstate.
        """
        return util.dirs(self._map, 'r')

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = ''

            l = len(st)
            if l == 40:
                self._parents = st[:20], st[20:40]
            elif l == 0:
                self._parents = [nullid, nullid]
            else:
                raise error.Abort(_('working directory state appears '
                                    'damaged!'))

        return self._parents
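
The parent nodes occupy the first 40 bytes of the dirstate file, 20 binary bytes per parent. A simplified standalone parse of that header (it skips the damaged-file check performed above):

def parse_parents(header, nullid=b'\0' * 20):
    # An absent or empty dirstate means both parents are the null revision.
    if len(header) < 40:
        return nullid, nullid
    return header[:20], header[20:40]
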
    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename))

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map = parsers.dict_new_presized(len(st) / 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.__setitem__ = self._map.__setitem__
        self.__delitem__ = self._map.__delitem__
        self.get = self._map.get
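
The util.nogc wrapper used in the read path boils down to disabling the cyclic garbage collector for the duration of the parse. The same pattern, sketched with only the standard library:

import gc

def call_without_gc(fn, *args):
    # Disable the collector around one call so that the many tuples created
    # while parsing do not trigger O(number of files) collections.
    enabled = gc.isenabled()
    gc.disable()
    try:
        return fn(*args)
    finally:
        if enabled:
            gc.enable()
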
    def write(self, st, now):
        st.write(parsers.pack_dirstate(self._map, self.copymap,
                                       self.parents(), now))
        st.close()
        self._dirtyparents = False
        self.nonnormalset, self.otherparentset = self.nonnormalentries()
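
write() and read() are mirror images built on parsers.pack_dirstate and parsers.parse_dirstate, whose signatures appear above. An illustrative round trip, assuming dmap and now already exist:

data = parsers.pack_dirstate(dmap._map, dmap.copymap, dmap.parents(), now)
newmap, newcopies = {}, {}
p1, p2 = parsers.parse_dirstate(newmap, newcopies, data)
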
    @propertycache
    def nonnormalset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.nonnormalset = nonnorm
        return otherparents

    @propertycache
    def identity(self):
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self.dirs:
            f[normcase(name)] = name
        return f