##// END OF EJS Templates
dirstate: move nonnormalentries to dirstatemap...
Durham Goode -
r34334:4ac04418 default
parent child Browse files
Show More
@@ -1,1371 +1,1372 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 def nonnormalentries(dmap):
58 '''Compute the nonnormal dirstate entries from the dmap'''
59 try:
60 return parsers.nonnormalotherparententries(dmap._map)
61 except AttributeError:
62 nonnorm = set()
63 otherparent = set()
64 for fname, e in dmap.iteritems():
65 if e[0] != 'n' or e[3] == -1:
66 nonnorm.add(fname)
67 if e[0] == 'n' and e[2] == -2:
68 otherparent.add(fname)
69 return nonnorm, otherparent
70
71 class dirstate(object):
57 class dirstate(object):
72
58
73 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
74 '''Create a new dirstate object.
60 '''Create a new dirstate object.
75
61
76 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
77 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
78 the dirstate.
64 the dirstate.
79 '''
65 '''
80 self._opener = opener
66 self._opener = opener
81 self._validate = validate
67 self._validate = validate
82 self._root = root
68 self._root = root
83 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
84 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
85 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
86 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
87 self._dirty = False
73 self._dirty = False
88 self._dirtypl = False
74 self._dirtypl = False
89 self._lastnormaltime = 0
75 self._lastnormaltime = 0
90 self._ui = ui
76 self._ui = ui
91 self._filecache = {}
77 self._filecache = {}
92 self._parentwriters = 0
78 self._parentwriters = 0
93 self._filename = 'dirstate'
79 self._filename = 'dirstate'
94 self._pendingfilename = '%s.pending' % self._filename
80 self._pendingfilename = '%s.pending' % self._filename
95 self._plchangecallbacks = {}
81 self._plchangecallbacks = {}
96 self._origpl = None
82 self._origpl = None
97 self._updatedfiles = set()
83 self._updatedfiles = set()
98
84
99 # for consistent view between _pl() and _read() invocations
85 # for consistent view between _pl() and _read() invocations
100 self._pendingmode = None
86 self._pendingmode = None
101
87
102 @contextlib.contextmanager
88 @contextlib.contextmanager
103 def parentchange(self):
89 def parentchange(self):
104 '''Context manager for handling dirstate parents.
90 '''Context manager for handling dirstate parents.
105
91
106 If an exception occurs in the scope of the context manager,
92 If an exception occurs in the scope of the context manager,
107 the incoherent dirstate won't be written when wlock is
93 the incoherent dirstate won't be written when wlock is
108 released.
94 released.
109 '''
95 '''
110 self._parentwriters += 1
96 self._parentwriters += 1
111 yield
97 yield
112 # Typically we want the "undo" step of a context manager in a
98 # Typically we want the "undo" step of a context manager in a
113 # finally block so it happens even when an exception
99 # finally block so it happens even when an exception
114 # occurs. In this case, however, we only want to decrement
100 # occurs. In this case, however, we only want to decrement
115 # parentwriters if the code in the with statement exits
101 # parentwriters if the code in the with statement exits
116 # normally, so we don't have a try/finally here on purpose.
102 # normally, so we don't have a try/finally here on purpose.
117 self._parentwriters -= 1
103 self._parentwriters -= 1
118
104
119 def beginparentchange(self):
105 def beginparentchange(self):
120 '''Marks the beginning of a set of changes that involve changing
106 '''Marks the beginning of a set of changes that involve changing
121 the dirstate parents. If there is an exception during this time,
107 the dirstate parents. If there is an exception during this time,
122 the dirstate will not be written when the wlock is released. This
108 the dirstate will not be written when the wlock is released. This
123 prevents writing an incoherent dirstate where the parent doesn't
109 prevents writing an incoherent dirstate where the parent doesn't
124 match the contents.
110 match the contents.
125 '''
111 '''
126 self._ui.deprecwarn('beginparentchange is obsoleted by the '
112 self._ui.deprecwarn('beginparentchange is obsoleted by the '
127 'parentchange context manager.', '4.3')
113 'parentchange context manager.', '4.3')
128 self._parentwriters += 1
114 self._parentwriters += 1
129
115
130 def endparentchange(self):
116 def endparentchange(self):
131 '''Marks the end of a set of changes that involve changing the
117 '''Marks the end of a set of changes that involve changing the
132 dirstate parents. Once all parent changes have been marked done,
118 dirstate parents. Once all parent changes have been marked done,
133 the wlock will be free to write the dirstate on release.
119 the wlock will be free to write the dirstate on release.
134 '''
120 '''
135 self._ui.deprecwarn('endparentchange is obsoleted by the '
121 self._ui.deprecwarn('endparentchange is obsoleted by the '
136 'parentchange context manager.', '4.3')
122 'parentchange context manager.', '4.3')
137 if self._parentwriters > 0:
123 if self._parentwriters > 0:
138 self._parentwriters -= 1
124 self._parentwriters -= 1
139
125
140 def pendingparentchange(self):
126 def pendingparentchange(self):
141 '''Returns true if the dirstate is in the middle of a set of changes
127 '''Returns true if the dirstate is in the middle of a set of changes
142 that modify the dirstate parent.
128 that modify the dirstate parent.
143 '''
129 '''
144 return self._parentwriters > 0
130 return self._parentwriters > 0
145
131
146 @propertycache
132 @propertycache
147 def _map(self):
133 def _map(self):
148 '''Return the dirstate contents as a map from filename to
134 '''Return the dirstate contents as a map from filename to
149 (state, mode, size, time).'''
135 (state, mode, size, time).'''
150 self._read()
136 self._read()
151 return self._map
137 return self._map
152
138
153 @propertycache
139 @propertycache
154 def _copymap(self):
140 def _copymap(self):
155 self._read()
141 self._read()
156 return self._copymap
142 return self._copymap
157
143
158 @propertycache
144 @propertycache
159 def _identity(self):
145 def _identity(self):
160 self._read()
146 self._read()
161 return self._identity
147 return self._identity
162
148
163 @propertycache
149 @propertycache
164 def _nonnormalset(self):
150 def _nonnormalset(self):
165 nonnorm, otherparents = nonnormalentries(self._map)
151 nonnorm, otherparents = self._map.nonnormalentries()
166 self._otherparentset = otherparents
152 self._otherparentset = otherparents
167 return nonnorm
153 return nonnorm
168
154
169 @propertycache
155 @propertycache
170 def _otherparentset(self):
156 def _otherparentset(self):
171 nonnorm, otherparents = nonnormalentries(self._map)
157 nonnorm, otherparents = self._map.nonnormalentries()
172 self._nonnormalset = nonnorm
158 self._nonnormalset = nonnorm
173 return otherparents
159 return otherparents
174
160
175 @propertycache
161 @propertycache
176 def _filefoldmap(self):
162 def _filefoldmap(self):
177 try:
163 try:
178 makefilefoldmap = parsers.make_file_foldmap
164 makefilefoldmap = parsers.make_file_foldmap
179 except AttributeError:
165 except AttributeError:
180 pass
166 pass
181 else:
167 else:
182 return makefilefoldmap(self._map._map, util.normcasespec,
168 return makefilefoldmap(self._map._map, util.normcasespec,
183 util.normcasefallback)
169 util.normcasefallback)
184
170
185 f = {}
171 f = {}
186 normcase = util.normcase
172 normcase = util.normcase
187 for name, s in self._map.iteritems():
173 for name, s in self._map.iteritems():
188 if s[0] != 'r':
174 if s[0] != 'r':
189 f[normcase(name)] = name
175 f[normcase(name)] = name
190 f['.'] = '.' # prevents useless util.fspath() invocation
176 f['.'] = '.' # prevents useless util.fspath() invocation
191 return f
177 return f
192
178
193 @propertycache
179 @propertycache
194 def _dirfoldmap(self):
180 def _dirfoldmap(self):
195 f = {}
181 f = {}
196 normcase = util.normcase
182 normcase = util.normcase
197 for name in self._dirs:
183 for name in self._dirs:
198 f[normcase(name)] = name
184 f[normcase(name)] = name
199 return f
185 return f
200
186
201 @property
187 @property
202 def _sparsematcher(self):
188 def _sparsematcher(self):
203 """The matcher for the sparse checkout.
189 """The matcher for the sparse checkout.
204
190
205 The working directory may not include every file from a manifest. The
191 The working directory may not include every file from a manifest. The
206 matcher obtained by this property will match a path if it is to be
192 matcher obtained by this property will match a path if it is to be
207 included in the working directory.
193 included in the working directory.
208 """
194 """
209 # TODO there is potential to cache this property. For now, the matcher
195 # TODO there is potential to cache this property. For now, the matcher
210 # is resolved on every access. (But the called function does use a
196 # is resolved on every access. (But the called function does use a
211 # cache to keep the lookup fast.)
197 # cache to keep the lookup fast.)
212 return self._sparsematchfn()
198 return self._sparsematchfn()
213
199
214 @repocache('branch')
200 @repocache('branch')
215 def _branch(self):
201 def _branch(self):
216 try:
202 try:
217 return self._opener.read("branch").strip() or "default"
203 return self._opener.read("branch").strip() or "default"
218 except IOError as inst:
204 except IOError as inst:
219 if inst.errno != errno.ENOENT:
205 if inst.errno != errno.ENOENT:
220 raise
206 raise
221 return "default"
207 return "default"
222
208
223 @propertycache
209 @propertycache
224 def _pl(self):
210 def _pl(self):
225 try:
211 try:
226 fp = self._opendirstatefile()
212 fp = self._opendirstatefile()
227 st = fp.read(40)
213 st = fp.read(40)
228 fp.close()
214 fp.close()
229 l = len(st)
215 l = len(st)
230 if l == 40:
216 if l == 40:
231 return st[:20], st[20:40]
217 return st[:20], st[20:40]
232 elif l > 0 and l < 40:
218 elif l > 0 and l < 40:
233 raise error.Abort(_('working directory state appears damaged!'))
219 raise error.Abort(_('working directory state appears damaged!'))
234 except IOError as err:
220 except IOError as err:
235 if err.errno != errno.ENOENT:
221 if err.errno != errno.ENOENT:
236 raise
222 raise
237 return [nullid, nullid]
223 return [nullid, nullid]
238
224
239 @propertycache
225 @propertycache
240 def _dirs(self):
226 def _dirs(self):
241 return util.dirs(self._map._map, 'r')
227 return util.dirs(self._map._map, 'r')
242
228
243 def dirs(self):
229 def dirs(self):
244 return self._dirs
230 return self._dirs
245
231
246 @rootcache('.hgignore')
232 @rootcache('.hgignore')
247 def _ignore(self):
233 def _ignore(self):
248 files = self._ignorefiles()
234 files = self._ignorefiles()
249 if not files:
235 if not files:
250 return matchmod.never(self._root, '')
236 return matchmod.never(self._root, '')
251
237
252 pats = ['include:%s' % f for f in files]
238 pats = ['include:%s' % f for f in files]
253 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
239 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
254
240
255 @propertycache
241 @propertycache
256 def _slash(self):
242 def _slash(self):
257 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
243 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
258
244
259 @propertycache
245 @propertycache
260 def _checklink(self):
246 def _checklink(self):
261 return util.checklink(self._root)
247 return util.checklink(self._root)
262
248
263 @propertycache
249 @propertycache
264 def _checkexec(self):
250 def _checkexec(self):
265 return util.checkexec(self._root)
251 return util.checkexec(self._root)
266
252
267 @propertycache
253 @propertycache
268 def _checkcase(self):
254 def _checkcase(self):
269 return not util.fscasesensitive(self._join('.hg'))
255 return not util.fscasesensitive(self._join('.hg'))
270
256
271 def _join(self, f):
257 def _join(self, f):
272 # much faster than os.path.join()
258 # much faster than os.path.join()
273 # it's safe because f is always a relative path
259 # it's safe because f is always a relative path
274 return self._rootdir + f
260 return self._rootdir + f
275
261
276 def flagfunc(self, buildfallback):
262 def flagfunc(self, buildfallback):
277 if self._checklink and self._checkexec:
263 if self._checklink and self._checkexec:
278 def f(x):
264 def f(x):
279 try:
265 try:
280 st = os.lstat(self._join(x))
266 st = os.lstat(self._join(x))
281 if util.statislink(st):
267 if util.statislink(st):
282 return 'l'
268 return 'l'
283 if util.statisexec(st):
269 if util.statisexec(st):
284 return 'x'
270 return 'x'
285 except OSError:
271 except OSError:
286 pass
272 pass
287 return ''
273 return ''
288 return f
274 return f
289
275
290 fallback = buildfallback()
276 fallback = buildfallback()
291 if self._checklink:
277 if self._checklink:
292 def f(x):
278 def f(x):
293 if os.path.islink(self._join(x)):
279 if os.path.islink(self._join(x)):
294 return 'l'
280 return 'l'
295 if 'x' in fallback(x):
281 if 'x' in fallback(x):
296 return 'x'
282 return 'x'
297 return ''
283 return ''
298 return f
284 return f
299 if self._checkexec:
285 if self._checkexec:
300 def f(x):
286 def f(x):
301 if 'l' in fallback(x):
287 if 'l' in fallback(x):
302 return 'l'
288 return 'l'
303 if util.isexec(self._join(x)):
289 if util.isexec(self._join(x)):
304 return 'x'
290 return 'x'
305 return ''
291 return ''
306 return f
292 return f
307 else:
293 else:
308 return fallback
294 return fallback
309
295
310 @propertycache
296 @propertycache
311 def _cwd(self):
297 def _cwd(self):
312 # internal config: ui.forcecwd
298 # internal config: ui.forcecwd
313 forcecwd = self._ui.config('ui', 'forcecwd')
299 forcecwd = self._ui.config('ui', 'forcecwd')
314 if forcecwd:
300 if forcecwd:
315 return forcecwd
301 return forcecwd
316 return pycompat.getcwd()
302 return pycompat.getcwd()
317
303
318 def getcwd(self):
304 def getcwd(self):
319 '''Return the path from which a canonical path is calculated.
305 '''Return the path from which a canonical path is calculated.
320
306
321 This path should be used to resolve file patterns or to convert
307 This path should be used to resolve file patterns or to convert
322 canonical paths back to file paths for display. It shouldn't be
308 canonical paths back to file paths for display. It shouldn't be
323 used to get real file paths. Use vfs functions instead.
309 used to get real file paths. Use vfs functions instead.
324 '''
310 '''
325 cwd = self._cwd
311 cwd = self._cwd
326 if cwd == self._root:
312 if cwd == self._root:
327 return ''
313 return ''
328 # self._root ends with a path separator if self._root is '/' or 'C:\'
314 # self._root ends with a path separator if self._root is '/' or 'C:\'
329 rootsep = self._root
315 rootsep = self._root
330 if not util.endswithsep(rootsep):
316 if not util.endswithsep(rootsep):
331 rootsep += pycompat.ossep
317 rootsep += pycompat.ossep
332 if cwd.startswith(rootsep):
318 if cwd.startswith(rootsep):
333 return cwd[len(rootsep):]
319 return cwd[len(rootsep):]
334 else:
320 else:
335 # we're outside the repo. return an absolute path.
321 # we're outside the repo. return an absolute path.
336 return cwd
322 return cwd
337
323
338 def pathto(self, f, cwd=None):
324 def pathto(self, f, cwd=None):
339 if cwd is None:
325 if cwd is None:
340 cwd = self.getcwd()
326 cwd = self.getcwd()
341 path = util.pathto(self._root, cwd, f)
327 path = util.pathto(self._root, cwd, f)
342 if self._slash:
328 if self._slash:
343 return util.pconvert(path)
329 return util.pconvert(path)
344 return path
330 return path
345
331
346 def __getitem__(self, key):
332 def __getitem__(self, key):
347 '''Return the current state of key (a filename) in the dirstate.
333 '''Return the current state of key (a filename) in the dirstate.
348
334
349 States are:
335 States are:
350 n normal
336 n normal
351 m needs merging
337 m needs merging
352 r marked for removal
338 r marked for removal
353 a marked for addition
339 a marked for addition
354 ? not tracked
340 ? not tracked
355 '''
341 '''
356 return self._map.get(key, ("?",))[0]
342 return self._map.get(key, ("?",))[0]
357
343
358 def __contains__(self, key):
344 def __contains__(self, key):
359 return key in self._map
345 return key in self._map
360
346
361 def __iter__(self):
347 def __iter__(self):
362 return iter(sorted(self._map))
348 return iter(sorted(self._map))
363
349
364 def items(self):
350 def items(self):
365 return self._map.iteritems()
351 return self._map.iteritems()
366
352
367 iteritems = items
353 iteritems = items
368
354
369 def parents(self):
355 def parents(self):
370 return [self._validate(p) for p in self._pl]
356 return [self._validate(p) for p in self._pl]
371
357
372 def p1(self):
358 def p1(self):
373 return self._validate(self._pl[0])
359 return self._validate(self._pl[0])
374
360
375 def p2(self):
361 def p2(self):
376 return self._validate(self._pl[1])
362 return self._validate(self._pl[1])
377
363
378 def branch(self):
364 def branch(self):
379 return encoding.tolocal(self._branch)
365 return encoding.tolocal(self._branch)
380
366
381 def setparents(self, p1, p2=nullid):
367 def setparents(self, p1, p2=nullid):
382 """Set dirstate parents to p1 and p2.
368 """Set dirstate parents to p1 and p2.
383
369
384 When moving from two parents to one, 'm' merged entries a
370 When moving from two parents to one, 'm' merged entries a
385 adjusted to normal and previous copy records discarded and
371 adjusted to normal and previous copy records discarded and
386 returned by the call.
372 returned by the call.
387
373
388 See localrepo.setparents()
374 See localrepo.setparents()
389 """
375 """
390 if self._parentwriters == 0:
376 if self._parentwriters == 0:
391 raise ValueError("cannot set dirstate parent without "
377 raise ValueError("cannot set dirstate parent without "
392 "calling dirstate.beginparentchange")
378 "calling dirstate.beginparentchange")
393
379
394 self._dirty = self._dirtypl = True
380 self._dirty = self._dirtypl = True
395 oldp2 = self._pl[1]
381 oldp2 = self._pl[1]
396 if self._origpl is None:
382 if self._origpl is None:
397 self._origpl = self._pl
383 self._origpl = self._pl
398 self._pl = p1, p2
384 self._pl = p1, p2
399 copies = {}
385 copies = {}
400 if oldp2 != nullid and p2 == nullid:
386 if oldp2 != nullid and p2 == nullid:
401 candidatefiles = self._nonnormalset.union(self._otherparentset)
387 candidatefiles = self._nonnormalset.union(self._otherparentset)
402 for f in candidatefiles:
388 for f in candidatefiles:
403 s = self._map.get(f)
389 s = self._map.get(f)
404 if s is None:
390 if s is None:
405 continue
391 continue
406
392
407 # Discard 'm' markers when moving away from a merge state
393 # Discard 'm' markers when moving away from a merge state
408 if s[0] == 'm':
394 if s[0] == 'm':
409 source = self._copymap.get(f)
395 source = self._copymap.get(f)
410 if source:
396 if source:
411 copies[f] = source
397 copies[f] = source
412 self.normallookup(f)
398 self.normallookup(f)
413 # Also fix up otherparent markers
399 # Also fix up otherparent markers
414 elif s[0] == 'n' and s[2] == -2:
400 elif s[0] == 'n' and s[2] == -2:
415 source = self._copymap.get(f)
401 source = self._copymap.get(f)
416 if source:
402 if source:
417 copies[f] = source
403 copies[f] = source
418 self.add(f)
404 self.add(f)
419 return copies
405 return copies
420
406
421 def setbranch(self, branch):
407 def setbranch(self, branch):
422 self._branch = encoding.fromlocal(branch)
408 self._branch = encoding.fromlocal(branch)
423 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
409 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
424 try:
410 try:
425 f.write(self._branch + '\n')
411 f.write(self._branch + '\n')
426 f.close()
412 f.close()
427
413
428 # make sure filecache has the correct stat info for _branch after
414 # make sure filecache has the correct stat info for _branch after
429 # replacing the underlying file
415 # replacing the underlying file
430 ce = self._filecache['_branch']
416 ce = self._filecache['_branch']
431 if ce:
417 if ce:
432 ce.refresh()
418 ce.refresh()
433 except: # re-raises
419 except: # re-raises
434 f.discard()
420 f.discard()
435 raise
421 raise
436
422
437 def _opendirstatefile(self):
423 def _opendirstatefile(self):
438 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
424 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
439 if self._pendingmode is not None and self._pendingmode != mode:
425 if self._pendingmode is not None and self._pendingmode != mode:
440 fp.close()
426 fp.close()
441 raise error.Abort(_('working directory state may be '
427 raise error.Abort(_('working directory state may be '
442 'changed parallelly'))
428 'changed parallelly'))
443 self._pendingmode = mode
429 self._pendingmode = mode
444 return fp
430 return fp
445
431
446 def _read(self):
432 def _read(self):
447 self._map = dirstatemap()
433 self._map = dirstatemap()
448
434
449 self._copymap = {}
435 self._copymap = {}
450 # ignore HG_PENDING because identity is used only for writing
436 # ignore HG_PENDING because identity is used only for writing
451 self._identity = util.filestat.frompath(
437 self._identity = util.filestat.frompath(
452 self._opener.join(self._filename))
438 self._opener.join(self._filename))
453 try:
439 try:
454 fp = self._opendirstatefile()
440 fp = self._opendirstatefile()
455 try:
441 try:
456 st = fp.read()
442 st = fp.read()
457 finally:
443 finally:
458 fp.close()
444 fp.close()
459 except IOError as err:
445 except IOError as err:
460 if err.errno != errno.ENOENT:
446 if err.errno != errno.ENOENT:
461 raise
447 raise
462 return
448 return
463 if not st:
449 if not st:
464 return
450 return
465
451
466 if util.safehasattr(parsers, 'dict_new_presized'):
452 if util.safehasattr(parsers, 'dict_new_presized'):
467 # Make an estimate of the number of files in the dirstate based on
453 # Make an estimate of the number of files in the dirstate based on
468 # its size. From a linear regression on a set of real-world repos,
454 # its size. From a linear regression on a set of real-world repos,
469 # all over 10,000 files, the size of a dirstate entry is 85
455 # all over 10,000 files, the size of a dirstate entry is 85
470 # bytes. The cost of resizing is significantly higher than the cost
456 # bytes. The cost of resizing is significantly higher than the cost
471 # of filling in a larger presized dict, so subtract 20% from the
457 # of filling in a larger presized dict, so subtract 20% from the
472 # size.
458 # size.
473 #
459 #
474 # This heuristic is imperfect in many ways, so in a future dirstate
460 # This heuristic is imperfect in many ways, so in a future dirstate
475 # format update it makes sense to just record the number of entries
461 # format update it makes sense to just record the number of entries
476 # on write.
462 # on write.
477 self._map._map = parsers.dict_new_presized(len(st) / 71)
463 self._map._map = parsers.dict_new_presized(len(st) / 71)
478
464
479 # Python's garbage collector triggers a GC each time a certain number
465 # Python's garbage collector triggers a GC each time a certain number
480 # of container objects (the number being defined by
466 # of container objects (the number being defined by
481 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
467 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
482 # for each file in the dirstate. The C version then immediately marks
468 # for each file in the dirstate. The C version then immediately marks
483 # them as not to be tracked by the collector. However, this has no
469 # them as not to be tracked by the collector. However, this has no
484 # effect on when GCs are triggered, only on what objects the GC looks
470 # effect on when GCs are triggered, only on what objects the GC looks
485 # into. This means that O(number of files) GCs are unavoidable.
471 # into. This means that O(number of files) GCs are unavoidable.
486 # Depending on when in the process's lifetime the dirstate is parsed,
472 # Depending on when in the process's lifetime the dirstate is parsed,
487 # this can get very expensive. As a workaround, disable GC while
473 # this can get very expensive. As a workaround, disable GC while
488 # parsing the dirstate.
474 # parsing the dirstate.
489 #
475 #
490 # (we cannot decorate the function directly since it is in a C module)
476 # (we cannot decorate the function directly since it is in a C module)
491 parse_dirstate = util.nogc(parsers.parse_dirstate)
477 parse_dirstate = util.nogc(parsers.parse_dirstate)
492 p = parse_dirstate(self._map._map, self._copymap, st)
478 p = parse_dirstate(self._map._map, self._copymap, st)
493 if not self._dirtypl:
479 if not self._dirtypl:
494 self._pl = p
480 self._pl = p
495
481
496 def invalidate(self):
482 def invalidate(self):
497 '''Causes the next access to reread the dirstate.
483 '''Causes the next access to reread the dirstate.
498
484
499 This is different from localrepo.invalidatedirstate() because it always
485 This is different from localrepo.invalidatedirstate() because it always
500 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
486 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
501 check whether the dirstate has changed before rereading it.'''
487 check whether the dirstate has changed before rereading it.'''
502
488
503 for a in ("_map", "_copymap", "_identity",
489 for a in ("_map", "_copymap", "_identity",
504 "_filefoldmap", "_dirfoldmap", "_branch",
490 "_filefoldmap", "_dirfoldmap", "_branch",
505 "_pl", "_dirs", "_ignore", "_nonnormalset",
491 "_pl", "_dirs", "_ignore", "_nonnormalset",
506 "_otherparentset"):
492 "_otherparentset"):
507 if a in self.__dict__:
493 if a in self.__dict__:
508 delattr(self, a)
494 delattr(self, a)
509 self._lastnormaltime = 0
495 self._lastnormaltime = 0
510 self._dirty = False
496 self._dirty = False
511 self._updatedfiles.clear()
497 self._updatedfiles.clear()
512 self._parentwriters = 0
498 self._parentwriters = 0
513 self._origpl = None
499 self._origpl = None
514
500
515 def copy(self, source, dest):
501 def copy(self, source, dest):
516 """Mark dest as a copy of source. Unmark dest if source is None."""
502 """Mark dest as a copy of source. Unmark dest if source is None."""
517 if source == dest:
503 if source == dest:
518 return
504 return
519 self._dirty = True
505 self._dirty = True
520 if source is not None:
506 if source is not None:
521 self._copymap[dest] = source
507 self._copymap[dest] = source
522 self._updatedfiles.add(source)
508 self._updatedfiles.add(source)
523 self._updatedfiles.add(dest)
509 self._updatedfiles.add(dest)
524 elif self._copymap.pop(dest, None):
510 elif self._copymap.pop(dest, None):
525 self._updatedfiles.add(dest)
511 self._updatedfiles.add(dest)
526
512
527 def copied(self, file):
513 def copied(self, file):
528 return self._copymap.get(file, None)
514 return self._copymap.get(file, None)
529
515
530 def copies(self):
516 def copies(self):
531 return self._copymap
517 return self._copymap
532
518
533 def _droppath(self, f):
519 def _droppath(self, f):
534 if self[f] not in "?r" and "_dirs" in self.__dict__:
520 if self[f] not in "?r" and "_dirs" in self.__dict__:
535 self._dirs.delpath(f)
521 self._dirs.delpath(f)
536
522
537 if "_filefoldmap" in self.__dict__:
523 if "_filefoldmap" in self.__dict__:
538 normed = util.normcase(f)
524 normed = util.normcase(f)
539 if normed in self._filefoldmap:
525 if normed in self._filefoldmap:
540 del self._filefoldmap[normed]
526 del self._filefoldmap[normed]
541
527
542 self._updatedfiles.add(f)
528 self._updatedfiles.add(f)
543
529
544 def _addpath(self, f, state, mode, size, mtime):
530 def _addpath(self, f, state, mode, size, mtime):
545 oldstate = self[f]
531 oldstate = self[f]
546 if state == 'a' or oldstate == 'r':
532 if state == 'a' or oldstate == 'r':
547 scmutil.checkfilename(f)
533 scmutil.checkfilename(f)
548 if f in self._dirs:
534 if f in self._dirs:
549 raise error.Abort(_('directory %r already in dirstate') % f)
535 raise error.Abort(_('directory %r already in dirstate') % f)
550 # shadows
536 # shadows
551 for d in util.finddirs(f):
537 for d in util.finddirs(f):
552 if d in self._dirs:
538 if d in self._dirs:
553 break
539 break
554 entry = self._map.get(d)
540 entry = self._map.get(d)
555 if entry is not None and entry[0] != 'r':
541 if entry is not None and entry[0] != 'r':
556 raise error.Abort(
542 raise error.Abort(
557 _('file %r in dirstate clashes with %r') % (d, f))
543 _('file %r in dirstate clashes with %r') % (d, f))
558 if oldstate in "?r" and "_dirs" in self.__dict__:
544 if oldstate in "?r" and "_dirs" in self.__dict__:
559 self._dirs.addpath(f)
545 self._dirs.addpath(f)
560 self._dirty = True
546 self._dirty = True
561 self._updatedfiles.add(f)
547 self._updatedfiles.add(f)
562 self._map[f] = dirstatetuple(state, mode, size, mtime)
548 self._map[f] = dirstatetuple(state, mode, size, mtime)
563 if state != 'n' or mtime == -1:
549 if state != 'n' or mtime == -1:
564 self._nonnormalset.add(f)
550 self._nonnormalset.add(f)
565 if size == -2:
551 if size == -2:
566 self._otherparentset.add(f)
552 self._otherparentset.add(f)
567
553
568 def normal(self, f):
554 def normal(self, f):
569 '''Mark a file normal and clean.'''
555 '''Mark a file normal and clean.'''
570 s = os.lstat(self._join(f))
556 s = os.lstat(self._join(f))
571 mtime = s.st_mtime
557 mtime = s.st_mtime
572 self._addpath(f, 'n', s.st_mode,
558 self._addpath(f, 'n', s.st_mode,
573 s.st_size & _rangemask, mtime & _rangemask)
559 s.st_size & _rangemask, mtime & _rangemask)
574 self._copymap.pop(f, None)
560 self._copymap.pop(f, None)
575 if f in self._nonnormalset:
561 if f in self._nonnormalset:
576 self._nonnormalset.remove(f)
562 self._nonnormalset.remove(f)
577 if mtime > self._lastnormaltime:
563 if mtime > self._lastnormaltime:
578 # Remember the most recent modification timeslot for status(),
564 # Remember the most recent modification timeslot for status(),
579 # to make sure we won't miss future size-preserving file content
565 # to make sure we won't miss future size-preserving file content
580 # modifications that happen within the same timeslot.
566 # modifications that happen within the same timeslot.
581 self._lastnormaltime = mtime
567 self._lastnormaltime = mtime
582
568
583 def normallookup(self, f):
569 def normallookup(self, f):
584 '''Mark a file normal, but possibly dirty.'''
570 '''Mark a file normal, but possibly dirty.'''
585 if self._pl[1] != nullid:
571 if self._pl[1] != nullid:
586 # if there is a merge going on and the file was either
572 # if there is a merge going on and the file was either
587 # in state 'm' (-1) or coming from other parent (-2) before
573 # in state 'm' (-1) or coming from other parent (-2) before
588 # being removed, restore that state.
574 # being removed, restore that state.
589 entry = self._map.get(f)
575 entry = self._map.get(f)
590 if entry is not None:
576 if entry is not None:
591 if entry[0] == 'r' and entry[2] in (-1, -2):
577 if entry[0] == 'r' and entry[2] in (-1, -2):
592 source = self._copymap.get(f)
578 source = self._copymap.get(f)
593 if entry[2] == -1:
579 if entry[2] == -1:
594 self.merge(f)
580 self.merge(f)
595 elif entry[2] == -2:
581 elif entry[2] == -2:
596 self.otherparent(f)
582 self.otherparent(f)
597 if source:
583 if source:
598 self.copy(source, f)
584 self.copy(source, f)
599 return
585 return
600 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
586 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
601 return
587 return
602 self._addpath(f, 'n', 0, -1, -1)
588 self._addpath(f, 'n', 0, -1, -1)
603 self._copymap.pop(f, None)
589 self._copymap.pop(f, None)
604 if f in self._nonnormalset:
590 if f in self._nonnormalset:
605 self._nonnormalset.remove(f)
591 self._nonnormalset.remove(f)
606
592
607 def otherparent(self, f):
593 def otherparent(self, f):
608 '''Mark as coming from the other parent, always dirty.'''
594 '''Mark as coming from the other parent, always dirty.'''
609 if self._pl[1] == nullid:
595 if self._pl[1] == nullid:
610 raise error.Abort(_("setting %r to other parent "
596 raise error.Abort(_("setting %r to other parent "
611 "only allowed in merges") % f)
597 "only allowed in merges") % f)
612 if f in self and self[f] == 'n':
598 if f in self and self[f] == 'n':
613 # merge-like
599 # merge-like
614 self._addpath(f, 'm', 0, -2, -1)
600 self._addpath(f, 'm', 0, -2, -1)
615 else:
601 else:
616 # add-like
602 # add-like
617 self._addpath(f, 'n', 0, -2, -1)
603 self._addpath(f, 'n', 0, -2, -1)
618 self._copymap.pop(f, None)
604 self._copymap.pop(f, None)
619
605
620 def add(self, f):
606 def add(self, f):
621 '''Mark a file added.'''
607 '''Mark a file added.'''
622 self._addpath(f, 'a', 0, -1, -1)
608 self._addpath(f, 'a', 0, -1, -1)
623 self._copymap.pop(f, None)
609 self._copymap.pop(f, None)
624
610
625 def remove(self, f):
611 def remove(self, f):
626 '''Mark a file removed.'''
612 '''Mark a file removed.'''
627 self._dirty = True
613 self._dirty = True
628 self._droppath(f)
614 self._droppath(f)
629 size = 0
615 size = 0
630 if self._pl[1] != nullid:
616 if self._pl[1] != nullid:
631 entry = self._map.get(f)
617 entry = self._map.get(f)
632 if entry is not None:
618 if entry is not None:
633 # backup the previous state
619 # backup the previous state
634 if entry[0] == 'm': # merge
620 if entry[0] == 'm': # merge
635 size = -1
621 size = -1
636 elif entry[0] == 'n' and entry[2] == -2: # other parent
622 elif entry[0] == 'n' and entry[2] == -2: # other parent
637 size = -2
623 size = -2
638 self._otherparentset.add(f)
624 self._otherparentset.add(f)
639 self._map[f] = dirstatetuple('r', 0, size, 0)
625 self._map[f] = dirstatetuple('r', 0, size, 0)
640 self._nonnormalset.add(f)
626 self._nonnormalset.add(f)
641 if size == 0:
627 if size == 0:
642 self._copymap.pop(f, None)
628 self._copymap.pop(f, None)
643
629
644 def merge(self, f):
630 def merge(self, f):
645 '''Mark a file merged.'''
631 '''Mark a file merged.'''
646 if self._pl[1] == nullid:
632 if self._pl[1] == nullid:
647 return self.normallookup(f)
633 return self.normallookup(f)
648 return self.otherparent(f)
634 return self.otherparent(f)
649
635
650 def drop(self, f):
636 def drop(self, f):
651 '''Drop a file from the dirstate'''
637 '''Drop a file from the dirstate'''
652 if f in self._map:
638 if f in self._map:
653 self._dirty = True
639 self._dirty = True
654 self._droppath(f)
640 self._droppath(f)
655 del self._map[f]
641 del self._map[f]
656 if f in self._nonnormalset:
642 if f in self._nonnormalset:
657 self._nonnormalset.remove(f)
643 self._nonnormalset.remove(f)
658 self._copymap.pop(f, None)
644 self._copymap.pop(f, None)
659
645
660 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
646 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
661 if exists is None:
647 if exists is None:
662 exists = os.path.lexists(os.path.join(self._root, path))
648 exists = os.path.lexists(os.path.join(self._root, path))
663 if not exists:
649 if not exists:
664 # Maybe a path component exists
650 # Maybe a path component exists
665 if not ignoremissing and '/' in path:
651 if not ignoremissing and '/' in path:
666 d, f = path.rsplit('/', 1)
652 d, f = path.rsplit('/', 1)
667 d = self._normalize(d, False, ignoremissing, None)
653 d = self._normalize(d, False, ignoremissing, None)
668 folded = d + "/" + f
654 folded = d + "/" + f
669 else:
655 else:
670 # No path components, preserve original case
656 # No path components, preserve original case
671 folded = path
657 folded = path
672 else:
658 else:
673 # recursively normalize leading directory components
659 # recursively normalize leading directory components
674 # against dirstate
660 # against dirstate
675 if '/' in normed:
661 if '/' in normed:
676 d, f = normed.rsplit('/', 1)
662 d, f = normed.rsplit('/', 1)
677 d = self._normalize(d, False, ignoremissing, True)
663 d = self._normalize(d, False, ignoremissing, True)
678 r = self._root + "/" + d
664 r = self._root + "/" + d
679 folded = d + "/" + util.fspath(f, r)
665 folded = d + "/" + util.fspath(f, r)
680 else:
666 else:
681 folded = util.fspath(normed, self._root)
667 folded = util.fspath(normed, self._root)
682 storemap[normed] = folded
668 storemap[normed] = folded
683
669
684 return folded
670 return folded
685
671
686 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
672 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
687 normed = util.normcase(path)
673 normed = util.normcase(path)
688 folded = self._filefoldmap.get(normed, None)
674 folded = self._filefoldmap.get(normed, None)
689 if folded is None:
675 if folded is None:
690 if isknown:
676 if isknown:
691 folded = path
677 folded = path
692 else:
678 else:
693 folded = self._discoverpath(path, normed, ignoremissing, exists,
679 folded = self._discoverpath(path, normed, ignoremissing, exists,
694 self._filefoldmap)
680 self._filefoldmap)
695 return folded
681 return folded
696
682
697 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
683 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
698 normed = util.normcase(path)
684 normed = util.normcase(path)
699 folded = self._filefoldmap.get(normed, None)
685 folded = self._filefoldmap.get(normed, None)
700 if folded is None:
686 if folded is None:
701 folded = self._dirfoldmap.get(normed, None)
687 folded = self._dirfoldmap.get(normed, None)
702 if folded is None:
688 if folded is None:
703 if isknown:
689 if isknown:
704 folded = path
690 folded = path
705 else:
691 else:
706 # store discovered result in dirfoldmap so that future
692 # store discovered result in dirfoldmap so that future
707 # normalizefile calls don't start matching directories
693 # normalizefile calls don't start matching directories
708 folded = self._discoverpath(path, normed, ignoremissing, exists,
694 folded = self._discoverpath(path, normed, ignoremissing, exists,
709 self._dirfoldmap)
695 self._dirfoldmap)
710 return folded
696 return folded
711
697
712 def normalize(self, path, isknown=False, ignoremissing=False):
698 def normalize(self, path, isknown=False, ignoremissing=False):
713 '''
699 '''
714 normalize the case of a pathname when on a casefolding filesystem
700 normalize the case of a pathname when on a casefolding filesystem
715
701
716 isknown specifies whether the filename came from walking the
702 isknown specifies whether the filename came from walking the
717 disk, to avoid extra filesystem access.
703 disk, to avoid extra filesystem access.
718
704
719 If ignoremissing is True, missing path are returned
705 If ignoremissing is True, missing path are returned
720 unchanged. Otherwise, we try harder to normalize possibly
706 unchanged. Otherwise, we try harder to normalize possibly
721 existing path components.
707 existing path components.
722
708
723 The normalized case is determined based on the following precedence:
709 The normalized case is determined based on the following precedence:
724
710
725 - version of name already stored in the dirstate
711 - version of name already stored in the dirstate
726 - version of name stored on disk
712 - version of name stored on disk
727 - version provided via command arguments
713 - version provided via command arguments
728 '''
714 '''
729
715
730 if self._checkcase:
716 if self._checkcase:
731 return self._normalize(path, isknown, ignoremissing)
717 return self._normalize(path, isknown, ignoremissing)
732 return path
718 return path
733
719
734 def clear(self):
720 def clear(self):
735 self._map = dirstatemap()
721 self._map = dirstatemap()
736 self._nonnormalset = set()
722 self._nonnormalset = set()
737 self._otherparentset = set()
723 self._otherparentset = set()
738 if "_dirs" in self.__dict__:
724 if "_dirs" in self.__dict__:
739 delattr(self, "_dirs")
725 delattr(self, "_dirs")
740 self._copymap = {}
726 self._copymap = {}
741 self._pl = [nullid, nullid]
727 self._pl = [nullid, nullid]
742 self._lastnormaltime = 0
728 self._lastnormaltime = 0
743 self._updatedfiles.clear()
729 self._updatedfiles.clear()
744 self._dirty = True
730 self._dirty = True
745
731
746 def rebuild(self, parent, allfiles, changedfiles=None):
732 def rebuild(self, parent, allfiles, changedfiles=None):
747 if changedfiles is None:
733 if changedfiles is None:
748 # Rebuild entire dirstate
734 # Rebuild entire dirstate
749 changedfiles = allfiles
735 changedfiles = allfiles
750 lastnormaltime = self._lastnormaltime
736 lastnormaltime = self._lastnormaltime
751 self.clear()
737 self.clear()
752 self._lastnormaltime = lastnormaltime
738 self._lastnormaltime = lastnormaltime
753
739
754 if self._origpl is None:
740 if self._origpl is None:
755 self._origpl = self._pl
741 self._origpl = self._pl
756 self._pl = (parent, nullid)
742 self._pl = (parent, nullid)
757 for f in changedfiles:
743 for f in changedfiles:
758 if f in allfiles:
744 if f in allfiles:
759 self.normallookup(f)
745 self.normallookup(f)
760 else:
746 else:
761 self.drop(f)
747 self.drop(f)
762
748
763 self._dirty = True
749 self._dirty = True
764
750
765 def identity(self):
751 def identity(self):
766 '''Return identity of dirstate itself to detect changing in storage
752 '''Return identity of dirstate itself to detect changing in storage
767
753
768 If identity of previous dirstate is equal to this, writing
754 If identity of previous dirstate is equal to this, writing
769 changes based on the former dirstate out can keep consistency.
755 changes based on the former dirstate out can keep consistency.
770 '''
756 '''
771 return self._identity
757 return self._identity
772
758
773 def write(self, tr):
759 def write(self, tr):
774 if not self._dirty:
760 if not self._dirty:
775 return
761 return
776
762
777 filename = self._filename
763 filename = self._filename
778 if tr:
764 if tr:
779 # 'dirstate.write()' is not only for writing in-memory
765 # 'dirstate.write()' is not only for writing in-memory
780 # changes out, but also for dropping ambiguous timestamp.
766 # changes out, but also for dropping ambiguous timestamp.
781 # delayed writing re-raise "ambiguous timestamp issue".
767 # delayed writing re-raise "ambiguous timestamp issue".
782 # See also the wiki page below for detail:
768 # See also the wiki page below for detail:
783 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
769 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
784
770
785 # emulate dropping timestamp in 'parsers.pack_dirstate'
771 # emulate dropping timestamp in 'parsers.pack_dirstate'
786 now = _getfsnow(self._opener)
772 now = _getfsnow(self._opener)
787 dmap = self._map
773 dmap = self._map
788 for f in self._updatedfiles:
774 for f in self._updatedfiles:
789 e = dmap.get(f)
775 e = dmap.get(f)
790 if e is not None and e[0] == 'n' and e[3] == now:
776 if e is not None and e[0] == 'n' and e[3] == now:
791 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
777 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
792 self._nonnormalset.add(f)
778 self._nonnormalset.add(f)
793
779
794 # emulate that all 'dirstate.normal' results are written out
780 # emulate that all 'dirstate.normal' results are written out
795 self._lastnormaltime = 0
781 self._lastnormaltime = 0
796 self._updatedfiles.clear()
782 self._updatedfiles.clear()
797
783
798 # delay writing in-memory changes out
784 # delay writing in-memory changes out
799 tr.addfilegenerator('dirstate', (self._filename,),
785 tr.addfilegenerator('dirstate', (self._filename,),
800 self._writedirstate, location='plain')
786 self._writedirstate, location='plain')
801 return
787 return
802
788
803 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
789 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
804 self._writedirstate(st)
790 self._writedirstate(st)
805
791
806 def addparentchangecallback(self, category, callback):
792 def addparentchangecallback(self, category, callback):
807 """add a callback to be called when the wd parents are changed
793 """add a callback to be called when the wd parents are changed
808
794
809 Callback will be called with the following arguments:
795 Callback will be called with the following arguments:
810 dirstate, (oldp1, oldp2), (newp1, newp2)
796 dirstate, (oldp1, oldp2), (newp1, newp2)
811
797
812 Category is a unique identifier to allow overwriting an old callback
798 Category is a unique identifier to allow overwriting an old callback
813 with a newer callback.
799 with a newer callback.
814 """
800 """
815 self._plchangecallbacks[category] = callback
801 self._plchangecallbacks[category] = callback
816
802
817 def _writedirstate(self, st):
803 def _writedirstate(self, st):
818 # notify callbacks about parents change
804 # notify callbacks about parents change
819 if self._origpl is not None and self._origpl != self._pl:
805 if self._origpl is not None and self._origpl != self._pl:
820 for c, callback in sorted(self._plchangecallbacks.iteritems()):
806 for c, callback in sorted(self._plchangecallbacks.iteritems()):
821 callback(self, self._origpl, self._pl)
807 callback(self, self._origpl, self._pl)
822 self._origpl = None
808 self._origpl = None
823 # use the modification time of the newly created temporary file as the
809 # use the modification time of the newly created temporary file as the
824 # filesystem's notion of 'now'
810 # filesystem's notion of 'now'
825 now = util.fstat(st).st_mtime & _rangemask
811 now = util.fstat(st).st_mtime & _rangemask
826
812
827 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
813 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
828 # timestamp of each entries in dirstate, because of 'now > mtime'
814 # timestamp of each entries in dirstate, because of 'now > mtime'
829 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
815 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
830 if delaywrite > 0:
816 if delaywrite > 0:
831 # do we have any files to delay for?
817 # do we have any files to delay for?
832 for f, e in self._map.iteritems():
818 for f, e in self._map.iteritems():
833 if e[0] == 'n' and e[3] == now:
819 if e[0] == 'n' and e[3] == now:
834 import time # to avoid useless import
820 import time # to avoid useless import
835 # rather than sleep n seconds, sleep until the next
821 # rather than sleep n seconds, sleep until the next
836 # multiple of n seconds
822 # multiple of n seconds
837 clock = time.time()
823 clock = time.time()
838 start = int(clock) - (int(clock) % delaywrite)
824 start = int(clock) - (int(clock) % delaywrite)
839 end = start + delaywrite
825 end = start + delaywrite
840 time.sleep(end - clock)
826 time.sleep(end - clock)
841 now = end # trust our estimate that the end is near now
827 now = end # trust our estimate that the end is near now
842 break
828 break
843
829
844 st.write(parsers.pack_dirstate(self._map._map, self._copymap, self._pl,
830 st.write(parsers.pack_dirstate(self._map._map, self._copymap, self._pl,
845 now))
831 now))
846 self._nonnormalset, self._otherparentset = nonnormalentries(self._map)
832 self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
847 st.close()
833 st.close()
848 self._lastnormaltime = 0
834 self._lastnormaltime = 0
849 self._dirty = self._dirtypl = False
835 self._dirty = self._dirtypl = False
850
836
851 def _dirignore(self, f):
837 def _dirignore(self, f):
852 if f == '.':
838 if f == '.':
853 return False
839 return False
854 if self._ignore(f):
840 if self._ignore(f):
855 return True
841 return True
856 for p in util.finddirs(f):
842 for p in util.finddirs(f):
857 if self._ignore(p):
843 if self._ignore(p):
858 return True
844 return True
859 return False
845 return False
860
846
861 def _ignorefiles(self):
847 def _ignorefiles(self):
862 files = []
848 files = []
863 if os.path.exists(self._join('.hgignore')):
849 if os.path.exists(self._join('.hgignore')):
864 files.append(self._join('.hgignore'))
850 files.append(self._join('.hgignore'))
865 for name, path in self._ui.configitems("ui"):
851 for name, path in self._ui.configitems("ui"):
866 if name == 'ignore' or name.startswith('ignore.'):
852 if name == 'ignore' or name.startswith('ignore.'):
867 # we need to use os.path.join here rather than self._join
853 # we need to use os.path.join here rather than self._join
868 # because path is arbitrary and user-specified
854 # because path is arbitrary and user-specified
869 files.append(os.path.join(self._rootdir, util.expandpath(path)))
855 files.append(os.path.join(self._rootdir, util.expandpath(path)))
870 return files
856 return files
871
857
872 def _ignorefileandline(self, f):
858 def _ignorefileandline(self, f):
873 files = collections.deque(self._ignorefiles())
859 files = collections.deque(self._ignorefiles())
874 visited = set()
860 visited = set()
875 while files:
861 while files:
876 i = files.popleft()
862 i = files.popleft()
877 patterns = matchmod.readpatternfile(i, self._ui.warn,
863 patterns = matchmod.readpatternfile(i, self._ui.warn,
878 sourceinfo=True)
864 sourceinfo=True)
879 for pattern, lineno, line in patterns:
865 for pattern, lineno, line in patterns:
880 kind, p = matchmod._patsplit(pattern, 'glob')
866 kind, p = matchmod._patsplit(pattern, 'glob')
881 if kind == "subinclude":
867 if kind == "subinclude":
882 if p not in visited:
868 if p not in visited:
883 files.append(p)
869 files.append(p)
884 continue
870 continue
885 m = matchmod.match(self._root, '', [], [pattern],
871 m = matchmod.match(self._root, '', [], [pattern],
886 warn=self._ui.warn)
872 warn=self._ui.warn)
887 if m(f):
873 if m(f):
888 return (i, lineno, line)
874 return (i, lineno, line)
889 visited.add(i)
875 visited.add(i)
890 return (None, -1, "")
876 return (None, -1, "")
891
877
892 def _walkexplicit(self, match, subrepos):
878 def _walkexplicit(self, match, subrepos):
893 '''Get stat data about the files explicitly specified by match.
879 '''Get stat data about the files explicitly specified by match.
894
880
895 Return a triple (results, dirsfound, dirsnotfound).
881 Return a triple (results, dirsfound, dirsnotfound).
896 - results is a mapping from filename to stat result. It also contains
882 - results is a mapping from filename to stat result. It also contains
897 listings mapping subrepos and .hg to None.
883 listings mapping subrepos and .hg to None.
898 - dirsfound is a list of files found to be directories.
884 - dirsfound is a list of files found to be directories.
899 - dirsnotfound is a list of files that the dirstate thinks are
885 - dirsnotfound is a list of files that the dirstate thinks are
900 directories and that were not found.'''
886 directories and that were not found.'''
901
887
902 def badtype(mode):
888 def badtype(mode):
903 kind = _('unknown')
889 kind = _('unknown')
904 if stat.S_ISCHR(mode):
890 if stat.S_ISCHR(mode):
905 kind = _('character device')
891 kind = _('character device')
906 elif stat.S_ISBLK(mode):
892 elif stat.S_ISBLK(mode):
907 kind = _('block device')
893 kind = _('block device')
908 elif stat.S_ISFIFO(mode):
894 elif stat.S_ISFIFO(mode):
909 kind = _('fifo')
895 kind = _('fifo')
910 elif stat.S_ISSOCK(mode):
896 elif stat.S_ISSOCK(mode):
911 kind = _('socket')
897 kind = _('socket')
912 elif stat.S_ISDIR(mode):
898 elif stat.S_ISDIR(mode):
913 kind = _('directory')
899 kind = _('directory')
914 return _('unsupported file type (type is %s)') % kind
900 return _('unsupported file type (type is %s)') % kind
915
901
916 matchedir = match.explicitdir
902 matchedir = match.explicitdir
917 badfn = match.bad
903 badfn = match.bad
918 dmap = self._map
904 dmap = self._map
919 lstat = os.lstat
905 lstat = os.lstat
920 getkind = stat.S_IFMT
906 getkind = stat.S_IFMT
921 dirkind = stat.S_IFDIR
907 dirkind = stat.S_IFDIR
922 regkind = stat.S_IFREG
908 regkind = stat.S_IFREG
923 lnkkind = stat.S_IFLNK
909 lnkkind = stat.S_IFLNK
924 join = self._join
910 join = self._join
925 dirsfound = []
911 dirsfound = []
926 foundadd = dirsfound.append
912 foundadd = dirsfound.append
927 dirsnotfound = []
913 dirsnotfound = []
928 notfoundadd = dirsnotfound.append
914 notfoundadd = dirsnotfound.append
929
915
930 if not match.isexact() and self._checkcase:
916 if not match.isexact() and self._checkcase:
931 normalize = self._normalize
917 normalize = self._normalize
932 else:
918 else:
933 normalize = None
919 normalize = None
934
920
935 files = sorted(match.files())
921 files = sorted(match.files())
936 subrepos.sort()
922 subrepos.sort()
937 i, j = 0, 0
923 i, j = 0, 0
938 while i < len(files) and j < len(subrepos):
924 while i < len(files) and j < len(subrepos):
939 subpath = subrepos[j] + "/"
925 subpath = subrepos[j] + "/"
940 if files[i] < subpath:
926 if files[i] < subpath:
941 i += 1
927 i += 1
942 continue
928 continue
943 while i < len(files) and files[i].startswith(subpath):
929 while i < len(files) and files[i].startswith(subpath):
944 del files[i]
930 del files[i]
945 j += 1
931 j += 1
946
932
947 if not files or '.' in files:
933 if not files or '.' in files:
948 files = ['.']
934 files = ['.']
949 results = dict.fromkeys(subrepos)
935 results = dict.fromkeys(subrepos)
950 results['.hg'] = None
936 results['.hg'] = None
951
937
952 alldirs = None
938 alldirs = None
953 for ff in files:
939 for ff in files:
954 # constructing the foldmap is expensive, so don't do it for the
940 # constructing the foldmap is expensive, so don't do it for the
955 # common case where files is ['.']
941 # common case where files is ['.']
956 if normalize and ff != '.':
942 if normalize and ff != '.':
957 nf = normalize(ff, False, True)
943 nf = normalize(ff, False, True)
958 else:
944 else:
959 nf = ff
945 nf = ff
960 if nf in results:
946 if nf in results:
961 continue
947 continue
962
948
963 try:
949 try:
964 st = lstat(join(nf))
950 st = lstat(join(nf))
965 kind = getkind(st.st_mode)
951 kind = getkind(st.st_mode)
966 if kind == dirkind:
952 if kind == dirkind:
967 if nf in dmap:
953 if nf in dmap:
968 # file replaced by dir on disk but still in dirstate
954 # file replaced by dir on disk but still in dirstate
969 results[nf] = None
955 results[nf] = None
970 if matchedir:
956 if matchedir:
971 matchedir(nf)
957 matchedir(nf)
972 foundadd((nf, ff))
958 foundadd((nf, ff))
973 elif kind == regkind or kind == lnkkind:
959 elif kind == regkind or kind == lnkkind:
974 results[nf] = st
960 results[nf] = st
975 else:
961 else:
976 badfn(ff, badtype(kind))
962 badfn(ff, badtype(kind))
977 if nf in dmap:
963 if nf in dmap:
978 results[nf] = None
964 results[nf] = None
979 except OSError as inst: # nf not found on disk - it is dirstate only
965 except OSError as inst: # nf not found on disk - it is dirstate only
980 if nf in dmap: # does it exactly match a missing file?
966 if nf in dmap: # does it exactly match a missing file?
981 results[nf] = None
967 results[nf] = None
982 else: # does it match a missing directory?
968 else: # does it match a missing directory?
983 if alldirs is None:
969 if alldirs is None:
984 alldirs = util.dirs(dmap._map)
970 alldirs = util.dirs(dmap._map)
985 if nf in alldirs:
971 if nf in alldirs:
986 if matchedir:
972 if matchedir:
987 matchedir(nf)
973 matchedir(nf)
988 notfoundadd(nf)
974 notfoundadd(nf)
989 else:
975 else:
990 badfn(ff, encoding.strtolocal(inst.strerror))
976 badfn(ff, encoding.strtolocal(inst.strerror))
991
977
992 # Case insensitive filesystems cannot rely on lstat() failing to detect
978 # Case insensitive filesystems cannot rely on lstat() failing to detect
993 # a case-only rename. Prune the stat object for any file that does not
979 # a case-only rename. Prune the stat object for any file that does not
994 # match the case in the filesystem, if there are multiple files that
980 # match the case in the filesystem, if there are multiple files that
995 # normalize to the same path.
981 # normalize to the same path.
996 if match.isexact() and self._checkcase:
982 if match.isexact() and self._checkcase:
997 normed = {}
983 normed = {}
998
984
999 for f, st in results.iteritems():
985 for f, st in results.iteritems():
1000 if st is None:
986 if st is None:
1001 continue
987 continue
1002
988
1003 nc = util.normcase(f)
989 nc = util.normcase(f)
1004 paths = normed.get(nc)
990 paths = normed.get(nc)
1005
991
1006 if paths is None:
992 if paths is None:
1007 paths = set()
993 paths = set()
1008 normed[nc] = paths
994 normed[nc] = paths
1009
995
1010 paths.add(f)
996 paths.add(f)
1011
997
1012 for norm, paths in normed.iteritems():
998 for norm, paths in normed.iteritems():
1013 if len(paths) > 1:
999 if len(paths) > 1:
1014 for path in paths:
1000 for path in paths:
1015 folded = self._discoverpath(path, norm, True, None,
1001 folded = self._discoverpath(path, norm, True, None,
1016 self._dirfoldmap)
1002 self._dirfoldmap)
1017 if path != folded:
1003 if path != folded:
1018 results[path] = None
1004 results[path] = None
1019
1005
1020 return results, dirsfound, dirsnotfound
1006 return results, dirsfound, dirsnotfound
1021
1007
1022 def walk(self, match, subrepos, unknown, ignored, full=True):
1008 def walk(self, match, subrepos, unknown, ignored, full=True):
1023 '''
1009 '''
1024 Walk recursively through the directory tree, finding all files
1010 Walk recursively through the directory tree, finding all files
1025 matched by match.
1011 matched by match.
1026
1012
1027 If full is False, maybe skip some known-clean files.
1013 If full is False, maybe skip some known-clean files.
1028
1014
1029 Return a dict mapping filename to stat-like object (either
1015 Return a dict mapping filename to stat-like object (either
1030 mercurial.osutil.stat instance or return value of os.stat()).
1016 mercurial.osutil.stat instance or return value of os.stat()).
1031
1017
1032 '''
1018 '''
1033 # full is a flag that extensions that hook into walk can use -- this
1019 # full is a flag that extensions that hook into walk can use -- this
1034 # implementation doesn't use it at all. This satisfies the contract
1020 # implementation doesn't use it at all. This satisfies the contract
1035 # because we only guarantee a "maybe".
1021 # because we only guarantee a "maybe".
1036
1022
1037 if ignored:
1023 if ignored:
1038 ignore = util.never
1024 ignore = util.never
1039 dirignore = util.never
1025 dirignore = util.never
1040 elif unknown:
1026 elif unknown:
1041 ignore = self._ignore
1027 ignore = self._ignore
1042 dirignore = self._dirignore
1028 dirignore = self._dirignore
1043 else:
1029 else:
1044 # if not unknown and not ignored, drop dir recursion and step 2
1030 # if not unknown and not ignored, drop dir recursion and step 2
1045 ignore = util.always
1031 ignore = util.always
1046 dirignore = util.always
1032 dirignore = util.always
1047
1033
1048 matchfn = match.matchfn
1034 matchfn = match.matchfn
1049 matchalways = match.always()
1035 matchalways = match.always()
1050 matchtdir = match.traversedir
1036 matchtdir = match.traversedir
1051 dmap = self._map
1037 dmap = self._map
1052 listdir = util.listdir
1038 listdir = util.listdir
1053 lstat = os.lstat
1039 lstat = os.lstat
1054 dirkind = stat.S_IFDIR
1040 dirkind = stat.S_IFDIR
1055 regkind = stat.S_IFREG
1041 regkind = stat.S_IFREG
1056 lnkkind = stat.S_IFLNK
1042 lnkkind = stat.S_IFLNK
1057 join = self._join
1043 join = self._join
1058
1044
1059 exact = skipstep3 = False
1045 exact = skipstep3 = False
1060 if match.isexact(): # match.exact
1046 if match.isexact(): # match.exact
1061 exact = True
1047 exact = True
1062 dirignore = util.always # skip step 2
1048 dirignore = util.always # skip step 2
1063 elif match.prefix(): # match.match, no patterns
1049 elif match.prefix(): # match.match, no patterns
1064 skipstep3 = True
1050 skipstep3 = True
1065
1051
1066 if not exact and self._checkcase:
1052 if not exact and self._checkcase:
1067 normalize = self._normalize
1053 normalize = self._normalize
1068 normalizefile = self._normalizefile
1054 normalizefile = self._normalizefile
1069 skipstep3 = False
1055 skipstep3 = False
1070 else:
1056 else:
1071 normalize = self._normalize
1057 normalize = self._normalize
1072 normalizefile = None
1058 normalizefile = None
1073
1059
1074 # step 1: find all explicit files
1060 # step 1: find all explicit files
1075 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1061 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1076
1062
1077 skipstep3 = skipstep3 and not (work or dirsnotfound)
1063 skipstep3 = skipstep3 and not (work or dirsnotfound)
1078 work = [d for d in work if not dirignore(d[0])]
1064 work = [d for d in work if not dirignore(d[0])]
1079
1065
1080 # step 2: visit subdirectories
1066 # step 2: visit subdirectories
1081 def traverse(work, alreadynormed):
1067 def traverse(work, alreadynormed):
1082 wadd = work.append
1068 wadd = work.append
1083 while work:
1069 while work:
1084 nd = work.pop()
1070 nd = work.pop()
1085 if not match.visitdir(nd):
1071 if not match.visitdir(nd):
1086 continue
1072 continue
1087 skip = None
1073 skip = None
1088 if nd == '.':
1074 if nd == '.':
1089 nd = ''
1075 nd = ''
1090 else:
1076 else:
1091 skip = '.hg'
1077 skip = '.hg'
1092 try:
1078 try:
1093 entries = listdir(join(nd), stat=True, skip=skip)
1079 entries = listdir(join(nd), stat=True, skip=skip)
1094 except OSError as inst:
1080 except OSError as inst:
1095 if inst.errno in (errno.EACCES, errno.ENOENT):
1081 if inst.errno in (errno.EACCES, errno.ENOENT):
1096 match.bad(self.pathto(nd),
1082 match.bad(self.pathto(nd),
1097 encoding.strtolocal(inst.strerror))
1083 encoding.strtolocal(inst.strerror))
1098 continue
1084 continue
1099 raise
1085 raise
1100 for f, kind, st in entries:
1086 for f, kind, st in entries:
1101 if normalizefile:
1087 if normalizefile:
1102 # even though f might be a directory, we're only
1088 # even though f might be a directory, we're only
1103 # interested in comparing it to files currently in the
1089 # interested in comparing it to files currently in the
1104 # dmap -- therefore normalizefile is enough
1090 # dmap -- therefore normalizefile is enough
1105 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1091 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1106 True)
1092 True)
1107 else:
1093 else:
1108 nf = nd and (nd + "/" + f) or f
1094 nf = nd and (nd + "/" + f) or f
1109 if nf not in results:
1095 if nf not in results:
1110 if kind == dirkind:
1096 if kind == dirkind:
1111 if not ignore(nf):
1097 if not ignore(nf):
1112 if matchtdir:
1098 if matchtdir:
1113 matchtdir(nf)
1099 matchtdir(nf)
1114 wadd(nf)
1100 wadd(nf)
1115 if nf in dmap and (matchalways or matchfn(nf)):
1101 if nf in dmap and (matchalways or matchfn(nf)):
1116 results[nf] = None
1102 results[nf] = None
1117 elif kind == regkind or kind == lnkkind:
1103 elif kind == regkind or kind == lnkkind:
1118 if nf in dmap:
1104 if nf in dmap:
1119 if matchalways or matchfn(nf):
1105 if matchalways or matchfn(nf):
1120 results[nf] = st
1106 results[nf] = st
1121 elif ((matchalways or matchfn(nf))
1107 elif ((matchalways or matchfn(nf))
1122 and not ignore(nf)):
1108 and not ignore(nf)):
1123 # unknown file -- normalize if necessary
1109 # unknown file -- normalize if necessary
1124 if not alreadynormed:
1110 if not alreadynormed:
1125 nf = normalize(nf, False, True)
1111 nf = normalize(nf, False, True)
1126 results[nf] = st
1112 results[nf] = st
1127 elif nf in dmap and (matchalways or matchfn(nf)):
1113 elif nf in dmap and (matchalways or matchfn(nf)):
1128 results[nf] = None
1114 results[nf] = None
1129
1115
1130 for nd, d in work:
1116 for nd, d in work:
1131 # alreadynormed means that processwork doesn't have to do any
1117 # alreadynormed means that processwork doesn't have to do any
1132 # expensive directory normalization
1118 # expensive directory normalization
1133 alreadynormed = not normalize or nd == d
1119 alreadynormed = not normalize or nd == d
1134 traverse([d], alreadynormed)
1120 traverse([d], alreadynormed)
1135
1121
1136 for s in subrepos:
1122 for s in subrepos:
1137 del results[s]
1123 del results[s]
1138 del results['.hg']
1124 del results['.hg']
1139
1125
1140 # step 3: visit remaining files from dmap
1126 # step 3: visit remaining files from dmap
1141 if not skipstep3 and not exact:
1127 if not skipstep3 and not exact:
1142 # If a dmap file is not in results yet, it was either
1128 # If a dmap file is not in results yet, it was either
1143 # a) not matching matchfn b) ignored, c) missing, or d) under a
1129 # a) not matching matchfn b) ignored, c) missing, or d) under a
1144 # symlink directory.
1130 # symlink directory.
1145 if not results and matchalways:
1131 if not results and matchalways:
1146 visit = [f for f in dmap]
1132 visit = [f for f in dmap]
1147 else:
1133 else:
1148 visit = [f for f in dmap if f not in results and matchfn(f)]
1134 visit = [f for f in dmap if f not in results and matchfn(f)]
1149 visit.sort()
1135 visit.sort()
1150
1136
1151 if unknown:
1137 if unknown:
1152 # unknown == True means we walked all dirs under the roots
1138 # unknown == True means we walked all dirs under the roots
1153 # that wasn't ignored, and everything that matched was stat'ed
1139 # that wasn't ignored, and everything that matched was stat'ed
1154 # and is already in results.
1140 # and is already in results.
1155 # The rest must thus be ignored or under a symlink.
1141 # The rest must thus be ignored or under a symlink.
1156 audit_path = pathutil.pathauditor(self._root, cached=True)
1142 audit_path = pathutil.pathauditor(self._root, cached=True)
1157
1143
1158 for nf in iter(visit):
1144 for nf in iter(visit):
1159 # If a stat for the same file was already added with a
1145 # If a stat for the same file was already added with a
1160 # different case, don't add one for this, since that would
1146 # different case, don't add one for this, since that would
1161 # make it appear as if the file exists under both names
1147 # make it appear as if the file exists under both names
1162 # on disk.
1148 # on disk.
1163 if (normalizefile and
1149 if (normalizefile and
1164 normalizefile(nf, True, True) in results):
1150 normalizefile(nf, True, True) in results):
1165 results[nf] = None
1151 results[nf] = None
1166 # Report ignored items in the dmap as long as they are not
1152 # Report ignored items in the dmap as long as they are not
1167 # under a symlink directory.
1153 # under a symlink directory.
1168 elif audit_path.check(nf):
1154 elif audit_path.check(nf):
1169 try:
1155 try:
1170 results[nf] = lstat(join(nf))
1156 results[nf] = lstat(join(nf))
1171 # file was just ignored, no links, and exists
1157 # file was just ignored, no links, and exists
1172 except OSError:
1158 except OSError:
1173 # file doesn't exist
1159 # file doesn't exist
1174 results[nf] = None
1160 results[nf] = None
1175 else:
1161 else:
1176 # It's either missing or under a symlink directory
1162 # It's either missing or under a symlink directory
1177 # which we in this case report as missing
1163 # which we in this case report as missing
1178 results[nf] = None
1164 results[nf] = None
1179 else:
1165 else:
1180 # We may not have walked the full directory tree above,
1166 # We may not have walked the full directory tree above,
1181 # so stat and check everything we missed.
1167 # so stat and check everything we missed.
1182 iv = iter(visit)
1168 iv = iter(visit)
1183 for st in util.statfiles([join(i) for i in visit]):
1169 for st in util.statfiles([join(i) for i in visit]):
1184 results[next(iv)] = st
1170 results[next(iv)] = st
1185 return results
1171 return results
1186
1172
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

        unsure:
          files that might have been modified since the dirstate was
          written, but need to be read to be sure (size is the same
          but mtime differs)
        status.modified:
          files that have definitely been modified since the dirstate
          was written (different size or mode)
        status.clean:
          files that have definitely not been modified since the
          dirstate was written
        '''
        # Rebind the boolean flags before shadowing their names with the
        # result lists below.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        # Bind frequently used attributes/methods to locals: this loop runs
        # once per file in the working directory, so attribute-lookup cost
        # matters.
        dmap = self._map
        ladd = lookup.append            # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                # File exists on disk but is not tracked: it is either
                # ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            # st is None (falsy) when the file is tracked but missing on
            # disk: report it as deleted for normal/merged/added states.
            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                # "normal" state: compare the recorded stat data against the
                # on-disk stat to classify as modified / unsure / clean.
                # Sizes and mtimes are stored masked to 31 bits, hence the
                # _rangemask comparisons.
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))
1278
1264
1279 def matches(self, match):
1265 def matches(self, match):
1280 '''
1266 '''
1281 return files in the dirstate (in whatever state) filtered by match
1267 return files in the dirstate (in whatever state) filtered by match
1282 '''
1268 '''
1283 dmap = self._map
1269 dmap = self._map
1284 if match.always():
1270 if match.always():
1285 return dmap.keys()
1271 return dmap.keys()
1286 files = match.files()
1272 files = match.files()
1287 if match.isexact():
1273 if match.isexact():
1288 # fast path -- filter the other way around, since typically files is
1274 # fast path -- filter the other way around, since typically files is
1289 # much smaller than dmap
1275 # much smaller than dmap
1290 return [f for f in files if f in dmap]
1276 return [f for f in files if f in dmap]
1291 if match.prefix() and all(fn in dmap for fn in files):
1277 if match.prefix() and all(fn in dmap for fn in files):
1292 # fast path -- all the values are known to be files, so just return
1278 # fast path -- all the values are known to be files, so just return
1293 # that
1279 # that
1294 return list(files)
1280 return list(files)
1295 return [f for f in dmap if match(f)]
1281 return [f for f in dmap if match(f)]
1296
1282
1297 def _actualfilename(self, tr):
1283 def _actualfilename(self, tr):
1298 if tr:
1284 if tr:
1299 return self._pendingfilename
1285 return self._pendingfilename
1300 else:
1286 else:
1301 return self._filename
1287 return self._filename
1302
1288
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        # When a transaction is active this is the pending dirstate file.
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                             checkambig=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        # Remove any stale backup before creating the new one.
        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(self._opener.join(filename),
                      self._opener.join(backupname), hardlink=True)
1332
1318
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        # checkambig=True avoids file-stat ambiguity when the restored file
        # ends up with the same size/mtime as the one it replaces.
        self._opener.rename(backupname, filename, checkambig=True)
1340
1326
    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        # tr is unused here but kept for interface symmetry with
        # savebackup/restorebackup.
        self._opener.unlink(backupname)
1344
1330
class dirstatemap(object):
    """Thin mapping wrapper around the filename -> dirstate-entry dict.

    Exposes the subset of the dict protocol that dirstate needs, plus
    ``nonnormalentries`` to derive the non-normal/other-parent sets.
    """

    def __init__(self):
        # Backing store: filename -> dirstate entry tuple.
        self._map = {}

    def iteritems(self):
        return self._map.iteritems()

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __setitem__(self, key, value):
        self._map[key] = value

    def __getitem__(self, key):
        return self._map[key]

    def __delitem__(self, key):
        del self._map[key]

    def keys(self):
        return self._map.keys()

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            # Fast path: the C extension computes both sets in one pass.
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            # Pure-Python fallback for builds without the C helper.
            nonnormal = set()
            otherparent = set()
            for path, entry in self._map.iteritems():
                # entry layout: (state, mode, size, mtime)
                if entry[0] != 'n' or entry[3] == -1:
                    nonnormal.add(path)
                if entry[0] == 'n' and entry[2] == -2:
                    otherparent.add(path)
            return nonnormal, otherparent
1372
General Comments 0
You need to be logged in to leave comments. Login now