##// END OF EJS Templates
dirstate: move opendirstatefile to dirstatemap...
Durham Goode -
r34338:c36c3fa7 default
parent child Browse files
Show More
@@ -1,1377 +1,1383 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
class repocache(filecache):
    """A filecache whose tracked files live under the repository's .hg/."""

    def join(self, obj, fname):
        # Resolve fname through the dirstate's .hg/ opener.
        return obj._opener.join(fname)
42
42
class rootcache(filecache):
    """A filecache whose tracked files live in the working-copy root."""

    def join(self, obj, fname):
        # Resolve fname relative to the repository root.
        return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._dirtypl = False
74 self._dirtypl = False
75 self._lastnormaltime = 0
75 self._lastnormaltime = 0
76 self._ui = ui
76 self._ui = ui
77 self._filecache = {}
77 self._filecache = {}
78 self._parentwriters = 0
78 self._parentwriters = 0
79 self._filename = 'dirstate'
79 self._filename = 'dirstate'
80 self._pendingfilename = '%s.pending' % self._filename
80 self._pendingfilename = '%s.pending' % self._filename
81 self._plchangecallbacks = {}
81 self._plchangecallbacks = {}
82 self._origpl = None
82 self._origpl = None
83 self._updatedfiles = set()
83 self._updatedfiles = set()
84
84
85 # for consistent view between _pl() and _read() invocations
86 self._pendingmode = None
87
88 @contextlib.contextmanager
85 @contextlib.contextmanager
89 def parentchange(self):
86 def parentchange(self):
90 '''Context manager for handling dirstate parents.
87 '''Context manager for handling dirstate parents.
91
88
92 If an exception occurs in the scope of the context manager,
89 If an exception occurs in the scope of the context manager,
93 the incoherent dirstate won't be written when wlock is
90 the incoherent dirstate won't be written when wlock is
94 released.
91 released.
95 '''
92 '''
96 self._parentwriters += 1
93 self._parentwriters += 1
97 yield
94 yield
98 # Typically we want the "undo" step of a context manager in a
95 # Typically we want the "undo" step of a context manager in a
99 # finally block so it happens even when an exception
96 # finally block so it happens even when an exception
100 # occurs. In this case, however, we only want to decrement
97 # occurs. In this case, however, we only want to decrement
101 # parentwriters if the code in the with statement exits
98 # parentwriters if the code in the with statement exits
102 # normally, so we don't have a try/finally here on purpose.
99 # normally, so we don't have a try/finally here on purpose.
103 self._parentwriters -= 1
100 self._parentwriters -= 1
104
101
105 def beginparentchange(self):
102 def beginparentchange(self):
106 '''Marks the beginning of a set of changes that involve changing
103 '''Marks the beginning of a set of changes that involve changing
107 the dirstate parents. If there is an exception during this time,
104 the dirstate parents. If there is an exception during this time,
108 the dirstate will not be written when the wlock is released. This
105 the dirstate will not be written when the wlock is released. This
109 prevents writing an incoherent dirstate where the parent doesn't
106 prevents writing an incoherent dirstate where the parent doesn't
110 match the contents.
107 match the contents.
111 '''
108 '''
112 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 self._ui.deprecwarn('beginparentchange is obsoleted by the '
113 'parentchange context manager.', '4.3')
110 'parentchange context manager.', '4.3')
114 self._parentwriters += 1
111 self._parentwriters += 1
115
112
116 def endparentchange(self):
113 def endparentchange(self):
117 '''Marks the end of a set of changes that involve changing the
114 '''Marks the end of a set of changes that involve changing the
118 dirstate parents. Once all parent changes have been marked done,
115 dirstate parents. Once all parent changes have been marked done,
119 the wlock will be free to write the dirstate on release.
116 the wlock will be free to write the dirstate on release.
120 '''
117 '''
121 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 self._ui.deprecwarn('endparentchange is obsoleted by the '
122 'parentchange context manager.', '4.3')
119 'parentchange context manager.', '4.3')
123 if self._parentwriters > 0:
120 if self._parentwriters > 0:
124 self._parentwriters -= 1
121 self._parentwriters -= 1
125
122
126 def pendingparentchange(self):
123 def pendingparentchange(self):
127 '''Returns true if the dirstate is in the middle of a set of changes
124 '''Returns true if the dirstate is in the middle of a set of changes
128 that modify the dirstate parent.
125 that modify the dirstate parent.
129 '''
126 '''
130 return self._parentwriters > 0
127 return self._parentwriters > 0
131
128
    @propertycache
    def _map(self):
        '''Return the dirstate contents as a map from filename to
        (state, mode, size, time).'''
        # _read() assigns the real dirstatemap to self._map, replacing this
        # propertycache entry; returning self._map then yields that object.
        self._read()
        return self._map

    @propertycache
    def _identity(self):
        # Stat identity of the dirstate file captured at read time; set as a
        # side effect of _read() (same self-replacing pattern as _map above).
        self._read()
        return self._identity
143
140
    @propertycache
    def _nonnormalset(self):
        # nonnormalentries() computes both sets in one pass, so whichever
        # property is accessed first seeds the other one's cache too.
        nonnorm, otherparents = self._map.nonnormalentries()
        self._otherparentset = otherparents
        return nonnorm

    @propertycache
    def _otherparentset(self):
        # Mirror of _nonnormalset: populate both caches from a single call.
        nonnorm, otherparents = self._map.nonnormalentries()
        self._nonnormalset = nonnorm
        return otherparents
155
152
156 @propertycache
153 @propertycache
157 def _filefoldmap(self):
154 def _filefoldmap(self):
158 return self._map.filefoldmap()
155 return self._map.filefoldmap()
159
156
160 @propertycache
157 @propertycache
161 def _dirfoldmap(self):
158 def _dirfoldmap(self):
162 f = {}
159 f = {}
163 normcase = util.normcase
160 normcase = util.normcase
164 for name in self._dirs:
161 for name in self._dirs:
165 f[normcase(name)] = name
162 f[normcase(name)] = name
166 return f
163 return f
167
164
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
180
177
181 @repocache('branch')
178 @repocache('branch')
182 def _branch(self):
179 def _branch(self):
183 try:
180 try:
184 return self._opener.read("branch").strip() or "default"
181 return self._opener.read("branch").strip() or "default"
185 except IOError as inst:
182 except IOError as inst:
186 if inst.errno != errno.ENOENT:
183 if inst.errno != errno.ENOENT:
187 raise
184 raise
188 return "default"
185 return "default"
189
186
190 @propertycache
187 @propertycache
191 def _pl(self):
188 def _pl(self):
192 try:
189 try:
193 fp = self._opendirstatefile()
190 fp = self._map._opendirstatefile()
194 st = fp.read(40)
191 st = fp.read(40)
195 fp.close()
192 fp.close()
196 l = len(st)
193 l = len(st)
197 if l == 40:
194 if l == 40:
198 return st[:20], st[20:40]
195 return st[:20], st[20:40]
199 elif l > 0 and l < 40:
196 elif l > 0 and l < 40:
200 raise error.Abort(_('working directory state appears damaged!'))
197 raise error.Abort(_('working directory state appears damaged!'))
201 except IOError as err:
198 except IOError as err:
202 if err.errno != errno.ENOENT:
199 if err.errno != errno.ENOENT:
203 raise
200 raise
204 return [nullid, nullid]
201 return [nullid, nullid]
205
202
    @propertycache
    def _dirs(self):
        # Multiset of the directories containing tracked files, used for
        # directory/file clash detection and directory matching.
        return self._map.dirs()

    def dirs(self):
        return self._dirs
212
209
213 @rootcache('.hgignore')
210 @rootcache('.hgignore')
214 def _ignore(self):
211 def _ignore(self):
215 files = self._ignorefiles()
212 files = self._ignorefiles()
216 if not files:
213 if not files:
217 return matchmod.never(self._root, '')
214 return matchmod.never(self._root, '')
218
215
219 pats = ['include:%s' % f for f in files]
216 pats = ['include:%s' % f for f in files]
220 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
217 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
221
218
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the
        # native separator differs (Windows with ui.slash enabled).
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'

    @propertycache
    def _checklink(self):
        # Whether the filesystem under the repo root supports symlinks.
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # Whether the filesystem under the repo root honors the exec bit.
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed via the .hg dir).
        return not util.fscasesensitive(self._join('.hg'))
237
234
238 def _join(self, f):
235 def _join(self, f):
239 # much faster than os.path.join()
236 # much faster than os.path.join()
240 # it's safe because f is always a relative path
237 # it's safe because f is always a relative path
241 return self._rootdir + f
238 return self._rootdir + f
242
239
    def flagfunc(self, buildfallback):
        """Return a callable mapping a tracked path to its flags
        ('l' symlink, 'x' executable, '' neither).

        When the filesystem supports both symlinks and the exec bit, flags
        come from a single lstat.  Otherwise buildfallback() supplies flags
        for whichever bit the filesystem cannot represent.
        """
        if self._checklink and self._checkexec:
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    # e.g. the file vanished; report no flags
                    pass
                return ''
            return f

        # Filesystem is missing at least one capability: fall back for it.
        fallback = buildfallback()
        if self._checklink:
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            # Neither capability: flags come entirely from the fallback.
            return fallback
276
273
277 @propertycache
274 @propertycache
278 def _cwd(self):
275 def _cwd(self):
279 # internal config: ui.forcecwd
276 # internal config: ui.forcecwd
280 forcecwd = self._ui.config('ui', 'forcecwd')
277 forcecwd = self._ui.config('ui', 'forcecwd')
281 if forcecwd:
278 if forcecwd:
282 return forcecwd
279 return forcecwd
283 return pycompat.getcwd()
280 return pycompat.getcwd()
284
281
    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # cwd is inside the repo: return it relative to the root
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
304
301
305 def pathto(self, f, cwd=None):
302 def pathto(self, f, cwd=None):
306 if cwd is None:
303 if cwd is None:
307 cwd = self.getcwd()
304 cwd = self.getcwd()
308 path = util.pathto(self._root, cwd, f)
305 path = util.pathto(self._root, cwd, f)
309 if self._slash:
306 if self._slash:
310 return util.pconvert(path)
307 return util.pconvert(path)
311 return path
308 return path
312
309
    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked
        '''
        # Unknown files fall back to the one-element default tuple ("?",).
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        # Membership means "tracked in any state", including removal.
        return key in self._map

    def __iter__(self):
        # Deterministic (sorted) iteration order over tracked filenames.
        return iter(sorted(self._map))

    def items(self):
        return self._map.iteritems()

    iteritems = items
335
332
336 def parents(self):
333 def parents(self):
337 return [self._validate(p) for p in self._pl]
334 return [self._validate(p) for p in self._pl]
338
335
339 def p1(self):
336 def p1(self):
340 return self._validate(self._pl[0])
337 return self._validate(self._pl[0])
341
338
342 def p2(self):
339 def p2(self):
343 return self._validate(self._pl[1])
340 return self._validate(self._pl[1])
344
341
345 def branch(self):
342 def branch(self):
346 return encoding.tolocal(self._branch)
343 return encoding.tolocal(self._branch)
347
344
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records are discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent without "
                             "calling dirstate.beginparentchange")

        self._dirty = self._dirtypl = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents for later write-out logic
            self._origpl = self._pl
        self._pl = p1, p2
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # Leaving a merge: only non-normal and otherparent entries can
            # carry merge state, so those are the only candidates to fix up.
            candidatefiles = self._nonnormalset.union(self._otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
387
384
    def setbranch(self, branch):
        """Persist ``branch`` (converted from local encoding) to .hg/branch."""
        self._branch = encoding.fromlocal(branch)
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            # bare except is deliberate: even on KeyboardInterrupt the
            # half-written temp file must be discarded before re-raising
            f.discard()
            raise
403
400
    def _opendirstatefile(self):
        # Prefer the pending dirstate file while a transaction is open,
        # falling back to the regular 'dirstate' file otherwise.
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            # Mixing pending and non-pending reads within one dirstate
            # object would produce an incoherent view; refuse to continue.
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp
    def _read(self):
        """Parse the on-disk dirstate into self._map (and self._pl)."""
        self._map = dirstatemap(self._ui, self._opener, self._root)

        # ignore HG_PENDING because identity is used only for writing
        self._identity = util.filestat.frompath(
            self._opener.join(self._filename))
        try:
            fp = self._map._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # no dirstate file yet: empty map, nullid parents
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map._map = parsers.dict_new_presized(len(st) / 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map._map, self._map.copymap, st)
        if not self._dirtypl:
            # don't clobber parents that were set via setparents()
            self._pl = p
461
449
462 def invalidate(self):
450 def invalidate(self):
463 '''Causes the next access to reread the dirstate.
451 '''Causes the next access to reread the dirstate.
464
452
465 This is different from localrepo.invalidatedirstate() because it always
453 This is different from localrepo.invalidatedirstate() because it always
466 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
454 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
467 check whether the dirstate has changed before rereading it.'''
455 check whether the dirstate has changed before rereading it.'''
468
456
469 for a in ("_map", "_identity",
457 for a in ("_map", "_identity",
470 "_filefoldmap", "_dirfoldmap", "_branch",
458 "_filefoldmap", "_dirfoldmap", "_branch",
471 "_pl", "_dirs", "_ignore", "_nonnormalset",
459 "_pl", "_dirs", "_ignore", "_nonnormalset",
472 "_otherparentset"):
460 "_otherparentset"):
473 if a in self.__dict__:
461 if a in self.__dict__:
474 delattr(self, a)
462 delattr(self, a)
475 self._lastnormaltime = 0
463 self._lastnormaltime = 0
476 self._dirty = False
464 self._dirty = False
477 self._updatedfiles.clear()
465 self._updatedfiles.clear()
478 self._parentwriters = 0
466 self._parentwriters = 0
479 self._origpl = None
467 self._origpl = None
480
468
481 def copy(self, source, dest):
469 def copy(self, source, dest):
482 """Mark dest as a copy of source. Unmark dest if source is None."""
470 """Mark dest as a copy of source. Unmark dest if source is None."""
483 if source == dest:
471 if source == dest:
484 return
472 return
485 self._dirty = True
473 self._dirty = True
486 if source is not None:
474 if source is not None:
487 self._map.copymap[dest] = source
475 self._map.copymap[dest] = source
488 self._updatedfiles.add(source)
476 self._updatedfiles.add(source)
489 self._updatedfiles.add(dest)
477 self._updatedfiles.add(dest)
490 elif self._map.copymap.pop(dest, None):
478 elif self._map.copymap.pop(dest, None):
491 self._updatedfiles.add(dest)
479 self._updatedfiles.add(dest)
492
480
493 def copied(self, file):
481 def copied(self, file):
494 return self._map.copymap.get(file, None)
482 return self._map.copymap.get(file, None)
495
483
496 def copies(self):
484 def copies(self):
497 return self._map.copymap
485 return self._map.copymap
498
486
    def _droppath(self, f):
        # Keep the lazily-built caches coherent when f leaves the tracked
        # set: remove it from the dirs multiset and the file fold map.
        if self[f] not in "?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)

        if "_filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            if normed in self._filefoldmap:
                del self._filefoldmap[normed]

        # record f so pending writes know it changed
        self._updatedfiles.add(f)
509
497
    def _addpath(self, f, state, mode, size, mtime):
        """Insert or update the entry for f and keep auxiliary caches in sync.

        Raises error.Abort when f collides with a tracked directory or when
        a path prefix of f is itself a tracked (non-removed) file.
        """
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            # only newly-tracked paths need validity/clash checks
            scmutil.checkfilename(f)
            if f in self._dirs:
                raise error.Abort(_('directory %r already in dirstate') % f)
            # shadows
            for d in util.finddirs(f):
                if d in self._dirs:
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') % (d, f))
        if oldstate in "?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        self._dirty = True
        self._updatedfiles.add(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        # mtime == -1 marks "needs lookup" entries as non-normal too
        if state != 'n' or mtime == -1:
            self._nonnormalset.add(f)
        # size == -2 marks entries coming from the other merge parent
        if size == -2:
            self._otherparentset.add(f)
533
521
    def normal(self, f):
        '''Mark a file normal and clean.'''
        s = os.lstat(self._join(f))
        mtime = s.st_mtime
        # size and mtime are stored masked to 31 bits in the dirstate
        self._addpath(f, 'n', s.st_mode,
                      s.st_size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._nonnormalset:
            self._nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
548
536
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        # re-establish the copy record cleared by removal
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    # already merged / from other parent: nothing to change
                    return
        # size -1 and mtime -1 force a content comparison on next status
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
        if f in self._nonnormalset:
            self._nonnormalset.remove(f)
572
560
def otherparent(self, f):
    '''Mark as coming from the other parent, always dirty.

    Only valid during a merge (second parent set); aborts otherwise.
    '''
    if self._pl[1] == nullid:
        raise error.Abort(_("setting %r to other parent "
                            "only allowed in merges") % f)
    if f in self and self[f] == 'n':
        # merge-like
        self._addpath(f, 'm', 0, -2, -1)
    else:
        # add-like
        self._addpath(f, 'n', 0, -2, -1)
    self._map.copymap.pop(f, None)
585
573
def add(self, f):
    '''Mark a file added.'''
    self._addpath(f, 'a', 0, -1, -1)
    # an added file cannot carry a copy source
    self._map.copymap.pop(f, None)
590
578
def remove(self, f):
    '''Mark a file removed.

    During a merge, the previous state ('m' or other-parent 'n') is
    encoded in the size field (-1 / -2) so it can be restored later.
    '''
    self._dirty = True
    self._droppath(f)
    size = 0
    if self._pl[1] != nullid:
        entry = self._map.get(f)
        if entry is not None:
            # backup the previous state
            if entry[0] == 'm': # merge
                size = -1
            elif entry[0] == 'n' and entry[2] == -2: # other parent
                size = -2
                self._otherparentset.add(f)
    self._map[f] = dirstatetuple('r', 0, size, 0)
    self._nonnormalset.add(f)
    if size == 0:
        self._map.copymap.pop(f, None)
609
597
def merge(self, f):
    '''Mark a file merged.

    Outside of a merge (no second parent) this degrades to
    normallookup().
    '''
    if self._pl[1] == nullid:
        return self.normallookup(f)
    return self.otherparent(f)
615
603
def drop(self, f):
    '''Drop a file from the dirstate.

    No-op if f is not tracked.
    '''
    if f in self._map:
        self._dirty = True
        self._droppath(f)
        del self._map[f]
        if f in self._nonnormalset:
            self._nonnormalset.remove(f)
        self._map.copymap.pop(f, None)
625
613
626 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
614 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
627 if exists is None:
615 if exists is None:
628 exists = os.path.lexists(os.path.join(self._root, path))
616 exists = os.path.lexists(os.path.join(self._root, path))
629 if not exists:
617 if not exists:
630 # Maybe a path component exists
618 # Maybe a path component exists
631 if not ignoremissing and '/' in path:
619 if not ignoremissing and '/' in path:
632 d, f = path.rsplit('/', 1)
620 d, f = path.rsplit('/', 1)
633 d = self._normalize(d, False, ignoremissing, None)
621 d = self._normalize(d, False, ignoremissing, None)
634 folded = d + "/" + f
622 folded = d + "/" + f
635 else:
623 else:
636 # No path components, preserve original case
624 # No path components, preserve original case
637 folded = path
625 folded = path
638 else:
626 else:
639 # recursively normalize leading directory components
627 # recursively normalize leading directory components
640 # against dirstate
628 # against dirstate
641 if '/' in normed:
629 if '/' in normed:
642 d, f = normed.rsplit('/', 1)
630 d, f = normed.rsplit('/', 1)
643 d = self._normalize(d, False, ignoremissing, True)
631 d = self._normalize(d, False, ignoremissing, True)
644 r = self._root + "/" + d
632 r = self._root + "/" + d
645 folded = d + "/" + util.fspath(f, r)
633 folded = d + "/" + util.fspath(f, r)
646 else:
634 else:
647 folded = util.fspath(normed, self._root)
635 folded = util.fspath(normed, self._root)
648 storemap[normed] = folded
636 storemap[normed] = folded
649
637
650 return folded
638 return folded
651
639
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    '''Case-normalize a file path against the file fold map only.

    Unlike _normalize(), this never matches directories, so it is safe
    (and cheaper) when the caller knows path refers to a file.
    '''
    normed = util.normcase(path)
    folded = self._filefoldmap.get(normed, None)
    if folded is None:
        if isknown:
            folded = path
        else:
            folded = self._discoverpath(path, normed, ignoremissing, exists,
                                        self._filefoldmap)
    return folded
662
650
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    '''Case-normalize a path, consulting both file and dir fold maps.'''
    normed = util.normcase(path)
    folded = self._filefoldmap.get(normed, None)
    if folded is None:
        folded = self._dirfoldmap.get(normed, None)
    if folded is None:
        if isknown:
            folded = path
        else:
            # store discovered result in dirfoldmap so that future
            # normalizefile calls don't start matching directories
            folded = self._discoverpath(path, normed, ignoremissing, exists,
                                        self._dirfoldmap)
    return folded
677
665
def normalize(self, path, isknown=False, ignoremissing=False):
    '''
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    '''

    # on case-preserving filesystems the path is already canonical
    if self._checkcase:
        return self._normalize(path, isknown, ignoremissing)
    return path
699
687
def clear(self):
    '''Reset the dirstate to an empty state with null parents.'''
    # dirstatemap now owns the dirstate file handling, so it needs
    # ui/opener/root (post-"move opendirstatefile to dirstatemap")
    self._map = dirstatemap(self._ui, self._opener, self._root)
    self._nonnormalset = set()
    self._otherparentset = set()
    if "_dirs" in self.__dict__:
        delattr(self, "_dirs")
    self._pl = [nullid, nullid]
    self._lastnormaltime = 0
    self._updatedfiles.clear()
    self._dirty = True
710
698
def rebuild(self, parent, allfiles, changedfiles=None):
    '''Rebuild dirstate entries for changedfiles against parent.

    Files present in allfiles are marked for lookup; others are
    dropped. With changedfiles=None the entire dirstate is rebuilt.
    '''
    if changedfiles is None:
        # Rebuild entire dirstate
        changedfiles = allfiles
    # clear() resets _lastnormaltime; preserve it across the rebuild
    lastnormaltime = self._lastnormaltime
    self.clear()
    self._lastnormaltime = lastnormaltime

    if self._origpl is None:
        self._origpl = self._pl
    self._pl = (parent, nullid)
    for f in changedfiles:
        if f in allfiles:
            self.normallookup(f)
        else:
            self.drop(f)

    self._dirty = True
729
717
def identity(self):
    '''Return identity of dirstate itself to detect changing in storage

    If identity of previous dirstate is equal to this, writing
    changes based on the former dirstate out can keep consistency.
    '''
    return self._identity
737
725
def write(self, tr):
    '''Write the dirstate to disk, or schedule a delayed write via tr.

    No-op when there are no pending changes.
    '''
    if not self._dirty:
        return

    filename = self._filename
    if tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamp.
        # delayed writing re-raise "ambiguous timestamp issue".
        # See also the wiki page below for detail:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        dmap = self._map
        for f in self._updatedfiles:
            e = dmap.get(f)
            if e is not None and e[0] == 'n' and e[3] == now:
                # mtime == now is ambiguous; force a future lookup
                dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self._nonnormalset.add(f)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0
        self._updatedfiles.clear()

        # delay writing in-memory changes out
        tr.addfilegenerator('dirstate', (self._filename,),
                            self._writedirstate, location='plain')
        return

    st = self._opener(filename, "w", atomictemp=True, checkambig=True)
    self._writedirstate(st)
770
758
def addparentchangecallback(self, category, callback):
    """add a callback to be called when the wd parents are changed

    Callback will be called with the following arguments:
    dirstate, (oldp1, oldp2), (newp1, newp2)

    Category is a unique identifier to allow overwriting an old callback
    with a newer callback.
    """
    self._plchangecallbacks[category] = callback
781
769
def _writedirstate(self, st):
    '''Serialize the dirstate into the open file object st.'''
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        for c, callback in sorted(self._plchangecallbacks.iteritems()):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st).st_mtime & _rangemask

    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # timestamp of each entries in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
    if delaywrite > 0:
        # do we have any files to delay for?
        for f, e in self._map.iteritems():
            if e[0] == 'n' and e[3] == now:
                import time # to avoid useless import
                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                end = start + delaywrite
                time.sleep(end - clock)
                now = end # trust our estimate that the end is near now
                break

    st.write(parsers.pack_dirstate(self._map._map, self._map.copymap,
                                   self._pl, now))
    self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
    st.close()
    self._lastnormaltime = 0
    self._dirty = self._dirtypl = False
815
803
816 def _dirignore(self, f):
804 def _dirignore(self, f):
817 if f == '.':
805 if f == '.':
818 return False
806 return False
819 if self._ignore(f):
807 if self._ignore(f):
820 return True
808 return True
821 for p in util.finddirs(f):
809 for p in util.finddirs(f):
822 if self._ignore(p):
810 if self._ignore(p):
823 return True
811 return True
824 return False
812 return False
825
813
826 def _ignorefiles(self):
814 def _ignorefiles(self):
827 files = []
815 files = []
828 if os.path.exists(self._join('.hgignore')):
816 if os.path.exists(self._join('.hgignore')):
829 files.append(self._join('.hgignore'))
817 files.append(self._join('.hgignore'))
830 for name, path in self._ui.configitems("ui"):
818 for name, path in self._ui.configitems("ui"):
831 if name == 'ignore' or name.startswith('ignore.'):
819 if name == 'ignore' or name.startswith('ignore.'):
832 # we need to use os.path.join here rather than self._join
820 # we need to use os.path.join here rather than self._join
833 # because path is arbitrary and user-specified
821 # because path is arbitrary and user-specified
834 files.append(os.path.join(self._rootdir, util.expandpath(path)))
822 files.append(os.path.join(self._rootdir, util.expandpath(path)))
835 return files
823 return files
836
824
837 def _ignorefileandline(self, f):
825 def _ignorefileandline(self, f):
838 files = collections.deque(self._ignorefiles())
826 files = collections.deque(self._ignorefiles())
839 visited = set()
827 visited = set()
840 while files:
828 while files:
841 i = files.popleft()
829 i = files.popleft()
842 patterns = matchmod.readpatternfile(i, self._ui.warn,
830 patterns = matchmod.readpatternfile(i, self._ui.warn,
843 sourceinfo=True)
831 sourceinfo=True)
844 for pattern, lineno, line in patterns:
832 for pattern, lineno, line in patterns:
845 kind, p = matchmod._patsplit(pattern, 'glob')
833 kind, p = matchmod._patsplit(pattern, 'glob')
846 if kind == "subinclude":
834 if kind == "subinclude":
847 if p not in visited:
835 if p not in visited:
848 files.append(p)
836 files.append(p)
849 continue
837 continue
850 m = matchmod.match(self._root, '', [], [pattern],
838 m = matchmod.match(self._root, '', [], [pattern],
851 warn=self._ui.warn)
839 warn=self._ui.warn)
852 if m(f):
840 if m(f):
853 return (i, lineno, line)
841 return (i, lineno, line)
854 visited.add(i)
842 visited.add(i)
855 return (None, -1, "")
843 return (None, -1, "")
856
844
def _walkexplicit(self, match, subrepos):
    '''Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found.'''

    def badtype(mode):
        # human-readable description for files we refuse to track
        kind = _('unknown')
        if stat.S_ISCHR(mode):
            kind = _('character device')
        elif stat.S_ISBLK(mode):
            kind = _('block device')
        elif stat.S_ISFIFO(mode):
            kind = _('fifo')
        elif stat.S_ISSOCK(mode):
            kind = _('socket')
        elif stat.S_ISDIR(mode):
            kind = _('directory')
        return _('unsupported file type (type is %s)') % kind

    matchedir = match.explicitdir
    badfn = match.bad
    dmap = self._map
    # hoist frequently-used lookups into locals for the loop below
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop requested files that live inside a subrepo; the subrepo
    # itself is reported instead (both lists are walked in sorted order)
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + "/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or '.' in files:
        files = ['.']
    results = dict.fromkeys(subrepos)
    results['.hg'] = None

    alldirs = None
    for ff in files:
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['.']
        if normalize and ff != '.':
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                if matchedir:
                    matchedir(nf)
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst: # nf not found on disk - it is dirstate only
            if nf in dmap: # does it exactly match a missing file?
                results[nf] = None
            else: # does it match a missing directory?
                if alldirs is None:
                    alldirs = util.dirs(dmap._map)
                if nf in alldirs:
                    if matchedir:
                        matchedir(nf)
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        for f, st in results.iteritems():
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        for norm, paths in normed.iteritems():
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(path, norm, True, None,
                                                self._dirfoldmap)
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
986
974
987 def walk(self, match, subrepos, unknown, ignored, full=True):
975 def walk(self, match, subrepos, unknown, ignored, full=True):
988 '''
976 '''
989 Walk recursively through the directory tree, finding all files
977 Walk recursively through the directory tree, finding all files
990 matched by match.
978 matched by match.
991
979
992 If full is False, maybe skip some known-clean files.
980 If full is False, maybe skip some known-clean files.
993
981
994 Return a dict mapping filename to stat-like object (either
982 Return a dict mapping filename to stat-like object (either
995 mercurial.osutil.stat instance or return value of os.stat()).
983 mercurial.osutil.stat instance or return value of os.stat()).
996
984
997 '''
985 '''
998 # full is a flag that extensions that hook into walk can use -- this
986 # full is a flag that extensions that hook into walk can use -- this
999 # implementation doesn't use it at all. This satisfies the contract
987 # implementation doesn't use it at all. This satisfies the contract
1000 # because we only guarantee a "maybe".
988 # because we only guarantee a "maybe".
1001
989
1002 if ignored:
990 if ignored:
1003 ignore = util.never
991 ignore = util.never
1004 dirignore = util.never
992 dirignore = util.never
1005 elif unknown:
993 elif unknown:
1006 ignore = self._ignore
994 ignore = self._ignore
1007 dirignore = self._dirignore
995 dirignore = self._dirignore
1008 else:
996 else:
1009 # if not unknown and not ignored, drop dir recursion and step 2
997 # if not unknown and not ignored, drop dir recursion and step 2
1010 ignore = util.always
998 ignore = util.always
1011 dirignore = util.always
999 dirignore = util.always
1012
1000
1013 matchfn = match.matchfn
1001 matchfn = match.matchfn
1014 matchalways = match.always()
1002 matchalways = match.always()
1015 matchtdir = match.traversedir
1003 matchtdir = match.traversedir
1016 dmap = self._map
1004 dmap = self._map
1017 listdir = util.listdir
1005 listdir = util.listdir
1018 lstat = os.lstat
1006 lstat = os.lstat
1019 dirkind = stat.S_IFDIR
1007 dirkind = stat.S_IFDIR
1020 regkind = stat.S_IFREG
1008 regkind = stat.S_IFREG
1021 lnkkind = stat.S_IFLNK
1009 lnkkind = stat.S_IFLNK
1022 join = self._join
1010 join = self._join
1023
1011
1024 exact = skipstep3 = False
1012 exact = skipstep3 = False
1025 if match.isexact(): # match.exact
1013 if match.isexact(): # match.exact
1026 exact = True
1014 exact = True
1027 dirignore = util.always # skip step 2
1015 dirignore = util.always # skip step 2
1028 elif match.prefix(): # match.match, no patterns
1016 elif match.prefix(): # match.match, no patterns
1029 skipstep3 = True
1017 skipstep3 = True
1030
1018
1031 if not exact and self._checkcase:
1019 if not exact and self._checkcase:
1032 normalize = self._normalize
1020 normalize = self._normalize
1033 normalizefile = self._normalizefile
1021 normalizefile = self._normalizefile
1034 skipstep3 = False
1022 skipstep3 = False
1035 else:
1023 else:
1036 normalize = self._normalize
1024 normalize = self._normalize
1037 normalizefile = None
1025 normalizefile = None
1038
1026
1039 # step 1: find all explicit files
1027 # step 1: find all explicit files
1040 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1028 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1041
1029
1042 skipstep3 = skipstep3 and not (work or dirsnotfound)
1030 skipstep3 = skipstep3 and not (work or dirsnotfound)
1043 work = [d for d in work if not dirignore(d[0])]
1031 work = [d for d in work if not dirignore(d[0])]
1044
1032
1045 # step 2: visit subdirectories
1033 # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Iteratively visit every directory queued in *work*, statting
            # its entries and recording matches into *results* (a closure
            # variable of the enclosing walk). *alreadynormed* is True when
            # the queued paths are already case-normalized, which lets us
            # skip normalize() for newly discovered unknown files.
            wadd = work.append
            while work:
                nd = work.pop()
                if not match.visitdir(nd):
                    continue
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    # never descend into the repository metadata directory
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        # unreadable or vanished directory: report via the
                        # matcher's bad() callback and keep walking
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            # a tracked name may collide with an on-disk
                            # directory; report it as missing (None)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            # tracked name exists on disk but is neither a
                            # dir, regular file nor symlink: report missing
                            results[nf] = None
1094
1082
1095 for nd, d in work:
1083 for nd, d in work:
1096 # alreadynormed means that processwork doesn't have to do any
1084 # alreadynormed means that processwork doesn't have to do any
1097 # expensive directory normalization
1085 # expensive directory normalization
1098 alreadynormed = not normalize or nd == d
1086 alreadynormed = not normalize or nd == d
1099 traverse([d], alreadynormed)
1087 traverse([d], alreadynormed)
1100
1088
1101 for s in subrepos:
1089 for s in subrepos:
1102 del results[s]
1090 del results[s]
1103 del results['.hg']
1091 del results['.hg']
1104
1092
1105 # step 3: visit remaining files from dmap
1093 # step 3: visit remaining files from dmap
1106 if not skipstep3 and not exact:
1094 if not skipstep3 and not exact:
1107 # If a dmap file is not in results yet, it was either
1095 # If a dmap file is not in results yet, it was either
1108 # a) not matching matchfn b) ignored, c) missing, or d) under a
1096 # a) not matching matchfn b) ignored, c) missing, or d) under a
1109 # symlink directory.
1097 # symlink directory.
1110 if not results and matchalways:
1098 if not results and matchalways:
1111 visit = [f for f in dmap]
1099 visit = [f for f in dmap]
1112 else:
1100 else:
1113 visit = [f for f in dmap if f not in results and matchfn(f)]
1101 visit = [f for f in dmap if f not in results and matchfn(f)]
1114 visit.sort()
1102 visit.sort()
1115
1103
1116 if unknown:
1104 if unknown:
1117 # unknown == True means we walked all dirs under the roots
1105 # unknown == True means we walked all dirs under the roots
1118 # that wasn't ignored, and everything that matched was stat'ed
1106 # that wasn't ignored, and everything that matched was stat'ed
1119 # and is already in results.
1107 # and is already in results.
1120 # The rest must thus be ignored or under a symlink.
1108 # The rest must thus be ignored or under a symlink.
1121 audit_path = pathutil.pathauditor(self._root, cached=True)
1109 audit_path = pathutil.pathauditor(self._root, cached=True)
1122
1110
1123 for nf in iter(visit):
1111 for nf in iter(visit):
1124 # If a stat for the same file was already added with a
1112 # If a stat for the same file was already added with a
1125 # different case, don't add one for this, since that would
1113 # different case, don't add one for this, since that would
1126 # make it appear as if the file exists under both names
1114 # make it appear as if the file exists under both names
1127 # on disk.
1115 # on disk.
1128 if (normalizefile and
1116 if (normalizefile and
1129 normalizefile(nf, True, True) in results):
1117 normalizefile(nf, True, True) in results):
1130 results[nf] = None
1118 results[nf] = None
1131 # Report ignored items in the dmap as long as they are not
1119 # Report ignored items in the dmap as long as they are not
1132 # under a symlink directory.
1120 # under a symlink directory.
1133 elif audit_path.check(nf):
1121 elif audit_path.check(nf):
1134 try:
1122 try:
1135 results[nf] = lstat(join(nf))
1123 results[nf] = lstat(join(nf))
1136 # file was just ignored, no links, and exists
1124 # file was just ignored, no links, and exists
1137 except OSError:
1125 except OSError:
1138 # file doesn't exist
1126 # file doesn't exist
1139 results[nf] = None
1127 results[nf] = None
1140 else:
1128 else:
1141 # It's either missing or under a symlink directory
1129 # It's either missing or under a symlink directory
1142 # which we in this case report as missing
1130 # which we in this case report as missing
1143 results[nf] = None
1131 results[nf] = None
1144 else:
1132 else:
1145 # We may not have walked the full directory tree above,
1133 # We may not have walked the full directory tree above,
1146 # so stat and check everything we missed.
1134 # so stat and check everything we missed.
1147 iv = iter(visit)
1135 iv = iter(visit)
1148 for st in util.statfiles([join(i) for i in visit]):
1136 for st in util.statfiles([join(i) for i in visit]):
1149 results[next(iv)] = st
1137 results[next(iv)] = st
1150 return results
1138 return results
1151
1139
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written

        *ignored*, *clean* and *unknown* are booleans selecting whether the
        corresponding categories are listed at all.
        '''
        listignored, listclean, listunknown = ignored, clean, unknown
        # reuse the parameter names as result accumulators
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        # bind frequently used attributes/methods to locals: this loop is
        # one of the hottest paths in Mercurial
        dmap = self._map
        ladd = lookup.append            # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                # file is not tracked: it is either ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in "nma":
                # tracked but absent from disk: deleted
                dadd(fn)
            elif state == 'n':
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    # same size but different mtime: content must be read
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))
1243
1231
1244 def matches(self, match):
1232 def matches(self, match):
1245 '''
1233 '''
1246 return files in the dirstate (in whatever state) filtered by match
1234 return files in the dirstate (in whatever state) filtered by match
1247 '''
1235 '''
1248 dmap = self._map
1236 dmap = self._map
1249 if match.always():
1237 if match.always():
1250 return dmap.keys()
1238 return dmap.keys()
1251 files = match.files()
1239 files = match.files()
1252 if match.isexact():
1240 if match.isexact():
1253 # fast path -- filter the other way around, since typically files is
1241 # fast path -- filter the other way around, since typically files is
1254 # much smaller than dmap
1242 # much smaller than dmap
1255 return [f for f in files if f in dmap]
1243 return [f for f in files if f in dmap]
1256 if match.prefix() and all(fn in dmap for fn in files):
1244 if match.prefix() and all(fn in dmap for fn in files):
1257 # fast path -- all the values are known to be files, so just return
1245 # fast path -- all the values are known to be files, so just return
1258 # that
1246 # that
1259 return list(files)
1247 return list(files)
1260 return [f for f in dmap if match(f)]
1248 return [f for f in dmap if match(f)]
1261
1249
1262 def _actualfilename(self, tr):
1250 def _actualfilename(self, tr):
1263 if tr:
1251 if tr:
1264 return self._pendingfilename
1252 return self._pendingfilename
1265 else:
1253 else:
1266 return self._filename
1254 return self._filename
1267
1255
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        *tr* is the active transaction, or None outside a transaction;
        *backupname* must differ from the live dirstate filename.
        '''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                             checkambig=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        # replace any stale backup before creating the new one
        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(self._opener.join(filename),
                      self._opener.join(backupname), hardlink=True)
1297
1285
1298 def restorebackup(self, tr, backupname):
1286 def restorebackup(self, tr, backupname):
1299 '''Restore dirstate by backup file'''
1287 '''Restore dirstate by backup file'''
1300 # this "invalidate()" prevents "wlock.release()" from writing
1288 # this "invalidate()" prevents "wlock.release()" from writing
1301 # changes of dirstate out after restoring from backup file
1289 # changes of dirstate out after restoring from backup file
1302 self.invalidate()
1290 self.invalidate()
1303 filename = self._actualfilename(tr)
1291 filename = self._actualfilename(tr)
1304 self._opener.rename(backupname, filename, checkambig=True)
1292 self._opener.rename(backupname, filename, checkambig=True)
1305
1293
    def clearbackup(self, tr, backupname):
        '''Clear backup file

        *tr* is accepted for interface symmetry with savebackup and
        restorebackup but is not needed to delete the backup.
        '''
        self._opener.unlink(backupname)
1309
1297
class dirstatemap(object):
    """In-memory map backing the dirstate.

    Wraps the filename -> dirstatetuple mapping (``_map``) and the copy
    map (``copymap``), and knows how to open the on-disk dirstate file,
    honoring pending transaction data.
    """

    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        # filename -> dirstatetuple(state, mode, size, mtime)
        self._map = {}
        # copy/rename tracking: destination -> source
        self.copymap = {}

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    def iteritems(self):
        return self._map.iteritems()

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __setitem__(self, key, value):
        self._map[key] = value

    def __getitem__(self, key):
        return self._map[key]

    def __delitem__(self, key):
        del self._map[key]

    def keys(self):
        return self._map.keys()

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap

        Returns a pair (nonnormal, otherparent) of sets of filenames.
        '''
        try:
            # fast path: C extension, when available
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent

    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            # fast path: C extension, when available
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        f = {}
        normcase = util.normcase
        for name, s in self._map.iteritems():
            # skip removed ('r') entries
            if s[0] != 'r':
                f[normcase(name)] = name
        f['.'] = '.' # prevents useless util.fspath() invocation
        return f

    def dirs(self):
        """Returns a set-like object containing all the directories in the
        current dirstate.
        """
        return util.dirs(self._map, 'r')

    def _opendirstatefile(self):
        # Open the dirstate, preferring pending (in-transaction) data when
        # present; remember which mode was used so later opens of the same
        # object stay consistent with the first.
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp
1383
General Comments 0
You need to be logged in to leave comments. Log in now