##// END OF EJS Templates
dirstate: introduce new context manager for marking dirstate parent changes
Augie Fackler -
r32346:73e67c43 default
parent child Browse files
Show More
@@ -1,1287 +1,1305 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import errno
12 import errno
12 import os
13 import os
13 import stat
14 import stat
14
15
15 from .i18n import _
16 from .i18n import _
16 from .node import nullid
17 from .node import nullid
17 from . import (
18 from . import (
18 encoding,
19 encoding,
19 error,
20 error,
20 match as matchmod,
21 match as matchmod,
21 parsers,
22 parsers,
22 pathutil,
23 pathutil,
23 pycompat,
24 pycompat,
24 scmutil,
25 scmutil,
25 txnutil,
26 txnutil,
26 util,
27 util,
27 )
28 )
28
29
29 propertycache = util.propertycache
30 propertycache = util.propertycache
30 filecache = scmutil.filecache
31 filecache = scmutil.filecache
31 _rangemask = 0x7fffffff
32 _rangemask = 0x7fffffff
32
33
33 dirstatetuple = parsers.dirstatetuple
34 dirstatetuple = parsers.dirstatetuple
34
35
class repocache(filecache):
    """filecache variant that resolves names inside the .hg/ directory."""
    def join(self, obj, fname):
        return obj._opener.join(fname)
39
40
class rootcache(filecache):
    """filecache variant that resolves names relative to the repository root."""
    def join(self, obj, fname):
        return obj._join(fname)
44
45
def _getfsnow(vfs):
    '''Return "now" as seen by the filesystem backing vfs.

    A temporary file is created and fstat'ed so the answer reflects the
    filesystem's own timestamp granularity rather than the system clock.
    '''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd).st_mtime
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)
53
54
def nonnormalentries(dmap):
    '''Return the (nonnormal, otherparent) entry sets computed from dmap.

    Prefers the C implementation when the parsers module provides it and
    falls back to a pure-Python scan otherwise.
    '''
    try:
        return parsers.nonnormalotherparententries(dmap)
    except AttributeError:
        nonnorm = set(fname for fname, e in dmap.iteritems()
                      if e[0] != 'n' or e[3] == -1)
        otherparent = set(fname for fname, e in dmap.iteritems()
                          if e[0] == 'n' and e[2] == -2)
        return nonnorm, otherparent
67
68
68 class dirstate(object):
69 class dirstate(object):
69
70
70 def __init__(self, opener, ui, root, validate):
71 def __init__(self, opener, ui, root, validate):
71 '''Create a new dirstate object.
72 '''Create a new dirstate object.
72
73
73 opener is an open()-like callable that can be used to open the
74 opener is an open()-like callable that can be used to open the
74 dirstate file; root is the root of the directory tracked by
75 dirstate file; root is the root of the directory tracked by
75 the dirstate.
76 the dirstate.
76 '''
77 '''
77 self._opener = opener
78 self._opener = opener
78 self._validate = validate
79 self._validate = validate
79 self._root = root
80 self._root = root
80 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
81 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
81 # UNC path pointing to root share (issue4557)
82 # UNC path pointing to root share (issue4557)
82 self._rootdir = pathutil.normasprefix(root)
83 self._rootdir = pathutil.normasprefix(root)
83 # internal config: ui.forcecwd
84 # internal config: ui.forcecwd
84 forcecwd = ui.config('ui', 'forcecwd')
85 forcecwd = ui.config('ui', 'forcecwd')
85 if forcecwd:
86 if forcecwd:
86 self._cwd = forcecwd
87 self._cwd = forcecwd
87 self._dirty = False
88 self._dirty = False
88 self._dirtypl = False
89 self._dirtypl = False
89 self._lastnormaltime = 0
90 self._lastnormaltime = 0
90 self._ui = ui
91 self._ui = ui
91 self._filecache = {}
92 self._filecache = {}
92 self._parentwriters = 0
93 self._parentwriters = 0
93 self._filename = 'dirstate'
94 self._filename = 'dirstate'
94 self._pendingfilename = '%s.pending' % self._filename
95 self._pendingfilename = '%s.pending' % self._filename
95 self._plchangecallbacks = {}
96 self._plchangecallbacks = {}
96 self._origpl = None
97 self._origpl = None
97 self._updatedfiles = set()
98 self._updatedfiles = set()
98
99
99 # for consistent view between _pl() and _read() invocations
100 # for consistent view between _pl() and _read() invocations
100 self._pendingmode = None
101 self._pendingmode = None
101
102
103 @contextlib.contextmanager
104 def parentchange(self):
105 '''Context manager for handling dirstate parents.
106
107 If an exception occurs in the scope of the context manager,
108 the incoherent dirstate won't be written when wlock is
109 released.
110 '''
111 self._parentwriters += 1
112 yield
113 # Typically we want the "undo" step of a context manager in a
114 # finally block so it happens even when an exception
115 # occurs. In this case, however, we only want to decrement
116 # parentwriters if the code in the with statement exits
117 # normally, so we don't have a try/finally here on purpose.
118 self._parentwriters -= 1
119
102 def beginparentchange(self):
120 def beginparentchange(self):
103 '''Marks the beginning of a set of changes that involve changing
121 '''Marks the beginning of a set of changes that involve changing
104 the dirstate parents. If there is an exception during this time,
122 the dirstate parents. If there is an exception during this time,
105 the dirstate will not be written when the wlock is released. This
123 the dirstate will not be written when the wlock is released. This
106 prevents writing an incoherent dirstate where the parent doesn't
124 prevents writing an incoherent dirstate where the parent doesn't
107 match the contents.
125 match the contents.
108 '''
126 '''
109 self._parentwriters += 1
127 self._parentwriters += 1
110
128
111 def endparentchange(self):
129 def endparentchange(self):
112 '''Marks the end of a set of changes that involve changing the
130 '''Marks the end of a set of changes that involve changing the
113 dirstate parents. Once all parent changes have been marked done,
131 dirstate parents. Once all parent changes have been marked done,
114 the wlock will be free to write the dirstate on release.
132 the wlock will be free to write the dirstate on release.
115 '''
133 '''
116 if self._parentwriters > 0:
134 if self._parentwriters > 0:
117 self._parentwriters -= 1
135 self._parentwriters -= 1
118
136
119 def pendingparentchange(self):
137 def pendingparentchange(self):
120 '''Returns true if the dirstate is in the middle of a set of changes
138 '''Returns true if the dirstate is in the middle of a set of changes
121 that modify the dirstate parent.
139 that modify the dirstate parent.
122 '''
140 '''
123 return self._parentwriters > 0
141 return self._parentwriters > 0
124
142
125 @propertycache
143 @propertycache
126 def _map(self):
144 def _map(self):
127 '''Return the dirstate contents as a map from filename to
145 '''Return the dirstate contents as a map from filename to
128 (state, mode, size, time).'''
146 (state, mode, size, time).'''
129 self._read()
147 self._read()
130 return self._map
148 return self._map
131
149
132 @propertycache
150 @propertycache
133 def _copymap(self):
151 def _copymap(self):
134 self._read()
152 self._read()
135 return self._copymap
153 return self._copymap
136
154
137 @propertycache
155 @propertycache
138 def _nonnormalset(self):
156 def _nonnormalset(self):
139 nonnorm, otherparents = nonnormalentries(self._map)
157 nonnorm, otherparents = nonnormalentries(self._map)
140 self._otherparentset = otherparents
158 self._otherparentset = otherparents
141 return nonnorm
159 return nonnorm
142
160
143 @propertycache
161 @propertycache
144 def _otherparentset(self):
162 def _otherparentset(self):
145 nonnorm, otherparents = nonnormalentries(self._map)
163 nonnorm, otherparents = nonnormalentries(self._map)
146 self._nonnormalset = nonnorm
164 self._nonnormalset = nonnorm
147 return otherparents
165 return otherparents
148
166
149 @propertycache
167 @propertycache
150 def _filefoldmap(self):
168 def _filefoldmap(self):
151 try:
169 try:
152 makefilefoldmap = parsers.make_file_foldmap
170 makefilefoldmap = parsers.make_file_foldmap
153 except AttributeError:
171 except AttributeError:
154 pass
172 pass
155 else:
173 else:
156 return makefilefoldmap(self._map, util.normcasespec,
174 return makefilefoldmap(self._map, util.normcasespec,
157 util.normcasefallback)
175 util.normcasefallback)
158
176
159 f = {}
177 f = {}
160 normcase = util.normcase
178 normcase = util.normcase
161 for name, s in self._map.iteritems():
179 for name, s in self._map.iteritems():
162 if s[0] != 'r':
180 if s[0] != 'r':
163 f[normcase(name)] = name
181 f[normcase(name)] = name
164 f['.'] = '.' # prevents useless util.fspath() invocation
182 f['.'] = '.' # prevents useless util.fspath() invocation
165 return f
183 return f
166
184
167 @propertycache
185 @propertycache
168 def _dirfoldmap(self):
186 def _dirfoldmap(self):
169 f = {}
187 f = {}
170 normcase = util.normcase
188 normcase = util.normcase
171 for name in self._dirs:
189 for name in self._dirs:
172 f[normcase(name)] = name
190 f[normcase(name)] = name
173 return f
191 return f
174
192
175 @repocache('branch')
193 @repocache('branch')
176 def _branch(self):
194 def _branch(self):
177 try:
195 try:
178 return self._opener.read("branch").strip() or "default"
196 return self._opener.read("branch").strip() or "default"
179 except IOError as inst:
197 except IOError as inst:
180 if inst.errno != errno.ENOENT:
198 if inst.errno != errno.ENOENT:
181 raise
199 raise
182 return "default"
200 return "default"
183
201
184 @propertycache
202 @propertycache
185 def _pl(self):
203 def _pl(self):
186 try:
204 try:
187 fp = self._opendirstatefile()
205 fp = self._opendirstatefile()
188 st = fp.read(40)
206 st = fp.read(40)
189 fp.close()
207 fp.close()
190 l = len(st)
208 l = len(st)
191 if l == 40:
209 if l == 40:
192 return st[:20], st[20:40]
210 return st[:20], st[20:40]
193 elif l > 0 and l < 40:
211 elif l > 0 and l < 40:
194 raise error.Abort(_('working directory state appears damaged!'))
212 raise error.Abort(_('working directory state appears damaged!'))
195 except IOError as err:
213 except IOError as err:
196 if err.errno != errno.ENOENT:
214 if err.errno != errno.ENOENT:
197 raise
215 raise
198 return [nullid, nullid]
216 return [nullid, nullid]
199
217
200 @propertycache
218 @propertycache
201 def _dirs(self):
219 def _dirs(self):
202 return util.dirs(self._map, 'r')
220 return util.dirs(self._map, 'r')
203
221
204 def dirs(self):
222 def dirs(self):
205 return self._dirs
223 return self._dirs
206
224
207 @rootcache('.hgignore')
225 @rootcache('.hgignore')
208 def _ignore(self):
226 def _ignore(self):
209 files = self._ignorefiles()
227 files = self._ignorefiles()
210 if not files:
228 if not files:
211 return util.never
229 return util.never
212
230
213 pats = ['include:%s' % f for f in files]
231 pats = ['include:%s' % f for f in files]
214 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
232 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
215
233
216 @propertycache
234 @propertycache
217 def _slash(self):
235 def _slash(self):
218 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
236 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
219
237
220 @propertycache
238 @propertycache
221 def _checklink(self):
239 def _checklink(self):
222 return util.checklink(self._root)
240 return util.checklink(self._root)
223
241
224 @propertycache
242 @propertycache
225 def _checkexec(self):
243 def _checkexec(self):
226 return util.checkexec(self._root)
244 return util.checkexec(self._root)
227
245
228 @propertycache
246 @propertycache
229 def _checkcase(self):
247 def _checkcase(self):
230 return not util.fscasesensitive(self._join('.hg'))
248 return not util.fscasesensitive(self._join('.hg'))
231
249
232 def _join(self, f):
250 def _join(self, f):
233 # much faster than os.path.join()
251 # much faster than os.path.join()
234 # it's safe because f is always a relative path
252 # it's safe because f is always a relative path
235 return self._rootdir + f
253 return self._rootdir + f
236
254
237 def flagfunc(self, buildfallback):
255 def flagfunc(self, buildfallback):
238 if self._checklink and self._checkexec:
256 if self._checklink and self._checkexec:
239 def f(x):
257 def f(x):
240 try:
258 try:
241 st = os.lstat(self._join(x))
259 st = os.lstat(self._join(x))
242 if util.statislink(st):
260 if util.statislink(st):
243 return 'l'
261 return 'l'
244 if util.statisexec(st):
262 if util.statisexec(st):
245 return 'x'
263 return 'x'
246 except OSError:
264 except OSError:
247 pass
265 pass
248 return ''
266 return ''
249 return f
267 return f
250
268
251 fallback = buildfallback()
269 fallback = buildfallback()
252 if self._checklink:
270 if self._checklink:
253 def f(x):
271 def f(x):
254 if os.path.islink(self._join(x)):
272 if os.path.islink(self._join(x)):
255 return 'l'
273 return 'l'
256 if 'x' in fallback(x):
274 if 'x' in fallback(x):
257 return 'x'
275 return 'x'
258 return ''
276 return ''
259 return f
277 return f
260 if self._checkexec:
278 if self._checkexec:
261 def f(x):
279 def f(x):
262 if 'l' in fallback(x):
280 if 'l' in fallback(x):
263 return 'l'
281 return 'l'
264 if util.isexec(self._join(x)):
282 if util.isexec(self._join(x)):
265 return 'x'
283 return 'x'
266 return ''
284 return ''
267 return f
285 return f
268 else:
286 else:
269 return fallback
287 return fallback
270
288
271 @propertycache
289 @propertycache
272 def _cwd(self):
290 def _cwd(self):
273 return pycompat.getcwd()
291 return pycompat.getcwd()
274
292
275 def getcwd(self):
293 def getcwd(self):
276 '''Return the path from which a canonical path is calculated.
294 '''Return the path from which a canonical path is calculated.
277
295
278 This path should be used to resolve file patterns or to convert
296 This path should be used to resolve file patterns or to convert
279 canonical paths back to file paths for display. It shouldn't be
297 canonical paths back to file paths for display. It shouldn't be
280 used to get real file paths. Use vfs functions instead.
298 used to get real file paths. Use vfs functions instead.
281 '''
299 '''
282 cwd = self._cwd
300 cwd = self._cwd
283 if cwd == self._root:
301 if cwd == self._root:
284 return ''
302 return ''
285 # self._root ends with a path separator if self._root is '/' or 'C:\'
303 # self._root ends with a path separator if self._root is '/' or 'C:\'
286 rootsep = self._root
304 rootsep = self._root
287 if not util.endswithsep(rootsep):
305 if not util.endswithsep(rootsep):
288 rootsep += pycompat.ossep
306 rootsep += pycompat.ossep
289 if cwd.startswith(rootsep):
307 if cwd.startswith(rootsep):
290 return cwd[len(rootsep):]
308 return cwd[len(rootsep):]
291 else:
309 else:
292 # we're outside the repo. return an absolute path.
310 # we're outside the repo. return an absolute path.
293 return cwd
311 return cwd
294
312
295 def pathto(self, f, cwd=None):
313 def pathto(self, f, cwd=None):
296 if cwd is None:
314 if cwd is None:
297 cwd = self.getcwd()
315 cwd = self.getcwd()
298 path = util.pathto(self._root, cwd, f)
316 path = util.pathto(self._root, cwd, f)
299 if self._slash:
317 if self._slash:
300 return util.pconvert(path)
318 return util.pconvert(path)
301 return path
319 return path
302
320
303 def __getitem__(self, key):
321 def __getitem__(self, key):
304 '''Return the current state of key (a filename) in the dirstate.
322 '''Return the current state of key (a filename) in the dirstate.
305
323
306 States are:
324 States are:
307 n normal
325 n normal
308 m needs merging
326 m needs merging
309 r marked for removal
327 r marked for removal
310 a marked for addition
328 a marked for addition
311 ? not tracked
329 ? not tracked
312 '''
330 '''
313 return self._map.get(key, ("?",))[0]
331 return self._map.get(key, ("?",))[0]
314
332
315 def __contains__(self, key):
333 def __contains__(self, key):
316 return key in self._map
334 return key in self._map
317
335
318 def __iter__(self):
336 def __iter__(self):
319 for x in sorted(self._map):
337 for x in sorted(self._map):
320 yield x
338 yield x
321
339
322 def iteritems(self):
340 def iteritems(self):
323 return self._map.iteritems()
341 return self._map.iteritems()
324
342
325 def parents(self):
343 def parents(self):
326 return [self._validate(p) for p in self._pl]
344 return [self._validate(p) for p in self._pl]
327
345
328 def p1(self):
346 def p1(self):
329 return self._validate(self._pl[0])
347 return self._validate(self._pl[0])
330
348
331 def p2(self):
349 def p2(self):
332 return self._validate(self._pl[1])
350 return self._validate(self._pl[1])
333
351
334 def branch(self):
352 def branch(self):
335 return encoding.tolocal(self._branch)
353 return encoding.tolocal(self._branch)
336
354
337 def setparents(self, p1, p2=nullid):
355 def setparents(self, p1, p2=nullid):
338 """Set dirstate parents to p1 and p2.
356 """Set dirstate parents to p1 and p2.
339
357
340 When moving from two parents to one, 'm' merged entries a
358 When moving from two parents to one, 'm' merged entries a
341 adjusted to normal and previous copy records discarded and
359 adjusted to normal and previous copy records discarded and
342 returned by the call.
360 returned by the call.
343
361
344 See localrepo.setparents()
362 See localrepo.setparents()
345 """
363 """
346 if self._parentwriters == 0:
364 if self._parentwriters == 0:
347 raise ValueError("cannot set dirstate parent without "
365 raise ValueError("cannot set dirstate parent without "
348 "calling dirstate.beginparentchange")
366 "calling dirstate.beginparentchange")
349
367
350 self._dirty = self._dirtypl = True
368 self._dirty = self._dirtypl = True
351 oldp2 = self._pl[1]
369 oldp2 = self._pl[1]
352 if self._origpl is None:
370 if self._origpl is None:
353 self._origpl = self._pl
371 self._origpl = self._pl
354 self._pl = p1, p2
372 self._pl = p1, p2
355 copies = {}
373 copies = {}
356 if oldp2 != nullid and p2 == nullid:
374 if oldp2 != nullid and p2 == nullid:
357 candidatefiles = self._nonnormalset.union(self._otherparentset)
375 candidatefiles = self._nonnormalset.union(self._otherparentset)
358 for f in candidatefiles:
376 for f in candidatefiles:
359 s = self._map.get(f)
377 s = self._map.get(f)
360 if s is None:
378 if s is None:
361 continue
379 continue
362
380
363 # Discard 'm' markers when moving away from a merge state
381 # Discard 'm' markers when moving away from a merge state
364 if s[0] == 'm':
382 if s[0] == 'm':
365 if f in self._copymap:
383 if f in self._copymap:
366 copies[f] = self._copymap[f]
384 copies[f] = self._copymap[f]
367 self.normallookup(f)
385 self.normallookup(f)
368 # Also fix up otherparent markers
386 # Also fix up otherparent markers
369 elif s[0] == 'n' and s[2] == -2:
387 elif s[0] == 'n' and s[2] == -2:
370 if f in self._copymap:
388 if f in self._copymap:
371 copies[f] = self._copymap[f]
389 copies[f] = self._copymap[f]
372 self.add(f)
390 self.add(f)
373 return copies
391 return copies
374
392
375 def setbranch(self, branch):
393 def setbranch(self, branch):
376 self._branch = encoding.fromlocal(branch)
394 self._branch = encoding.fromlocal(branch)
377 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
395 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
378 try:
396 try:
379 f.write(self._branch + '\n')
397 f.write(self._branch + '\n')
380 f.close()
398 f.close()
381
399
382 # make sure filecache has the correct stat info for _branch after
400 # make sure filecache has the correct stat info for _branch after
383 # replacing the underlying file
401 # replacing the underlying file
384 ce = self._filecache['_branch']
402 ce = self._filecache['_branch']
385 if ce:
403 if ce:
386 ce.refresh()
404 ce.refresh()
387 except: # re-raises
405 except: # re-raises
388 f.discard()
406 f.discard()
389 raise
407 raise
390
408
391 def _opendirstatefile(self):
409 def _opendirstatefile(self):
392 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
410 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
393 if self._pendingmode is not None and self._pendingmode != mode:
411 if self._pendingmode is not None and self._pendingmode != mode:
394 fp.close()
412 fp.close()
395 raise error.Abort(_('working directory state may be '
413 raise error.Abort(_('working directory state may be '
396 'changed parallelly'))
414 'changed parallelly'))
397 self._pendingmode = mode
415 self._pendingmode = mode
398 return fp
416 return fp
399
417
400 def _read(self):
418 def _read(self):
401 self._map = {}
419 self._map = {}
402 self._copymap = {}
420 self._copymap = {}
403 try:
421 try:
404 fp = self._opendirstatefile()
422 fp = self._opendirstatefile()
405 try:
423 try:
406 st = fp.read()
424 st = fp.read()
407 finally:
425 finally:
408 fp.close()
426 fp.close()
409 except IOError as err:
427 except IOError as err:
410 if err.errno != errno.ENOENT:
428 if err.errno != errno.ENOENT:
411 raise
429 raise
412 return
430 return
413 if not st:
431 if not st:
414 return
432 return
415
433
416 if util.safehasattr(parsers, 'dict_new_presized'):
434 if util.safehasattr(parsers, 'dict_new_presized'):
417 # Make an estimate of the number of files in the dirstate based on
435 # Make an estimate of the number of files in the dirstate based on
418 # its size. From a linear regression on a set of real-world repos,
436 # its size. From a linear regression on a set of real-world repos,
419 # all over 10,000 files, the size of a dirstate entry is 85
437 # all over 10,000 files, the size of a dirstate entry is 85
420 # bytes. The cost of resizing is significantly higher than the cost
438 # bytes. The cost of resizing is significantly higher than the cost
421 # of filling in a larger presized dict, so subtract 20% from the
439 # of filling in a larger presized dict, so subtract 20% from the
422 # size.
440 # size.
423 #
441 #
424 # This heuristic is imperfect in many ways, so in a future dirstate
442 # This heuristic is imperfect in many ways, so in a future dirstate
425 # format update it makes sense to just record the number of entries
443 # format update it makes sense to just record the number of entries
426 # on write.
444 # on write.
427 self._map = parsers.dict_new_presized(len(st) / 71)
445 self._map = parsers.dict_new_presized(len(st) / 71)
428
446
429 # Python's garbage collector triggers a GC each time a certain number
447 # Python's garbage collector triggers a GC each time a certain number
430 # of container objects (the number being defined by
448 # of container objects (the number being defined by
431 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
449 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
432 # for each file in the dirstate. The C version then immediately marks
450 # for each file in the dirstate. The C version then immediately marks
433 # them as not to be tracked by the collector. However, this has no
451 # them as not to be tracked by the collector. However, this has no
434 # effect on when GCs are triggered, only on what objects the GC looks
452 # effect on when GCs are triggered, only on what objects the GC looks
435 # into. This means that O(number of files) GCs are unavoidable.
453 # into. This means that O(number of files) GCs are unavoidable.
436 # Depending on when in the process's lifetime the dirstate is parsed,
454 # Depending on when in the process's lifetime the dirstate is parsed,
437 # this can get very expensive. As a workaround, disable GC while
455 # this can get very expensive. As a workaround, disable GC while
438 # parsing the dirstate.
456 # parsing the dirstate.
439 #
457 #
440 # (we cannot decorate the function directly since it is in a C module)
458 # (we cannot decorate the function directly since it is in a C module)
441 parse_dirstate = util.nogc(parsers.parse_dirstate)
459 parse_dirstate = util.nogc(parsers.parse_dirstate)
442 p = parse_dirstate(self._map, self._copymap, st)
460 p = parse_dirstate(self._map, self._copymap, st)
443 if not self._dirtypl:
461 if not self._dirtypl:
444 self._pl = p
462 self._pl = p
445
463
446 def invalidate(self):
464 def invalidate(self):
447 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
465 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
448 "_pl", "_dirs", "_ignore", "_nonnormalset",
466 "_pl", "_dirs", "_ignore", "_nonnormalset",
449 "_otherparentset"):
467 "_otherparentset"):
450 if a in self.__dict__:
468 if a in self.__dict__:
451 delattr(self, a)
469 delattr(self, a)
452 self._lastnormaltime = 0
470 self._lastnormaltime = 0
453 self._dirty = False
471 self._dirty = False
454 self._updatedfiles.clear()
472 self._updatedfiles.clear()
455 self._parentwriters = 0
473 self._parentwriters = 0
456 self._origpl = None
474 self._origpl = None
457
475
458 def copy(self, source, dest):
476 def copy(self, source, dest):
459 """Mark dest as a copy of source. Unmark dest if source is None."""
477 """Mark dest as a copy of source. Unmark dest if source is None."""
460 if source == dest:
478 if source == dest:
461 return
479 return
462 self._dirty = True
480 self._dirty = True
463 if source is not None:
481 if source is not None:
464 self._copymap[dest] = source
482 self._copymap[dest] = source
465 self._updatedfiles.add(source)
483 self._updatedfiles.add(source)
466 self._updatedfiles.add(dest)
484 self._updatedfiles.add(dest)
467 elif dest in self._copymap:
485 elif dest in self._copymap:
468 del self._copymap[dest]
486 del self._copymap[dest]
469 self._updatedfiles.add(dest)
487 self._updatedfiles.add(dest)
470
488
471 def copied(self, file):
489 def copied(self, file):
472 return self._copymap.get(file, None)
490 return self._copymap.get(file, None)
473
491
474 def copies(self):
492 def copies(self):
475 return self._copymap
493 return self._copymap
476
494
477 def _droppath(self, f):
495 def _droppath(self, f):
478 if self[f] not in "?r" and "_dirs" in self.__dict__:
496 if self[f] not in "?r" and "_dirs" in self.__dict__:
479 self._dirs.delpath(f)
497 self._dirs.delpath(f)
480
498
481 if "_filefoldmap" in self.__dict__:
499 if "_filefoldmap" in self.__dict__:
482 normed = util.normcase(f)
500 normed = util.normcase(f)
483 if normed in self._filefoldmap:
501 if normed in self._filefoldmap:
484 del self._filefoldmap[normed]
502 del self._filefoldmap[normed]
485
503
486 self._updatedfiles.add(f)
504 self._updatedfiles.add(f)
487
505
488 def _addpath(self, f, state, mode, size, mtime):
506 def _addpath(self, f, state, mode, size, mtime):
489 oldstate = self[f]
507 oldstate = self[f]
490 if state == 'a' or oldstate == 'r':
508 if state == 'a' or oldstate == 'r':
491 scmutil.checkfilename(f)
509 scmutil.checkfilename(f)
492 if f in self._dirs:
510 if f in self._dirs:
493 raise error.Abort(_('directory %r already in dirstate') % f)
511 raise error.Abort(_('directory %r already in dirstate') % f)
494 # shadows
512 # shadows
495 for d in util.finddirs(f):
513 for d in util.finddirs(f):
496 if d in self._dirs:
514 if d in self._dirs:
497 break
515 break
498 if d in self._map and self[d] != 'r':
516 if d in self._map and self[d] != 'r':
499 raise error.Abort(
517 raise error.Abort(
500 _('file %r in dirstate clashes with %r') % (d, f))
518 _('file %r in dirstate clashes with %r') % (d, f))
501 if oldstate in "?r" and "_dirs" in self.__dict__:
519 if oldstate in "?r" and "_dirs" in self.__dict__:
502 self._dirs.addpath(f)
520 self._dirs.addpath(f)
503 self._dirty = True
521 self._dirty = True
504 self._updatedfiles.add(f)
522 self._updatedfiles.add(f)
505 self._map[f] = dirstatetuple(state, mode, size, mtime)
523 self._map[f] = dirstatetuple(state, mode, size, mtime)
506 if state != 'n' or mtime == -1:
524 if state != 'n' or mtime == -1:
507 self._nonnormalset.add(f)
525 self._nonnormalset.add(f)
508 if size == -2:
526 if size == -2:
509 self._otherparentset.add(f)
527 self._otherparentset.add(f)
510
528
511 def normal(self, f):
529 def normal(self, f):
512 '''Mark a file normal and clean.'''
530 '''Mark a file normal and clean.'''
513 s = os.lstat(self._join(f))
531 s = os.lstat(self._join(f))
514 mtime = s.st_mtime
532 mtime = s.st_mtime
515 self._addpath(f, 'n', s.st_mode,
533 self._addpath(f, 'n', s.st_mode,
516 s.st_size & _rangemask, mtime & _rangemask)
534 s.st_size & _rangemask, mtime & _rangemask)
517 if f in self._copymap:
535 if f in self._copymap:
518 del self._copymap[f]
536 del self._copymap[f]
519 if f in self._nonnormalset:
537 if f in self._nonnormalset:
520 self._nonnormalset.remove(f)
538 self._nonnormalset.remove(f)
521 if mtime > self._lastnormaltime:
539 if mtime > self._lastnormaltime:
522 # Remember the most recent modification timeslot for status(),
540 # Remember the most recent modification timeslot for status(),
523 # to make sure we won't miss future size-preserving file content
541 # to make sure we won't miss future size-preserving file content
524 # modifications that happen within the same timeslot.
542 # modifications that happen within the same timeslot.
525 self._lastnormaltime = mtime
543 self._lastnormaltime = mtime
526
544
527 def normallookup(self, f):
545 def normallookup(self, f):
528 '''Mark a file normal, but possibly dirty.'''
546 '''Mark a file normal, but possibly dirty.'''
529 if self._pl[1] != nullid and f in self._map:
547 if self._pl[1] != nullid and f in self._map:
530 # if there is a merge going on and the file was either
548 # if there is a merge going on and the file was either
531 # in state 'm' (-1) or coming from other parent (-2) before
549 # in state 'm' (-1) or coming from other parent (-2) before
532 # being removed, restore that state.
550 # being removed, restore that state.
533 entry = self._map[f]
551 entry = self._map[f]
534 if entry[0] == 'r' and entry[2] in (-1, -2):
552 if entry[0] == 'r' and entry[2] in (-1, -2):
535 source = self._copymap.get(f)
553 source = self._copymap.get(f)
536 if entry[2] == -1:
554 if entry[2] == -1:
537 self.merge(f)
555 self.merge(f)
538 elif entry[2] == -2:
556 elif entry[2] == -2:
539 self.otherparent(f)
557 self.otherparent(f)
540 if source:
558 if source:
541 self.copy(source, f)
559 self.copy(source, f)
542 return
560 return
543 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
561 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
544 return
562 return
545 self._addpath(f, 'n', 0, -1, -1)
563 self._addpath(f, 'n', 0, -1, -1)
546 if f in self._copymap:
564 if f in self._copymap:
547 del self._copymap[f]
565 del self._copymap[f]
548 if f in self._nonnormalset:
566 if f in self._nonnormalset:
549 self._nonnormalset.remove(f)
567 self._nonnormalset.remove(f)
550
568
def otherparent(self, f):
    '''Mark as coming from the other parent, always dirty.'''
    # Only meaningful during a merge (two parents).
    if self._pl[1] == nullid:
        raise error.Abort(_("setting %r to other parent "
                            "only allowed in merges") % f)
    if f in self and self[f] == 'n':
        # file is already tracked normal: record it as merged
        self._addpath(f, 'm', 0, -2, -1)
    else:
        # otherwise record it as added from p2 (size -2)
        self._addpath(f, 'n', 0, -2, -1)

    if f in self._copymap:
        del self._copymap[f]
565
583
def add(self, f):
    '''Mark a file added.'''
    self._addpath(f, 'a', 0, -1, -1)
    # an added file can no longer be a copy target
    self._copymap.pop(f, None)
571
589
def remove(self, f):
    '''Mark a file removed.'''
    self._dirty = True
    self._droppath(f)
    size = 0
    if self._pl[1] != nullid and f in self._map:
        # Remember how the file reached the dirstate so that
        # normallookup can restore that state after e.g. a revert:
        # size -1 means "was merged", -2 means "came from other parent".
        entry = self._map[f]
        if entry[0] == 'm':
            size = -1
        elif entry[0] == 'n' and entry[2] == -2:
            size = -2
            self._otherparentset.add(f)
    self._map[f] = dirstatetuple('r', 0, size, 0)
    self._nonnormalset.add(f)
    # keep the copy record only when we backed up a merge/otherparent state
    if size == 0 and f in self._copymap:
        del self._copymap[f]
589
607
def merge(self, f):
    '''Mark a file merged.'''
    # Outside of an actual merge (no second parent) this degrades to a
    # plain normallookup; otherwise the file comes from the other parent.
    handler = self.normallookup if self._pl[1] == nullid else self.otherparent
    return handler(f)
595
613
def drop(self, f):
    '''Drop a file from the dirstate'''
    # unknown files are silently ignored
    if f not in self._map:
        return
    self._dirty = True
    self._droppath(f)
    del self._map[f]
    self._nonnormalset.discard(f)
    self._copymap.pop(f, None)
606
624
607 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
625 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
608 if exists is None:
626 if exists is None:
609 exists = os.path.lexists(os.path.join(self._root, path))
627 exists = os.path.lexists(os.path.join(self._root, path))
610 if not exists:
628 if not exists:
611 # Maybe a path component exists
629 # Maybe a path component exists
612 if not ignoremissing and '/' in path:
630 if not ignoremissing and '/' in path:
613 d, f = path.rsplit('/', 1)
631 d, f = path.rsplit('/', 1)
614 d = self._normalize(d, False, ignoremissing, None)
632 d = self._normalize(d, False, ignoremissing, None)
615 folded = d + "/" + f
633 folded = d + "/" + f
616 else:
634 else:
617 # No path components, preserve original case
635 # No path components, preserve original case
618 folded = path
636 folded = path
619 else:
637 else:
620 # recursively normalize leading directory components
638 # recursively normalize leading directory components
621 # against dirstate
639 # against dirstate
622 if '/' in normed:
640 if '/' in normed:
623 d, f = normed.rsplit('/', 1)
641 d, f = normed.rsplit('/', 1)
624 d = self._normalize(d, False, ignoremissing, True)
642 d = self._normalize(d, False, ignoremissing, True)
625 r = self._root + "/" + d
643 r = self._root + "/" + d
626 folded = d + "/" + util.fspath(f, r)
644 folded = d + "/" + util.fspath(f, r)
627 else:
645 else:
628 folded = util.fspath(normed, self._root)
646 folded = util.fspath(normed, self._root)
629 storemap[normed] = folded
647 storemap[normed] = folded
630
648
631 return folded
649 return folded
632
650
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize a file path using the file fold map only."""
    normed = util.normcase(path)
    cached = self._filefoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        # name came from disk walking; its case is already authoritative
        return path
    return self._discoverpath(path, normed, ignoremissing, exists,
                              self._filefoldmap)
643
661
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize a path, consulting the file fold map first and the
    directory fold map second."""
    normed = util.normcase(path)
    folded = self._filefoldmap.get(normed, None)
    if folded is None:
        folded = self._dirfoldmap.get(normed, None)
    if folded is None:
        if isknown:
            folded = path
        else:
            # store discovered result in dirfoldmap so that future
            # normalizefile calls don't start matching directories
            folded = self._discoverpath(path, normed, ignoremissing,
                                        exists, self._dirfoldmap)
    return folded
658
676
def normalize(self, path, isknown=False, ignoremissing=False):
    '''
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    '''
    # On case-sensitive filesystems the given path is already canonical.
    if not self._checkcase:
        return path
    return self._normalize(path, isknown, ignoremissing)
680
698
def clear(self):
    """Reset all in-memory dirstate data to the empty state and mark it
    dirty so the reset is eventually written out."""
    self._map = {}
    self._nonnormalset = set()
    self._otherparentset = set()
    # drop the lazily-computed directory cache, if it was materialized
    self.__dict__.pop("_dirs", None)
    self._copymap = {}
    self._pl = [nullid, nullid]
    self._lastnormaltime = 0
    self._updatedfiles.clear()
    self._dirty = True
692
710
def rebuild(self, parent, allfiles, changedfiles=None):
    """Rebuild dirstate entries for *changedfiles* against the single
    parent *parent*; with changedfiles=None the whole dirstate is
    rebuilt from *allfiles*."""
    if changedfiles is None:
        changedfiles = allfiles
    # clear() resets _lastnormaltime too; preserve it across the reset
    lastnormaltime = self._lastnormaltime
    self.clear()
    self._lastnormaltime = lastnormaltime

    # remember the pre-rebuild parents once, for parent-change callbacks
    if self._origpl is None:
        self._origpl = self._pl
    self._pl = (parent, nullid)
    for f in changedfiles:
        if f in allfiles:
            self.normallookup(f)
        else:
            self.drop(f)

    self._dirty = True
711
729
def write(self, tr):
    """Write the dirstate out, either immediately or (when a transaction
    *tr* is given) as a delayed file generator at transaction close."""
    if not self._dirty:
        return

    filename = self._filename
    if tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamps.
        # Delayed writing would re-raise the "ambiguous timestamp"
        # issue, so drop them now. See also:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        dmap = self._map
        for f in self._updatedfiles:
            e = dmap.get(f)
            if e is not None and e[0] == 'n' and e[3] == now:
                # mtime -1 marks the entry as "needs lookup"
                dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self._nonnormalset.add(f)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0
        self._updatedfiles.clear()

        # delay writing in-memory changes out
        tr.addfilegenerator('dirstate', (filename,),
                            self._writedirstate, location='plain')
        return

    st = self._opener(filename, "w", atomictemp=True, checkambig=True)
    self._writedirstate(st)
744
762
def addparentchangecallback(self, category, callback):
    """add a callback to be called when the wd parents are changed

    Callback will be called with the following arguments:
    dirstate, (oldp1, oldp2), (newp1, newp2)

    Category is a unique identifier to allow overwriting an old callback
    with a newer callback.
    """
    self._plchangecallbacks[category] = callback
755
773
def _writedirstate(self, st):
    """Pack the dirstate into file-like object *st* and close it,
    firing parent-change callbacks first."""
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        for category, callback in sorted(self._plchangecallbacks.iteritems()):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st).st_mtime & _rangemask

    # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # the timestamp of entries written in the current timeslot
    # (because of 'now > mtime')
    delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
    if delaywrite > 0:
        # do we have any files to delay for?
        for fname, ent in self._map.iteritems():
            if ent[0] == 'n' and ent[3] == now:
                import time # to avoid useless import
                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                end = start + delaywrite
                time.sleep(end - clock)
                now = end # trust our estimate that the end is near now
                break

    st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
    self._nonnormalset, self._otherparentset = nonnormalentries(self._map)
    st.close()
    self._lastnormaltime = 0
    self._dirty = self._dirtypl = False
788
806
789 def _dirignore(self, f):
807 def _dirignore(self, f):
790 if f == '.':
808 if f == '.':
791 return False
809 return False
792 if self._ignore(f):
810 if self._ignore(f):
793 return True
811 return True
794 for p in util.finddirs(f):
812 for p in util.finddirs(f):
795 if self._ignore(p):
813 if self._ignore(p):
796 return True
814 return True
797 return False
815 return False
798
816
799 def _ignorefiles(self):
817 def _ignorefiles(self):
800 files = []
818 files = []
801 if os.path.exists(self._join('.hgignore')):
819 if os.path.exists(self._join('.hgignore')):
802 files.append(self._join('.hgignore'))
820 files.append(self._join('.hgignore'))
803 for name, path in self._ui.configitems("ui"):
821 for name, path in self._ui.configitems("ui"):
804 if name == 'ignore' or name.startswith('ignore.'):
822 if name == 'ignore' or name.startswith('ignore.'):
805 # we need to use os.path.join here rather than self._join
823 # we need to use os.path.join here rather than self._join
806 # because path is arbitrary and user-specified
824 # because path is arbitrary and user-specified
807 files.append(os.path.join(self._rootdir, util.expandpath(path)))
825 files.append(os.path.join(self._rootdir, util.expandpath(path)))
808 return files
826 return files
809
827
810 def _ignorefileandline(self, f):
828 def _ignorefileandline(self, f):
811 files = collections.deque(self._ignorefiles())
829 files = collections.deque(self._ignorefiles())
812 visited = set()
830 visited = set()
813 while files:
831 while files:
814 i = files.popleft()
832 i = files.popleft()
815 patterns = matchmod.readpatternfile(i, self._ui.warn,
833 patterns = matchmod.readpatternfile(i, self._ui.warn,
816 sourceinfo=True)
834 sourceinfo=True)
817 for pattern, lineno, line in patterns:
835 for pattern, lineno, line in patterns:
818 kind, p = matchmod._patsplit(pattern, 'glob')
836 kind, p = matchmod._patsplit(pattern, 'glob')
819 if kind == "subinclude":
837 if kind == "subinclude":
820 if p not in visited:
838 if p not in visited:
821 files.append(p)
839 files.append(p)
822 continue
840 continue
823 m = matchmod.match(self._root, '', [], [pattern],
841 m = matchmod.match(self._root, '', [], [pattern],
824 warn=self._ui.warn)
842 warn=self._ui.warn)
825 if m(f):
843 if m(f):
826 return (i, lineno, line)
844 return (i, lineno, line)
827 visited.add(i)
845 visited.add(i)
828 return (None, -1, "")
846 return (None, -1, "")
829
847
830 def _walkexplicit(self, match, subrepos):
848 def _walkexplicit(self, match, subrepos):
831 '''Get stat data about the files explicitly specified by match.
849 '''Get stat data about the files explicitly specified by match.
832
850
833 Return a triple (results, dirsfound, dirsnotfound).
851 Return a triple (results, dirsfound, dirsnotfound).
834 - results is a mapping from filename to stat result. It also contains
852 - results is a mapping from filename to stat result. It also contains
835 listings mapping subrepos and .hg to None.
853 listings mapping subrepos and .hg to None.
836 - dirsfound is a list of files found to be directories.
854 - dirsfound is a list of files found to be directories.
837 - dirsnotfound is a list of files that the dirstate thinks are
855 - dirsnotfound is a list of files that the dirstate thinks are
838 directories and that were not found.'''
856 directories and that were not found.'''
839
857
840 def badtype(mode):
858 def badtype(mode):
841 kind = _('unknown')
859 kind = _('unknown')
842 if stat.S_ISCHR(mode):
860 if stat.S_ISCHR(mode):
843 kind = _('character device')
861 kind = _('character device')
844 elif stat.S_ISBLK(mode):
862 elif stat.S_ISBLK(mode):
845 kind = _('block device')
863 kind = _('block device')
846 elif stat.S_ISFIFO(mode):
864 elif stat.S_ISFIFO(mode):
847 kind = _('fifo')
865 kind = _('fifo')
848 elif stat.S_ISSOCK(mode):
866 elif stat.S_ISSOCK(mode):
849 kind = _('socket')
867 kind = _('socket')
850 elif stat.S_ISDIR(mode):
868 elif stat.S_ISDIR(mode):
851 kind = _('directory')
869 kind = _('directory')
852 return _('unsupported file type (type is %s)') % kind
870 return _('unsupported file type (type is %s)') % kind
853
871
854 matchedir = match.explicitdir
872 matchedir = match.explicitdir
855 badfn = match.bad
873 badfn = match.bad
856 dmap = self._map
874 dmap = self._map
857 lstat = os.lstat
875 lstat = os.lstat
858 getkind = stat.S_IFMT
876 getkind = stat.S_IFMT
859 dirkind = stat.S_IFDIR
877 dirkind = stat.S_IFDIR
860 regkind = stat.S_IFREG
878 regkind = stat.S_IFREG
861 lnkkind = stat.S_IFLNK
879 lnkkind = stat.S_IFLNK
862 join = self._join
880 join = self._join
863 dirsfound = []
881 dirsfound = []
864 foundadd = dirsfound.append
882 foundadd = dirsfound.append
865 dirsnotfound = []
883 dirsnotfound = []
866 notfoundadd = dirsnotfound.append
884 notfoundadd = dirsnotfound.append
867
885
868 if not match.isexact() and self._checkcase:
886 if not match.isexact() and self._checkcase:
869 normalize = self._normalize
887 normalize = self._normalize
870 else:
888 else:
871 normalize = None
889 normalize = None
872
890
873 files = sorted(match.files())
891 files = sorted(match.files())
874 subrepos.sort()
892 subrepos.sort()
875 i, j = 0, 0
893 i, j = 0, 0
876 while i < len(files) and j < len(subrepos):
894 while i < len(files) and j < len(subrepos):
877 subpath = subrepos[j] + "/"
895 subpath = subrepos[j] + "/"
878 if files[i] < subpath:
896 if files[i] < subpath:
879 i += 1
897 i += 1
880 continue
898 continue
881 while i < len(files) and files[i].startswith(subpath):
899 while i < len(files) and files[i].startswith(subpath):
882 del files[i]
900 del files[i]
883 j += 1
901 j += 1
884
902
885 if not files or '.' in files:
903 if not files or '.' in files:
886 files = ['.']
904 files = ['.']
887 results = dict.fromkeys(subrepos)
905 results = dict.fromkeys(subrepos)
888 results['.hg'] = None
906 results['.hg'] = None
889
907
890 alldirs = None
908 alldirs = None
891 for ff in files:
909 for ff in files:
892 # constructing the foldmap is expensive, so don't do it for the
910 # constructing the foldmap is expensive, so don't do it for the
893 # common case where files is ['.']
911 # common case where files is ['.']
894 if normalize and ff != '.':
912 if normalize and ff != '.':
895 nf = normalize(ff, False, True)
913 nf = normalize(ff, False, True)
896 else:
914 else:
897 nf = ff
915 nf = ff
898 if nf in results:
916 if nf in results:
899 continue
917 continue
900
918
901 try:
919 try:
902 st = lstat(join(nf))
920 st = lstat(join(nf))
903 kind = getkind(st.st_mode)
921 kind = getkind(st.st_mode)
904 if kind == dirkind:
922 if kind == dirkind:
905 if nf in dmap:
923 if nf in dmap:
906 # file replaced by dir on disk but still in dirstate
924 # file replaced by dir on disk but still in dirstate
907 results[nf] = None
925 results[nf] = None
908 if matchedir:
926 if matchedir:
909 matchedir(nf)
927 matchedir(nf)
910 foundadd((nf, ff))
928 foundadd((nf, ff))
911 elif kind == regkind or kind == lnkkind:
929 elif kind == regkind or kind == lnkkind:
912 results[nf] = st
930 results[nf] = st
913 else:
931 else:
914 badfn(ff, badtype(kind))
932 badfn(ff, badtype(kind))
915 if nf in dmap:
933 if nf in dmap:
916 results[nf] = None
934 results[nf] = None
917 except OSError as inst: # nf not found on disk - it is dirstate only
935 except OSError as inst: # nf not found on disk - it is dirstate only
918 if nf in dmap: # does it exactly match a missing file?
936 if nf in dmap: # does it exactly match a missing file?
919 results[nf] = None
937 results[nf] = None
920 else: # does it match a missing directory?
938 else: # does it match a missing directory?
921 if alldirs is None:
939 if alldirs is None:
922 alldirs = util.dirs(dmap)
940 alldirs = util.dirs(dmap)
923 if nf in alldirs:
941 if nf in alldirs:
924 if matchedir:
942 if matchedir:
925 matchedir(nf)
943 matchedir(nf)
926 notfoundadd(nf)
944 notfoundadd(nf)
927 else:
945 else:
928 badfn(ff, inst.strerror)
946 badfn(ff, inst.strerror)
929
947
930 # Case insensitive filesystems cannot rely on lstat() failing to detect
948 # Case insensitive filesystems cannot rely on lstat() failing to detect
931 # a case-only rename. Prune the stat object for any file that does not
949 # a case-only rename. Prune the stat object for any file that does not
932 # match the case in the filesystem, if there are multiple files that
950 # match the case in the filesystem, if there are multiple files that
933 # normalize to the same path.
951 # normalize to the same path.
934 if match.isexact() and self._checkcase:
952 if match.isexact() and self._checkcase:
935 normed = {}
953 normed = {}
936
954
937 for f, st in results.iteritems():
955 for f, st in results.iteritems():
938 if st is None:
956 if st is None:
939 continue
957 continue
940
958
941 nc = util.normcase(f)
959 nc = util.normcase(f)
942 paths = normed.get(nc)
960 paths = normed.get(nc)
943
961
944 if paths is None:
962 if paths is None:
945 paths = set()
963 paths = set()
946 normed[nc] = paths
964 normed[nc] = paths
947
965
948 paths.add(f)
966 paths.add(f)
949
967
950 for norm, paths in normed.iteritems():
968 for norm, paths in normed.iteritems():
951 if len(paths) > 1:
969 if len(paths) > 1:
952 for path in paths:
970 for path in paths:
953 folded = self._discoverpath(path, norm, True, None,
971 folded = self._discoverpath(path, norm, True, None,
954 self._dirfoldmap)
972 self._dirfoldmap)
955 if path != folded:
973 if path != folded:
956 results[path] = None
974 results[path] = None
957
975
958 return results, dirsfound, dirsnotfound
976 return results, dirsfound, dirsnotfound
959
977
960 def walk(self, match, subrepos, unknown, ignored, full=True):
978 def walk(self, match, subrepos, unknown, ignored, full=True):
961 '''
979 '''
962 Walk recursively through the directory tree, finding all files
980 Walk recursively through the directory tree, finding all files
963 matched by match.
981 matched by match.
964
982
965 If full is False, maybe skip some known-clean files.
983 If full is False, maybe skip some known-clean files.
966
984
967 Return a dict mapping filename to stat-like object (either
985 Return a dict mapping filename to stat-like object (either
968 mercurial.osutil.stat instance or return value of os.stat()).
986 mercurial.osutil.stat instance or return value of os.stat()).
969
987
970 '''
988 '''
971 # full is a flag that extensions that hook into walk can use -- this
989 # full is a flag that extensions that hook into walk can use -- this
972 # implementation doesn't use it at all. This satisfies the contract
990 # implementation doesn't use it at all. This satisfies the contract
973 # because we only guarantee a "maybe".
991 # because we only guarantee a "maybe".
974
992
975 if ignored:
993 if ignored:
976 ignore = util.never
994 ignore = util.never
977 dirignore = util.never
995 dirignore = util.never
978 elif unknown:
996 elif unknown:
979 ignore = self._ignore
997 ignore = self._ignore
980 dirignore = self._dirignore
998 dirignore = self._dirignore
981 else:
999 else:
982 # if not unknown and not ignored, drop dir recursion and step 2
1000 # if not unknown and not ignored, drop dir recursion and step 2
983 ignore = util.always
1001 ignore = util.always
984 dirignore = util.always
1002 dirignore = util.always
985
1003
986 matchfn = match.matchfn
1004 matchfn = match.matchfn
987 matchalways = match.always()
1005 matchalways = match.always()
988 matchtdir = match.traversedir
1006 matchtdir = match.traversedir
989 dmap = self._map
1007 dmap = self._map
990 listdir = util.listdir
1008 listdir = util.listdir
991 lstat = os.lstat
1009 lstat = os.lstat
992 dirkind = stat.S_IFDIR
1010 dirkind = stat.S_IFDIR
993 regkind = stat.S_IFREG
1011 regkind = stat.S_IFREG
994 lnkkind = stat.S_IFLNK
1012 lnkkind = stat.S_IFLNK
995 join = self._join
1013 join = self._join
996
1014
997 exact = skipstep3 = False
1015 exact = skipstep3 = False
998 if match.isexact(): # match.exact
1016 if match.isexact(): # match.exact
999 exact = True
1017 exact = True
1000 dirignore = util.always # skip step 2
1018 dirignore = util.always # skip step 2
1001 elif match.prefix(): # match.match, no patterns
1019 elif match.prefix(): # match.match, no patterns
1002 skipstep3 = True
1020 skipstep3 = True
1003
1021
1004 if not exact and self._checkcase:
1022 if not exact and self._checkcase:
1005 normalize = self._normalize
1023 normalize = self._normalize
1006 normalizefile = self._normalizefile
1024 normalizefile = self._normalizefile
1007 skipstep3 = False
1025 skipstep3 = False
1008 else:
1026 else:
1009 normalize = self._normalize
1027 normalize = self._normalize
1010 normalizefile = None
1028 normalizefile = None
1011
1029
1012 # step 1: find all explicit files
1030 # step 1: find all explicit files
1013 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1031 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1014
1032
1015 skipstep3 = skipstep3 and not (work or dirsnotfound)
1033 skipstep3 = skipstep3 and not (work or dirsnotfound)
1016 work = [d for d in work if not dirignore(d[0])]
1034 work = [d for d in work if not dirignore(d[0])]
1017
1035
1018 # step 2: visit subdirectories
1036 # step 2: visit subdirectories
1019 def traverse(work, alreadynormed):
1037 def traverse(work, alreadynormed):
1020 wadd = work.append
1038 wadd = work.append
1021 while work:
1039 while work:
1022 nd = work.pop()
1040 nd = work.pop()
1023 if not match.visitdir(nd):
1041 if not match.visitdir(nd):
1024 continue
1042 continue
1025 skip = None
1043 skip = None
1026 if nd == '.':
1044 if nd == '.':
1027 nd = ''
1045 nd = ''
1028 else:
1046 else:
1029 skip = '.hg'
1047 skip = '.hg'
1030 try:
1048 try:
1031 entries = listdir(join(nd), stat=True, skip=skip)
1049 entries = listdir(join(nd), stat=True, skip=skip)
1032 except OSError as inst:
1050 except OSError as inst:
1033 if inst.errno in (errno.EACCES, errno.ENOENT):
1051 if inst.errno in (errno.EACCES, errno.ENOENT):
1034 match.bad(self.pathto(nd), inst.strerror)
1052 match.bad(self.pathto(nd), inst.strerror)
1035 continue
1053 continue
1036 raise
1054 raise
1037 for f, kind, st in entries:
1055 for f, kind, st in entries:
1038 if normalizefile:
1056 if normalizefile:
1039 # even though f might be a directory, we're only
1057 # even though f might be a directory, we're only
1040 # interested in comparing it to files currently in the
1058 # interested in comparing it to files currently in the
1041 # dmap -- therefore normalizefile is enough
1059 # dmap -- therefore normalizefile is enough
1042 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1060 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1043 True)
1061 True)
1044 else:
1062 else:
1045 nf = nd and (nd + "/" + f) or f
1063 nf = nd and (nd + "/" + f) or f
1046 if nf not in results:
1064 if nf not in results:
1047 if kind == dirkind:
1065 if kind == dirkind:
1048 if not ignore(nf):
1066 if not ignore(nf):
1049 if matchtdir:
1067 if matchtdir:
1050 matchtdir(nf)
1068 matchtdir(nf)
1051 wadd(nf)
1069 wadd(nf)
1052 if nf in dmap and (matchalways or matchfn(nf)):
1070 if nf in dmap and (matchalways or matchfn(nf)):
1053 results[nf] = None
1071 results[nf] = None
1054 elif kind == regkind or kind == lnkkind:
1072 elif kind == regkind or kind == lnkkind:
1055 if nf in dmap:
1073 if nf in dmap:
1056 if matchalways or matchfn(nf):
1074 if matchalways or matchfn(nf):
1057 results[nf] = st
1075 results[nf] = st
1058 elif ((matchalways or matchfn(nf))
1076 elif ((matchalways or matchfn(nf))
1059 and not ignore(nf)):
1077 and not ignore(nf)):
1060 # unknown file -- normalize if necessary
1078 # unknown file -- normalize if necessary
1061 if not alreadynormed:
1079 if not alreadynormed:
1062 nf = normalize(nf, False, True)
1080 nf = normalize(nf, False, True)
1063 results[nf] = st
1081 results[nf] = st
1064 elif nf in dmap and (matchalways or matchfn(nf)):
1082 elif nf in dmap and (matchalways or matchfn(nf)):
1065 results[nf] = None
1083 results[nf] = None
1066
1084
1067 for nd, d in work:
1085 for nd, d in work:
1068 # alreadynormed means that processwork doesn't have to do any
1086 # alreadynormed means that processwork doesn't have to do any
1069 # expensive directory normalization
1087 # expensive directory normalization
1070 alreadynormed = not normalize or nd == d
1088 alreadynormed = not normalize or nd == d
1071 traverse([d], alreadynormed)
1089 traverse([d], alreadynormed)
1072
1090
1073 for s in subrepos:
1091 for s in subrepos:
1074 del results[s]
1092 del results[s]
1075 del results['.hg']
1093 del results['.hg']
1076
1094
1077 # step 3: visit remaining files from dmap
1095 # step 3: visit remaining files from dmap
1078 if not skipstep3 and not exact:
1096 if not skipstep3 and not exact:
1079 # If a dmap file is not in results yet, it was either
1097 # If a dmap file is not in results yet, it was either
1080 # a) not matching matchfn b) ignored, c) missing, or d) under a
1098 # a) not matching matchfn b) ignored, c) missing, or d) under a
1081 # symlink directory.
1099 # symlink directory.
1082 if not results and matchalways:
1100 if not results and matchalways:
1083 visit = [f for f in dmap]
1101 visit = [f for f in dmap]
1084 else:
1102 else:
1085 visit = [f for f in dmap if f not in results and matchfn(f)]
1103 visit = [f for f in dmap if f not in results and matchfn(f)]
1086 visit.sort()
1104 visit.sort()
1087
1105
1088 if unknown:
1106 if unknown:
1089 # unknown == True means we walked all dirs under the roots
1107 # unknown == True means we walked all dirs under the roots
1090 # that wasn't ignored, and everything that matched was stat'ed
1108 # that wasn't ignored, and everything that matched was stat'ed
1091 # and is already in results.
1109 # and is already in results.
1092 # The rest must thus be ignored or under a symlink.
1110 # The rest must thus be ignored or under a symlink.
1093 audit_path = pathutil.pathauditor(self._root)
1111 audit_path = pathutil.pathauditor(self._root)
1094
1112
1095 for nf in iter(visit):
1113 for nf in iter(visit):
1096 # If a stat for the same file was already added with a
1114 # If a stat for the same file was already added with a
1097 # different case, don't add one for this, since that would
1115 # different case, don't add one for this, since that would
1098 # make it appear as if the file exists under both names
1116 # make it appear as if the file exists under both names
1099 # on disk.
1117 # on disk.
1100 if (normalizefile and
1118 if (normalizefile and
1101 normalizefile(nf, True, True) in results):
1119 normalizefile(nf, True, True) in results):
1102 results[nf] = None
1120 results[nf] = None
1103 # Report ignored items in the dmap as long as they are not
1121 # Report ignored items in the dmap as long as they are not
1104 # under a symlink directory.
1122 # under a symlink directory.
1105 elif audit_path.check(nf):
1123 elif audit_path.check(nf):
1106 try:
1124 try:
1107 results[nf] = lstat(join(nf))
1125 results[nf] = lstat(join(nf))
1108 # file was just ignored, no links, and exists
1126 # file was just ignored, no links, and exists
1109 except OSError:
1127 except OSError:
1110 # file doesn't exist
1128 # file doesn't exist
1111 results[nf] = None
1129 results[nf] = None
1112 else:
1130 else:
1113 # It's either missing or under a symlink directory
1131 # It's either missing or under a symlink directory
1114 # which we in this case report as missing
1132 # which we in this case report as missing
1115 results[nf] = None
1133 results[nf] = None
1116 else:
1134 else:
1117 # We may not have walked the full directory tree above,
1135 # We may not have walked the full directory tree above,
1118 # so stat and check everything we missed.
1136 # so stat and check everything we missed.
1119 iv = iter(visit)
1137 iv = iter(visit)
1120 for st in util.statfiles([join(i) for i in visit]):
1138 for st in util.statfiles([join(i) for i in visit]):
1121 results[next(iv)] = st
1139 results[next(iv)] = st
1122 return results
1140 return results
1123
1141
1124 def status(self, match, subrepos, ignored, clean, unknown):
1142 def status(self, match, subrepos, ignored, clean, unknown):
1125 '''Determine the status of the working copy relative to the
1143 '''Determine the status of the working copy relative to the
1126 dirstate and return a pair of (unsure, status), where status is of type
1144 dirstate and return a pair of (unsure, status), where status is of type
1127 scmutil.status and:
1145 scmutil.status and:
1128
1146
1129 unsure:
1147 unsure:
1130 files that might have been modified since the dirstate was
1148 files that might have been modified since the dirstate was
1131 written, but need to be read to be sure (size is the same
1149 written, but need to be read to be sure (size is the same
1132 but mtime differs)
1150 but mtime differs)
1133 status.modified:
1151 status.modified:
1134 files that have definitely been modified since the dirstate
1152 files that have definitely been modified since the dirstate
1135 was written (different size or mode)
1153 was written (different size or mode)
1136 status.clean:
1154 status.clean:
1137 files that have definitely not been modified since the
1155 files that have definitely not been modified since the
1138 dirstate was written
1156 dirstate was written
1139 '''
1157 '''
1140 listignored, listclean, listunknown = ignored, clean, unknown
1158 listignored, listclean, listunknown = ignored, clean, unknown
1141 lookup, modified, added, unknown, ignored = [], [], [], [], []
1159 lookup, modified, added, unknown, ignored = [], [], [], [], []
1142 removed, deleted, clean = [], [], []
1160 removed, deleted, clean = [], [], []
1143
1161
1144 dmap = self._map
1162 dmap = self._map
1145 ladd = lookup.append # aka "unsure"
1163 ladd = lookup.append # aka "unsure"
1146 madd = modified.append
1164 madd = modified.append
1147 aadd = added.append
1165 aadd = added.append
1148 uadd = unknown.append
1166 uadd = unknown.append
1149 iadd = ignored.append
1167 iadd = ignored.append
1150 radd = removed.append
1168 radd = removed.append
1151 dadd = deleted.append
1169 dadd = deleted.append
1152 cadd = clean.append
1170 cadd = clean.append
1153 mexact = match.exact
1171 mexact = match.exact
1154 dirignore = self._dirignore
1172 dirignore = self._dirignore
1155 checkexec = self._checkexec
1173 checkexec = self._checkexec
1156 copymap = self._copymap
1174 copymap = self._copymap
1157 lastnormaltime = self._lastnormaltime
1175 lastnormaltime = self._lastnormaltime
1158
1176
1159 # We need to do full walks when either
1177 # We need to do full walks when either
1160 # - we're listing all clean files, or
1178 # - we're listing all clean files, or
1161 # - match.traversedir does something, because match.traversedir should
1179 # - match.traversedir does something, because match.traversedir should
1162 # be called for every dir in the working dir
1180 # be called for every dir in the working dir
1163 full = listclean or match.traversedir is not None
1181 full = listclean or match.traversedir is not None
1164 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1182 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1165 full=full).iteritems():
1183 full=full).iteritems():
1166 if fn not in dmap:
1184 if fn not in dmap:
1167 if (listignored or mexact(fn)) and dirignore(fn):
1185 if (listignored or mexact(fn)) and dirignore(fn):
1168 if listignored:
1186 if listignored:
1169 iadd(fn)
1187 iadd(fn)
1170 else:
1188 else:
1171 uadd(fn)
1189 uadd(fn)
1172 continue
1190 continue
1173
1191
1174 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1192 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1175 # written like that for performance reasons. dmap[fn] is not a
1193 # written like that for performance reasons. dmap[fn] is not a
1176 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1194 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1177 # opcode has fast paths when the value to be unpacked is a tuple or
1195 # opcode has fast paths when the value to be unpacked is a tuple or
1178 # a list, but falls back to creating a full-fledged iterator in
1196 # a list, but falls back to creating a full-fledged iterator in
1179 # general. That is much slower than simply accessing and storing the
1197 # general. That is much slower than simply accessing and storing the
1180 # tuple members one by one.
1198 # tuple members one by one.
1181 t = dmap[fn]
1199 t = dmap[fn]
1182 state = t[0]
1200 state = t[0]
1183 mode = t[1]
1201 mode = t[1]
1184 size = t[2]
1202 size = t[2]
1185 time = t[3]
1203 time = t[3]
1186
1204
1187 if not st and state in "nma":
1205 if not st and state in "nma":
1188 dadd(fn)
1206 dadd(fn)
1189 elif state == 'n':
1207 elif state == 'n':
1190 if (size >= 0 and
1208 if (size >= 0 and
1191 ((size != st.st_size and size != st.st_size & _rangemask)
1209 ((size != st.st_size and size != st.st_size & _rangemask)
1192 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1210 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1193 or size == -2 # other parent
1211 or size == -2 # other parent
1194 or fn in copymap):
1212 or fn in copymap):
1195 madd(fn)
1213 madd(fn)
1196 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1214 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1197 ladd(fn)
1215 ladd(fn)
1198 elif st.st_mtime == lastnormaltime:
1216 elif st.st_mtime == lastnormaltime:
1199 # fn may have just been marked as normal and it may have
1217 # fn may have just been marked as normal and it may have
1200 # changed in the same second without changing its size.
1218 # changed in the same second without changing its size.
1201 # This can happen if we quickly do multiple commits.
1219 # This can happen if we quickly do multiple commits.
1202 # Force lookup, so we don't miss such a racy file change.
1220 # Force lookup, so we don't miss such a racy file change.
1203 ladd(fn)
1221 ladd(fn)
1204 elif listclean:
1222 elif listclean:
1205 cadd(fn)
1223 cadd(fn)
1206 elif state == 'm':
1224 elif state == 'm':
1207 madd(fn)
1225 madd(fn)
1208 elif state == 'a':
1226 elif state == 'a':
1209 aadd(fn)
1227 aadd(fn)
1210 elif state == 'r':
1228 elif state == 'r':
1211 radd(fn)
1229 radd(fn)
1212
1230
1213 return (lookup, scmutil.status(modified, added, removed, deleted,
1231 return (lookup, scmutil.status(modified, added, removed, deleted,
1214 unknown, ignored, clean))
1232 unknown, ignored, clean))
1215
1233
1216 def matches(self, match):
1234 def matches(self, match):
1217 '''
1235 '''
1218 return files in the dirstate (in whatever state) filtered by match
1236 return files in the dirstate (in whatever state) filtered by match
1219 '''
1237 '''
1220 dmap = self._map
1238 dmap = self._map
1221 if match.always():
1239 if match.always():
1222 return dmap.keys()
1240 return dmap.keys()
1223 files = match.files()
1241 files = match.files()
1224 if match.isexact():
1242 if match.isexact():
1225 # fast path -- filter the other way around, since typically files is
1243 # fast path -- filter the other way around, since typically files is
1226 # much smaller than dmap
1244 # much smaller than dmap
1227 return [f for f in files if f in dmap]
1245 return [f for f in files if f in dmap]
1228 if match.prefix() and all(fn in dmap for fn in files):
1246 if match.prefix() and all(fn in dmap for fn in files):
1229 # fast path -- all the values are known to be files, so just return
1247 # fast path -- all the values are known to be files, so just return
1230 # that
1248 # that
1231 return list(files)
1249 return list(files)
1232 return [f for f in dmap if match(f)]
1250 return [f for f in dmap if match(f)]
1233
1251
1234 def _actualfilename(self, tr):
1252 def _actualfilename(self, tr):
1235 if tr:
1253 if tr:
1236 return self._pendingfilename
1254 return self._pendingfilename
1237 else:
1255 else:
1238 return self._filename
1256 return self._filename
1239
1257
1240 def savebackup(self, tr, suffix='', prefix=''):
1258 def savebackup(self, tr, suffix='', prefix=''):
1241 '''Save current dirstate into backup file with suffix'''
1259 '''Save current dirstate into backup file with suffix'''
1242 assert len(suffix) > 0 or len(prefix) > 0
1260 assert len(suffix) > 0 or len(prefix) > 0
1243 filename = self._actualfilename(tr)
1261 filename = self._actualfilename(tr)
1244
1262
1245 # use '_writedirstate' instead of 'write' to write changes certainly,
1263 # use '_writedirstate' instead of 'write' to write changes certainly,
1246 # because the latter omits writing out if transaction is running.
1264 # because the latter omits writing out if transaction is running.
1247 # output file will be used to create backup of dirstate at this point.
1265 # output file will be used to create backup of dirstate at this point.
1248 if self._dirty or not self._opener.exists(filename):
1266 if self._dirty or not self._opener.exists(filename):
1249 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1267 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1250 checkambig=True))
1268 checkambig=True))
1251
1269
1252 if tr:
1270 if tr:
1253 # ensure that subsequent tr.writepending returns True for
1271 # ensure that subsequent tr.writepending returns True for
1254 # changes written out above, even if dirstate is never
1272 # changes written out above, even if dirstate is never
1255 # changed after this
1273 # changed after this
1256 tr.addfilegenerator('dirstate', (self._filename,),
1274 tr.addfilegenerator('dirstate', (self._filename,),
1257 self._writedirstate, location='plain')
1275 self._writedirstate, location='plain')
1258
1276
1259 # ensure that pending file written above is unlinked at
1277 # ensure that pending file written above is unlinked at
1260 # failure, even if tr.writepending isn't invoked until the
1278 # failure, even if tr.writepending isn't invoked until the
1261 # end of this transaction
1279 # end of this transaction
1262 tr.registertmp(filename, location='plain')
1280 tr.registertmp(filename, location='plain')
1263
1281
1264 backupname = prefix + self._filename + suffix
1282 backupname = prefix + self._filename + suffix
1265 assert backupname != filename
1283 assert backupname != filename
1266 self._opener.tryunlink(backupname)
1284 self._opener.tryunlink(backupname)
1267 # hardlink backup is okay because _writedirstate is always called
1285 # hardlink backup is okay because _writedirstate is always called
1268 # with an "atomictemp=True" file.
1286 # with an "atomictemp=True" file.
1269 util.copyfile(self._opener.join(filename),
1287 util.copyfile(self._opener.join(filename),
1270 self._opener.join(backupname), hardlink=True)
1288 self._opener.join(backupname), hardlink=True)
1271
1289
1272 def restorebackup(self, tr, suffix='', prefix=''):
1290 def restorebackup(self, tr, suffix='', prefix=''):
1273 '''Restore dirstate by backup file with suffix'''
1291 '''Restore dirstate by backup file with suffix'''
1274 assert len(suffix) > 0 or len(prefix) > 0
1292 assert len(suffix) > 0 or len(prefix) > 0
1275 # this "invalidate()" prevents "wlock.release()" from writing
1293 # this "invalidate()" prevents "wlock.release()" from writing
1276 # changes of dirstate out after restoring from backup file
1294 # changes of dirstate out after restoring from backup file
1277 self.invalidate()
1295 self.invalidate()
1278 filename = self._actualfilename(tr)
1296 filename = self._actualfilename(tr)
1279 # using self._filename to avoid having "pending" in the backup filename
1297 # using self._filename to avoid having "pending" in the backup filename
1280 self._opener.rename(prefix + self._filename + suffix, filename,
1298 self._opener.rename(prefix + self._filename + suffix, filename,
1281 checkambig=True)
1299 checkambig=True)
1282
1300
1283 def clearbackup(self, tr, suffix='', prefix=''):
1301 def clearbackup(self, tr, suffix='', prefix=''):
1284 '''Clear backup file with suffix'''
1302 '''Clear backup file with suffix'''
1285 assert len(suffix) > 0 or len(prefix) > 0
1303 assert len(suffix) > 0 or len(prefix) > 0
1286 # using self._filename to avoid having "pending" in the backup filename
1304 # using self._filename to avoid having "pending" in the backup filename
1287 self._opener.unlink(prefix + self._filename + suffix)
1305 self._opener.unlink(prefix + self._filename + suffix)
General Comments 0
You need to be logged in to leave comments. Login now