dirstate: remove file from copymap on drop...
Mateusz Kwapich
r29247:3e438497 default
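
The two lines added to drop() (new lines 585-586) are the whole substance of this commit: when a file is dropped from the dirstate, any copy record naming it as a destination is discarded as well, so copied() no longer returns a stale source for it. Below is a minimal, self-contained sketch of that bookkeeping; it uses a toy class with plain dicts rather than Mercurial's real dirstate (which needs an opener, ui, root and validate callable to construct), so names like toydirstate are illustrative only.

# Toy illustration only: mimics the copymap bookkeeping touched by this
# commit, using plain dicts instead of Mercurial's real dirstate object.
class toydirstate(object):
    def __init__(self):
        self._map = {}      # filename -> state tuple (simplified)
        self._copymap = {}  # copy destination -> copy source

    def add(self, f):
        '''Mark a file added (simplified).'''
        self._map[f] = ('a', 0, -1, -1)

    def copy(self, source, dest):
        '''Mark dest as a copy of source.'''
        if source is not None:
            self._copymap[dest] = source

    def copied(self, f):
        return self._copymap.get(f, None)

    def drop(self, f):
        '''Drop a file from the dirstate.'''
        if f in self._map:
            del self._map[f]
            # the behaviour this commit adds: forget the copy source too
            if f in self._copymap:
                del self._copymap[f]

ds = toydirstate()
ds.add('b.txt')
ds.copy('a.txt', 'b.txt')
assert ds.copied('b.txt') == 'a.txt'
ds.drop('b.txt')
# before this change the stale copy entry would linger; now it is gone
assert ds.copied('b.txt') is None
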
@@ -1,1245 +1,1247 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid
16 from .node import nullid
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 match as matchmod,
20 match as matchmod,
21 osutil,
21 osutil,
22 parsers,
22 parsers,
23 pathutil,
23 pathutil,
24 scmutil,
24 scmutil,
25 util,
25 util,
26 )
26 )
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29 filecache = scmutil.filecache
29 filecache = scmutil.filecache
30 _rangemask = 0x7fffffff
30 _rangemask = 0x7fffffff
31
31
32 dirstatetuple = parsers.dirstatetuple
32 dirstatetuple = parsers.dirstatetuple
33
33
34 class repocache(filecache):
34 class repocache(filecache):
35 """filecache for files in .hg/"""
35 """filecache for files in .hg/"""
36 def join(self, obj, fname):
36 def join(self, obj, fname):
37 return obj._opener.join(fname)
37 return obj._opener.join(fname)
38
38
39 class rootcache(filecache):
39 class rootcache(filecache):
40 """filecache for files in the repository root"""
40 """filecache for files in the repository root"""
41 def join(self, obj, fname):
41 def join(self, obj, fname):
42 return obj._join(fname)
42 return obj._join(fname)
43
43
44 def _getfsnow(vfs):
44 def _getfsnow(vfs):
45 '''Get "now" timestamp on filesystem'''
45 '''Get "now" timestamp on filesystem'''
46 tmpfd, tmpname = vfs.mkstemp()
46 tmpfd, tmpname = vfs.mkstemp()
47 try:
47 try:
48 return os.fstat(tmpfd).st_mtime
48 return os.fstat(tmpfd).st_mtime
49 finally:
49 finally:
50 os.close(tmpfd)
50 os.close(tmpfd)
51 vfs.unlink(tmpname)
51 vfs.unlink(tmpname)
52
52
53 def nonnormalentries(dmap):
53 def nonnormalentries(dmap):
54 '''Compute the nonnormal dirstate entries from the dmap'''
54 '''Compute the nonnormal dirstate entries from the dmap'''
55 try:
55 try:
56 return parsers.nonnormalentries(dmap)
56 return parsers.nonnormalentries(dmap)
57 except AttributeError:
57 except AttributeError:
58 return set(fname for fname, e in dmap.iteritems()
58 return set(fname for fname, e in dmap.iteritems()
59 if e[0] != 'n' or e[3] == -1)
59 if e[0] != 'n' or e[3] == -1)
60
60
61 def _trypending(root, vfs, filename):
61 def _trypending(root, vfs, filename):
62 '''Open file to be read according to HG_PENDING environment variable
62 '''Open file to be read according to HG_PENDING environment variable
63
63
64 This opens '.pending' of specified 'filename' only when HG_PENDING
64 This opens '.pending' of specified 'filename' only when HG_PENDING
65 is equal to 'root'.
65 is equal to 'root'.
66
66
67 This returns '(fp, is_pending_opened)' tuple.
67 This returns '(fp, is_pending_opened)' tuple.
68 '''
68 '''
69 if root == os.environ.get('HG_PENDING'):
69 if root == os.environ.get('HG_PENDING'):
70 try:
70 try:
71 return (vfs('%s.pending' % filename), True)
71 return (vfs('%s.pending' % filename), True)
72 except IOError as inst:
72 except IOError as inst:
73 if inst.errno != errno.ENOENT:
73 if inst.errno != errno.ENOENT:
74 raise
74 raise
75 return (vfs(filename), False)
75 return (vfs(filename), False)
76
76
77 _token = object()
77 _token = object()
78
78
79 class dirstate(object):
79 class dirstate(object):
80
80
81 def __init__(self, opener, ui, root, validate):
81 def __init__(self, opener, ui, root, validate):
82 '''Create a new dirstate object.
82 '''Create a new dirstate object.
83
83
84 opener is an open()-like callable that can be used to open the
84 opener is an open()-like callable that can be used to open the
85 dirstate file; root is the root of the directory tracked by
85 dirstate file; root is the root of the directory tracked by
86 the dirstate.
86 the dirstate.
87 '''
87 '''
88 self._opener = opener
88 self._opener = opener
89 self._validate = validate
89 self._validate = validate
90 self._root = root
90 self._root = root
91 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
91 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
92 # UNC path pointing to root share (issue4557)
92 # UNC path pointing to root share (issue4557)
93 self._rootdir = pathutil.normasprefix(root)
93 self._rootdir = pathutil.normasprefix(root)
94 # internal config: ui.forcecwd
94 # internal config: ui.forcecwd
95 forcecwd = ui.config('ui', 'forcecwd')
95 forcecwd = ui.config('ui', 'forcecwd')
96 if forcecwd:
96 if forcecwd:
97 self._cwd = forcecwd
97 self._cwd = forcecwd
98 self._dirty = False
98 self._dirty = False
99 self._dirtypl = False
99 self._dirtypl = False
100 self._lastnormaltime = 0
100 self._lastnormaltime = 0
101 self._ui = ui
101 self._ui = ui
102 self._filecache = {}
102 self._filecache = {}
103 self._parentwriters = 0
103 self._parentwriters = 0
104 self._filename = 'dirstate'
104 self._filename = 'dirstate'
105 self._pendingfilename = '%s.pending' % self._filename
105 self._pendingfilename = '%s.pending' % self._filename
106
106
107 # for consistent view between _pl() and _read() invocations
107 # for consistent view between _pl() and _read() invocations
108 self._pendingmode = None
108 self._pendingmode = None
109
109
110 def beginparentchange(self):
110 def beginparentchange(self):
111 '''Marks the beginning of a set of changes that involve changing
111 '''Marks the beginning of a set of changes that involve changing
112 the dirstate parents. If there is an exception during this time,
112 the dirstate parents. If there is an exception during this time,
113 the dirstate will not be written when the wlock is released. This
113 the dirstate will not be written when the wlock is released. This
114 prevents writing an incoherent dirstate where the parent doesn't
114 prevents writing an incoherent dirstate where the parent doesn't
115 match the contents.
115 match the contents.
116 '''
116 '''
117 self._parentwriters += 1
117 self._parentwriters += 1
118
118
119 def endparentchange(self):
119 def endparentchange(self):
120 '''Marks the end of a set of changes that involve changing the
120 '''Marks the end of a set of changes that involve changing the
121 dirstate parents. Once all parent changes have been marked done,
121 dirstate parents. Once all parent changes have been marked done,
122 the wlock will be free to write the dirstate on release.
122 the wlock will be free to write the dirstate on release.
123 '''
123 '''
124 if self._parentwriters > 0:
124 if self._parentwriters > 0:
125 self._parentwriters -= 1
125 self._parentwriters -= 1
126
126
127 def pendingparentchange(self):
127 def pendingparentchange(self):
128 '''Returns true if the dirstate is in the middle of a set of changes
128 '''Returns true if the dirstate is in the middle of a set of changes
129 that modify the dirstate parent.
129 that modify the dirstate parent.
130 '''
130 '''
131 return self._parentwriters > 0
131 return self._parentwriters > 0
132
132
133 @propertycache
133 @propertycache
134 def _map(self):
134 def _map(self):
135 '''Return the dirstate contents as a map from filename to
135 '''Return the dirstate contents as a map from filename to
136 (state, mode, size, time).'''
136 (state, mode, size, time).'''
137 self._read()
137 self._read()
138 return self._map
138 return self._map
139
139
140 @propertycache
140 @propertycache
141 def _copymap(self):
141 def _copymap(self):
142 self._read()
142 self._read()
143 return self._copymap
143 return self._copymap
144
144
145 @propertycache
145 @propertycache
146 def _nonnormalset(self):
146 def _nonnormalset(self):
147 return nonnormalentries(self._map)
147 return nonnormalentries(self._map)
148
148
149 @propertycache
149 @propertycache
150 def _filefoldmap(self):
150 def _filefoldmap(self):
151 try:
151 try:
152 makefilefoldmap = parsers.make_file_foldmap
152 makefilefoldmap = parsers.make_file_foldmap
153 except AttributeError:
153 except AttributeError:
154 pass
154 pass
155 else:
155 else:
156 return makefilefoldmap(self._map, util.normcasespec,
156 return makefilefoldmap(self._map, util.normcasespec,
157 util.normcasefallback)
157 util.normcasefallback)
158
158
159 f = {}
159 f = {}
160 normcase = util.normcase
160 normcase = util.normcase
161 for name, s in self._map.iteritems():
161 for name, s in self._map.iteritems():
162 if s[0] != 'r':
162 if s[0] != 'r':
163 f[normcase(name)] = name
163 f[normcase(name)] = name
164 f['.'] = '.' # prevents useless util.fspath() invocation
164 f['.'] = '.' # prevents useless util.fspath() invocation
165 return f
165 return f
166
166
167 @propertycache
167 @propertycache
168 def _dirfoldmap(self):
168 def _dirfoldmap(self):
169 f = {}
169 f = {}
170 normcase = util.normcase
170 normcase = util.normcase
171 for name in self._dirs:
171 for name in self._dirs:
172 f[normcase(name)] = name
172 f[normcase(name)] = name
173 return f
173 return f
174
174
175 @repocache('branch')
175 @repocache('branch')
176 def _branch(self):
176 def _branch(self):
177 try:
177 try:
178 return self._opener.read("branch").strip() or "default"
178 return self._opener.read("branch").strip() or "default"
179 except IOError as inst:
179 except IOError as inst:
180 if inst.errno != errno.ENOENT:
180 if inst.errno != errno.ENOENT:
181 raise
181 raise
182 return "default"
182 return "default"
183
183
184 @propertycache
184 @propertycache
185 def _pl(self):
185 def _pl(self):
186 try:
186 try:
187 fp = self._opendirstatefile()
187 fp = self._opendirstatefile()
188 st = fp.read(40)
188 st = fp.read(40)
189 fp.close()
189 fp.close()
190 l = len(st)
190 l = len(st)
191 if l == 40:
191 if l == 40:
192 return st[:20], st[20:40]
192 return st[:20], st[20:40]
193 elif l > 0 and l < 40:
193 elif l > 0 and l < 40:
194 raise error.Abort(_('working directory state appears damaged!'))
194 raise error.Abort(_('working directory state appears damaged!'))
195 except IOError as err:
195 except IOError as err:
196 if err.errno != errno.ENOENT:
196 if err.errno != errno.ENOENT:
197 raise
197 raise
198 return [nullid, nullid]
198 return [nullid, nullid]
199
199
200 @propertycache
200 @propertycache
201 def _dirs(self):
201 def _dirs(self):
202 return util.dirs(self._map, 'r')
202 return util.dirs(self._map, 'r')
203
203
204 def dirs(self):
204 def dirs(self):
205 return self._dirs
205 return self._dirs
206
206
207 @rootcache('.hgignore')
207 @rootcache('.hgignore')
208 def _ignore(self):
208 def _ignore(self):
209 files = self._ignorefiles()
209 files = self._ignorefiles()
210 if not files:
210 if not files:
211 return util.never
211 return util.never
212
212
213 pats = ['include:%s' % f for f in files]
213 pats = ['include:%s' % f for f in files]
214 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
214 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
215
215
216 @propertycache
216 @propertycache
217 def _slash(self):
217 def _slash(self):
218 return self._ui.configbool('ui', 'slash') and os.sep != '/'
218 return self._ui.configbool('ui', 'slash') and os.sep != '/'
219
219
220 @propertycache
220 @propertycache
221 def _checklink(self):
221 def _checklink(self):
222 return util.checklink(self._root)
222 return util.checklink(self._root)
223
223
224 @propertycache
224 @propertycache
225 def _checkexec(self):
225 def _checkexec(self):
226 return util.checkexec(self._root)
226 return util.checkexec(self._root)
227
227
228 @propertycache
228 @propertycache
229 def _checkcase(self):
229 def _checkcase(self):
230 return not util.checkcase(self._join('.hg'))
230 return not util.checkcase(self._join('.hg'))
231
231
232 def _join(self, f):
232 def _join(self, f):
233 # much faster than os.path.join()
233 # much faster than os.path.join()
234 # it's safe because f is always a relative path
234 # it's safe because f is always a relative path
235 return self._rootdir + f
235 return self._rootdir + f
236
236
237 def flagfunc(self, buildfallback):
237 def flagfunc(self, buildfallback):
238 if self._checklink and self._checkexec:
238 if self._checklink and self._checkexec:
239 def f(x):
239 def f(x):
240 try:
240 try:
241 st = os.lstat(self._join(x))
241 st = os.lstat(self._join(x))
242 if util.statislink(st):
242 if util.statislink(st):
243 return 'l'
243 return 'l'
244 if util.statisexec(st):
244 if util.statisexec(st):
245 return 'x'
245 return 'x'
246 except OSError:
246 except OSError:
247 pass
247 pass
248 return ''
248 return ''
249 return f
249 return f
250
250
251 fallback = buildfallback()
251 fallback = buildfallback()
252 if self._checklink:
252 if self._checklink:
253 def f(x):
253 def f(x):
254 if os.path.islink(self._join(x)):
254 if os.path.islink(self._join(x)):
255 return 'l'
255 return 'l'
256 if 'x' in fallback(x):
256 if 'x' in fallback(x):
257 return 'x'
257 return 'x'
258 return ''
258 return ''
259 return f
259 return f
260 if self._checkexec:
260 if self._checkexec:
261 def f(x):
261 def f(x):
262 if 'l' in fallback(x):
262 if 'l' in fallback(x):
263 return 'l'
263 return 'l'
264 if util.isexec(self._join(x)):
264 if util.isexec(self._join(x)):
265 return 'x'
265 return 'x'
266 return ''
266 return ''
267 return f
267 return f
268 else:
268 else:
269 return fallback
269 return fallback
270
270
271 @propertycache
271 @propertycache
272 def _cwd(self):
272 def _cwd(self):
273 return os.getcwd()
273 return os.getcwd()
274
274
275 def getcwd(self):
275 def getcwd(self):
276 '''Return the path from which a canonical path is calculated.
276 '''Return the path from which a canonical path is calculated.
277
277
278 This path should be used to resolve file patterns or to convert
278 This path should be used to resolve file patterns or to convert
279 canonical paths back to file paths for display. It shouldn't be
279 canonical paths back to file paths for display. It shouldn't be
280 used to get real file paths. Use vfs functions instead.
280 used to get real file paths. Use vfs functions instead.
281 '''
281 '''
282 cwd = self._cwd
282 cwd = self._cwd
283 if cwd == self._root:
283 if cwd == self._root:
284 return ''
284 return ''
285 # self._root ends with a path separator if self._root is '/' or 'C:\'
285 # self._root ends with a path separator if self._root is '/' or 'C:\'
286 rootsep = self._root
286 rootsep = self._root
287 if not util.endswithsep(rootsep):
287 if not util.endswithsep(rootsep):
288 rootsep += os.sep
288 rootsep += os.sep
289 if cwd.startswith(rootsep):
289 if cwd.startswith(rootsep):
290 return cwd[len(rootsep):]
290 return cwd[len(rootsep):]
291 else:
291 else:
292 # we're outside the repo. return an absolute path.
292 # we're outside the repo. return an absolute path.
293 return cwd
293 return cwd
294
294
295 def pathto(self, f, cwd=None):
295 def pathto(self, f, cwd=None):
296 if cwd is None:
296 if cwd is None:
297 cwd = self.getcwd()
297 cwd = self.getcwd()
298 path = util.pathto(self._root, cwd, f)
298 path = util.pathto(self._root, cwd, f)
299 if self._slash:
299 if self._slash:
300 return util.pconvert(path)
300 return util.pconvert(path)
301 return path
301 return path
302
302
303 def __getitem__(self, key):
303 def __getitem__(self, key):
304 '''Return the current state of key (a filename) in the dirstate.
304 '''Return the current state of key (a filename) in the dirstate.
305
305
306 States are:
306 States are:
307 n normal
307 n normal
308 m needs merging
308 m needs merging
309 r marked for removal
309 r marked for removal
310 a marked for addition
310 a marked for addition
311 ? not tracked
311 ? not tracked
312 '''
312 '''
313 return self._map.get(key, ("?",))[0]
313 return self._map.get(key, ("?",))[0]
314
314
315 def __contains__(self, key):
315 def __contains__(self, key):
316 return key in self._map
316 return key in self._map
317
317
318 def __iter__(self):
318 def __iter__(self):
319 for x in sorted(self._map):
319 for x in sorted(self._map):
320 yield x
320 yield x
321
321
322 def iteritems(self):
322 def iteritems(self):
323 return self._map.iteritems()
323 return self._map.iteritems()
324
324
325 def parents(self):
325 def parents(self):
326 return [self._validate(p) for p in self._pl]
326 return [self._validate(p) for p in self._pl]
327
327
328 def p1(self):
328 def p1(self):
329 return self._validate(self._pl[0])
329 return self._validate(self._pl[0])
330
330
331 def p2(self):
331 def p2(self):
332 return self._validate(self._pl[1])
332 return self._validate(self._pl[1])
333
333
334 def branch(self):
334 def branch(self):
335 return encoding.tolocal(self._branch)
335 return encoding.tolocal(self._branch)
336
336
337 def setparents(self, p1, p2=nullid):
337 def setparents(self, p1, p2=nullid):
338 """Set dirstate parents to p1 and p2.
338 """Set dirstate parents to p1 and p2.
339
339
340 When moving from two parents to one, 'm' merged entries a
340 When moving from two parents to one, 'm' merged entries a
341 adjusted to normal and previous copy records discarded and
341 adjusted to normal and previous copy records discarded and
342 returned by the call.
342 returned by the call.
343
343
344 See localrepo.setparents()
344 See localrepo.setparents()
345 """
345 """
346 if self._parentwriters == 0:
346 if self._parentwriters == 0:
347 raise ValueError("cannot set dirstate parent without "
347 raise ValueError("cannot set dirstate parent without "
348 "calling dirstate.beginparentchange")
348 "calling dirstate.beginparentchange")
349
349
350 self._dirty = self._dirtypl = True
350 self._dirty = self._dirtypl = True
351 oldp2 = self._pl[1]
351 oldp2 = self._pl[1]
352 self._pl = p1, p2
352 self._pl = p1, p2
353 copies = {}
353 copies = {}
354 if oldp2 != nullid and p2 == nullid:
354 if oldp2 != nullid and p2 == nullid:
355 for f, s in self._map.iteritems():
355 for f, s in self._map.iteritems():
356 # Discard 'm' markers when moving away from a merge state
356 # Discard 'm' markers when moving away from a merge state
357 if s[0] == 'm':
357 if s[0] == 'm':
358 if f in self._copymap:
358 if f in self._copymap:
359 copies[f] = self._copymap[f]
359 copies[f] = self._copymap[f]
360 self.normallookup(f)
360 self.normallookup(f)
361 # Also fix up otherparent markers
361 # Also fix up otherparent markers
362 elif s[0] == 'n' and s[2] == -2:
362 elif s[0] == 'n' and s[2] == -2:
363 if f in self._copymap:
363 if f in self._copymap:
364 copies[f] = self._copymap[f]
364 copies[f] = self._copymap[f]
365 self.add(f)
365 self.add(f)
366 return copies
366 return copies
367
367
368 def setbranch(self, branch):
368 def setbranch(self, branch):
369 self._branch = encoding.fromlocal(branch)
369 self._branch = encoding.fromlocal(branch)
370 f = self._opener('branch', 'w', atomictemp=True)
370 f = self._opener('branch', 'w', atomictemp=True)
371 try:
371 try:
372 f.write(self._branch + '\n')
372 f.write(self._branch + '\n')
373 f.close()
373 f.close()
374
374
375 # make sure filecache has the correct stat info for _branch after
375 # make sure filecache has the correct stat info for _branch after
376 # replacing the underlying file
376 # replacing the underlying file
377 ce = self._filecache['_branch']
377 ce = self._filecache['_branch']
378 if ce:
378 if ce:
379 ce.refresh()
379 ce.refresh()
380 except: # re-raises
380 except: # re-raises
381 f.discard()
381 f.discard()
382 raise
382 raise
383
383
384 def _opendirstatefile(self):
384 def _opendirstatefile(self):
385 fp, mode = _trypending(self._root, self._opener, self._filename)
385 fp, mode = _trypending(self._root, self._opener, self._filename)
386 if self._pendingmode is not None and self._pendingmode != mode:
386 if self._pendingmode is not None and self._pendingmode != mode:
387 fp.close()
387 fp.close()
388 raise error.Abort(_('working directory state may be '
388 raise error.Abort(_('working directory state may be '
389 'changed parallelly'))
389 'changed parallelly'))
390 self._pendingmode = mode
390 self._pendingmode = mode
391 return fp
391 return fp
392
392
393 def _read(self):
393 def _read(self):
394 self._map = {}
394 self._map = {}
395 self._copymap = {}
395 self._copymap = {}
396 try:
396 try:
397 fp = self._opendirstatefile()
397 fp = self._opendirstatefile()
398 try:
398 try:
399 st = fp.read()
399 st = fp.read()
400 finally:
400 finally:
401 fp.close()
401 fp.close()
402 except IOError as err:
402 except IOError as err:
403 if err.errno != errno.ENOENT:
403 if err.errno != errno.ENOENT:
404 raise
404 raise
405 return
405 return
406 if not st:
406 if not st:
407 return
407 return
408
408
409 if util.safehasattr(parsers, 'dict_new_presized'):
409 if util.safehasattr(parsers, 'dict_new_presized'):
410 # Make an estimate of the number of files in the dirstate based on
410 # Make an estimate of the number of files in the dirstate based on
411 # its size. From a linear regression on a set of real-world repos,
411 # its size. From a linear regression on a set of real-world repos,
412 # all over 10,000 files, the size of a dirstate entry is 85
412 # all over 10,000 files, the size of a dirstate entry is 85
413 # bytes. The cost of resizing is significantly higher than the cost
413 # bytes. The cost of resizing is significantly higher than the cost
414 # of filling in a larger presized dict, so subtract 20% from the
414 # of filling in a larger presized dict, so subtract 20% from the
415 # size.
415 # size.
416 #
416 #
417 # This heuristic is imperfect in many ways, so in a future dirstate
417 # This heuristic is imperfect in many ways, so in a future dirstate
418 # format update it makes sense to just record the number of entries
418 # format update it makes sense to just record the number of entries
419 # on write.
419 # on write.
420 self._map = parsers.dict_new_presized(len(st) / 71)
420 self._map = parsers.dict_new_presized(len(st) / 71)
421
421
422 # Python's garbage collector triggers a GC each time a certain number
422 # Python's garbage collector triggers a GC each time a certain number
423 # of container objects (the number being defined by
423 # of container objects (the number being defined by
424 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
424 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
425 # for each file in the dirstate. The C version then immediately marks
425 # for each file in the dirstate. The C version then immediately marks
426 # them as not to be tracked by the collector. However, this has no
426 # them as not to be tracked by the collector. However, this has no
427 # effect on when GCs are triggered, only on what objects the GC looks
427 # effect on when GCs are triggered, only on what objects the GC looks
428 # into. This means that O(number of files) GCs are unavoidable.
428 # into. This means that O(number of files) GCs are unavoidable.
429 # Depending on when in the process's lifetime the dirstate is parsed,
429 # Depending on when in the process's lifetime the dirstate is parsed,
430 # this can get very expensive. As a workaround, disable GC while
430 # this can get very expensive. As a workaround, disable GC while
431 # parsing the dirstate.
431 # parsing the dirstate.
432 #
432 #
433 # (we cannot decorate the function directly since it is in a C module)
433 # (we cannot decorate the function directly since it is in a C module)
434 parse_dirstate = util.nogc(parsers.parse_dirstate)
434 parse_dirstate = util.nogc(parsers.parse_dirstate)
435 p = parse_dirstate(self._map, self._copymap, st)
435 p = parse_dirstate(self._map, self._copymap, st)
436 if not self._dirtypl:
436 if not self._dirtypl:
437 self._pl = p
437 self._pl = p
438
438
439 def invalidate(self):
439 def invalidate(self):
440 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
440 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
441 "_pl", "_dirs", "_ignore", "_nonnormalset"):
441 "_pl", "_dirs", "_ignore", "_nonnormalset"):
442 if a in self.__dict__:
442 if a in self.__dict__:
443 delattr(self, a)
443 delattr(self, a)
444 self._lastnormaltime = 0
444 self._lastnormaltime = 0
445 self._dirty = False
445 self._dirty = False
446 self._parentwriters = 0
446 self._parentwriters = 0
447
447
448 def copy(self, source, dest):
448 def copy(self, source, dest):
449 """Mark dest as a copy of source. Unmark dest if source is None."""
449 """Mark dest as a copy of source. Unmark dest if source is None."""
450 if source == dest:
450 if source == dest:
451 return
451 return
452 self._dirty = True
452 self._dirty = True
453 if source is not None:
453 if source is not None:
454 self._copymap[dest] = source
454 self._copymap[dest] = source
455 elif dest in self._copymap:
455 elif dest in self._copymap:
456 del self._copymap[dest]
456 del self._copymap[dest]
457
457
458 def copied(self, file):
458 def copied(self, file):
459 return self._copymap.get(file, None)
459 return self._copymap.get(file, None)
460
460
461 def copies(self):
461 def copies(self):
462 return self._copymap
462 return self._copymap
463
463
464 def _droppath(self, f):
464 def _droppath(self, f):
465 if self[f] not in "?r" and "_dirs" in self.__dict__:
465 if self[f] not in "?r" and "_dirs" in self.__dict__:
466 self._dirs.delpath(f)
466 self._dirs.delpath(f)
467
467
468 if "_filefoldmap" in self.__dict__:
468 if "_filefoldmap" in self.__dict__:
469 normed = util.normcase(f)
469 normed = util.normcase(f)
470 if normed in self._filefoldmap:
470 if normed in self._filefoldmap:
471 del self._filefoldmap[normed]
471 del self._filefoldmap[normed]
472
472
473 def _addpath(self, f, state, mode, size, mtime):
473 def _addpath(self, f, state, mode, size, mtime):
474 oldstate = self[f]
474 oldstate = self[f]
475 if state == 'a' or oldstate == 'r':
475 if state == 'a' or oldstate == 'r':
476 scmutil.checkfilename(f)
476 scmutil.checkfilename(f)
477 if f in self._dirs:
477 if f in self._dirs:
478 raise error.Abort(_('directory %r already in dirstate') % f)
478 raise error.Abort(_('directory %r already in dirstate') % f)
479 # shadows
479 # shadows
480 for d in util.finddirs(f):
480 for d in util.finddirs(f):
481 if d in self._dirs:
481 if d in self._dirs:
482 break
482 break
483 if d in self._map and self[d] != 'r':
483 if d in self._map and self[d] != 'r':
484 raise error.Abort(
484 raise error.Abort(
485 _('file %r in dirstate clashes with %r') % (d, f))
485 _('file %r in dirstate clashes with %r') % (d, f))
486 if oldstate in "?r" and "_dirs" in self.__dict__:
486 if oldstate in "?r" and "_dirs" in self.__dict__:
487 self._dirs.addpath(f)
487 self._dirs.addpath(f)
488 self._dirty = True
488 self._dirty = True
489 self._map[f] = dirstatetuple(state, mode, size, mtime)
489 self._map[f] = dirstatetuple(state, mode, size, mtime)
490 if state != 'n' or mtime == -1:
490 if state != 'n' or mtime == -1:
491 self._nonnormalset.add(f)
491 self._nonnormalset.add(f)
492
492
493 def normal(self, f):
493 def normal(self, f):
494 '''Mark a file normal and clean.'''
494 '''Mark a file normal and clean.'''
495 s = os.lstat(self._join(f))
495 s = os.lstat(self._join(f))
496 mtime = s.st_mtime
496 mtime = s.st_mtime
497 self._addpath(f, 'n', s.st_mode,
497 self._addpath(f, 'n', s.st_mode,
498 s.st_size & _rangemask, mtime & _rangemask)
498 s.st_size & _rangemask, mtime & _rangemask)
499 if f in self._copymap:
499 if f in self._copymap:
500 del self._copymap[f]
500 del self._copymap[f]
501 if f in self._nonnormalset:
501 if f in self._nonnormalset:
502 self._nonnormalset.remove(f)
502 self._nonnormalset.remove(f)
503 if mtime > self._lastnormaltime:
503 if mtime > self._lastnormaltime:
504 # Remember the most recent modification timeslot for status(),
504 # Remember the most recent modification timeslot for status(),
505 # to make sure we won't miss future size-preserving file content
505 # to make sure we won't miss future size-preserving file content
506 # modifications that happen within the same timeslot.
506 # modifications that happen within the same timeslot.
507 self._lastnormaltime = mtime
507 self._lastnormaltime = mtime
508
508
509 def normallookup(self, f):
509 def normallookup(self, f):
510 '''Mark a file normal, but possibly dirty.'''
510 '''Mark a file normal, but possibly dirty.'''
511 if self._pl[1] != nullid and f in self._map:
511 if self._pl[1] != nullid and f in self._map:
512 # if there is a merge going on and the file was either
512 # if there is a merge going on and the file was either
513 # in state 'm' (-1) or coming from other parent (-2) before
513 # in state 'm' (-1) or coming from other parent (-2) before
514 # being removed, restore that state.
514 # being removed, restore that state.
515 entry = self._map[f]
515 entry = self._map[f]
516 if entry[0] == 'r' and entry[2] in (-1, -2):
516 if entry[0] == 'r' and entry[2] in (-1, -2):
517 source = self._copymap.get(f)
517 source = self._copymap.get(f)
518 if entry[2] == -1:
518 if entry[2] == -1:
519 self.merge(f)
519 self.merge(f)
520 elif entry[2] == -2:
520 elif entry[2] == -2:
521 self.otherparent(f)
521 self.otherparent(f)
522 if source:
522 if source:
523 self.copy(source, f)
523 self.copy(source, f)
524 return
524 return
525 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
525 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
526 return
526 return
527 self._addpath(f, 'n', 0, -1, -1)
527 self._addpath(f, 'n', 0, -1, -1)
528 if f in self._copymap:
528 if f in self._copymap:
529 del self._copymap[f]
529 del self._copymap[f]
530 if f in self._nonnormalset:
530 if f in self._nonnormalset:
531 self._nonnormalset.remove(f)
531 self._nonnormalset.remove(f)
532
532
533 def otherparent(self, f):
533 def otherparent(self, f):
534 '''Mark as coming from the other parent, always dirty.'''
534 '''Mark as coming from the other parent, always dirty.'''
535 if self._pl[1] == nullid:
535 if self._pl[1] == nullid:
536 raise error.Abort(_("setting %r to other parent "
536 raise error.Abort(_("setting %r to other parent "
537 "only allowed in merges") % f)
537 "only allowed in merges") % f)
538 if f in self and self[f] == 'n':
538 if f in self and self[f] == 'n':
539 # merge-like
539 # merge-like
540 self._addpath(f, 'm', 0, -2, -1)
540 self._addpath(f, 'm', 0, -2, -1)
541 else:
541 else:
542 # add-like
542 # add-like
543 self._addpath(f, 'n', 0, -2, -1)
543 self._addpath(f, 'n', 0, -2, -1)
544
544
545 if f in self._copymap:
545 if f in self._copymap:
546 del self._copymap[f]
546 del self._copymap[f]
547
547
548 def add(self, f):
548 def add(self, f):
549 '''Mark a file added.'''
549 '''Mark a file added.'''
550 self._addpath(f, 'a', 0, -1, -1)
550 self._addpath(f, 'a', 0, -1, -1)
551 if f in self._copymap:
551 if f in self._copymap:
552 del self._copymap[f]
552 del self._copymap[f]
553
553
554 def remove(self, f):
554 def remove(self, f):
555 '''Mark a file removed.'''
555 '''Mark a file removed.'''
556 self._dirty = True
556 self._dirty = True
557 self._droppath(f)
557 self._droppath(f)
558 size = 0
558 size = 0
559 if self._pl[1] != nullid and f in self._map:
559 if self._pl[1] != nullid and f in self._map:
560 # backup the previous state
560 # backup the previous state
561 entry = self._map[f]
561 entry = self._map[f]
562 if entry[0] == 'm': # merge
562 if entry[0] == 'm': # merge
563 size = -1
563 size = -1
564 elif entry[0] == 'n' and entry[2] == -2: # other parent
564 elif entry[0] == 'n' and entry[2] == -2: # other parent
565 size = -2
565 size = -2
566 self._map[f] = dirstatetuple('r', 0, size, 0)
566 self._map[f] = dirstatetuple('r', 0, size, 0)
567 self._nonnormalset.add(f)
567 self._nonnormalset.add(f)
568 if size == 0 and f in self._copymap:
568 if size == 0 and f in self._copymap:
569 del self._copymap[f]
569 del self._copymap[f]
570
570
571 def merge(self, f):
571 def merge(self, f):
572 '''Mark a file merged.'''
572 '''Mark a file merged.'''
573 if self._pl[1] == nullid:
573 if self._pl[1] == nullid:
574 return self.normallookup(f)
574 return self.normallookup(f)
575 return self.otherparent(f)
575 return self.otherparent(f)
576
576
577     def drop(self, f):
578         '''Drop a file from the dirstate'''
579         if f in self._map:
580             self._dirty = True
581             self._droppath(f)
582             del self._map[f]
583             if f in self._nonnormalset:
584                 self._nonnormalset.remove(f)
585             if f in self._copymap:
586                 del self._copymap[f]
587
586 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
588 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
587 if exists is None:
589 if exists is None:
588 exists = os.path.lexists(os.path.join(self._root, path))
590 exists = os.path.lexists(os.path.join(self._root, path))
589 if not exists:
591 if not exists:
590 # Maybe a path component exists
592 # Maybe a path component exists
591 if not ignoremissing and '/' in path:
593 if not ignoremissing and '/' in path:
592 d, f = path.rsplit('/', 1)
594 d, f = path.rsplit('/', 1)
593 d = self._normalize(d, False, ignoremissing, None)
595 d = self._normalize(d, False, ignoremissing, None)
594 folded = d + "/" + f
596 folded = d + "/" + f
595 else:
597 else:
596 # No path components, preserve original case
598 # No path components, preserve original case
597 folded = path
599 folded = path
598 else:
600 else:
599 # recursively normalize leading directory components
601 # recursively normalize leading directory components
600 # against dirstate
602 # against dirstate
601 if '/' in normed:
603 if '/' in normed:
602 d, f = normed.rsplit('/', 1)
604 d, f = normed.rsplit('/', 1)
603 d = self._normalize(d, False, ignoremissing, True)
605 d = self._normalize(d, False, ignoremissing, True)
604 r = self._root + "/" + d
606 r = self._root + "/" + d
605 folded = d + "/" + util.fspath(f, r)
607 folded = d + "/" + util.fspath(f, r)
606 else:
608 else:
607 folded = util.fspath(normed, self._root)
609 folded = util.fspath(normed, self._root)
608 storemap[normed] = folded
610 storemap[normed] = folded
609
611
610 return folded
612 return folded
611
613
612 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
614 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
613 normed = util.normcase(path)
615 normed = util.normcase(path)
614 folded = self._filefoldmap.get(normed, None)
616 folded = self._filefoldmap.get(normed, None)
615 if folded is None:
617 if folded is None:
616 if isknown:
618 if isknown:
617 folded = path
619 folded = path
618 else:
620 else:
619 folded = self._discoverpath(path, normed, ignoremissing, exists,
621 folded = self._discoverpath(path, normed, ignoremissing, exists,
620 self._filefoldmap)
622 self._filefoldmap)
621 return folded
623 return folded
622
624
623 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
625 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
624 normed = util.normcase(path)
626 normed = util.normcase(path)
625 folded = self._filefoldmap.get(normed, None)
627 folded = self._filefoldmap.get(normed, None)
626 if folded is None:
628 if folded is None:
627 folded = self._dirfoldmap.get(normed, None)
629 folded = self._dirfoldmap.get(normed, None)
628 if folded is None:
630 if folded is None:
629 if isknown:
631 if isknown:
630 folded = path
632 folded = path
631 else:
633 else:
632 # store discovered result in dirfoldmap so that future
634 # store discovered result in dirfoldmap so that future
633 # normalizefile calls don't start matching directories
635 # normalizefile calls don't start matching directories
634 folded = self._discoverpath(path, normed, ignoremissing, exists,
636 folded = self._discoverpath(path, normed, ignoremissing, exists,
635 self._dirfoldmap)
637 self._dirfoldmap)
636 return folded
638 return folded
637
639
638 def normalize(self, path, isknown=False, ignoremissing=False):
640 def normalize(self, path, isknown=False, ignoremissing=False):
639 '''
641 '''
640 normalize the case of a pathname when on a casefolding filesystem
642 normalize the case of a pathname when on a casefolding filesystem
641
643
642 isknown specifies whether the filename came from walking the
644 isknown specifies whether the filename came from walking the
643 disk, to avoid extra filesystem access.
645 disk, to avoid extra filesystem access.
644
646
645 If ignoremissing is True, missing path are returned
647 If ignoremissing is True, missing path are returned
646 unchanged. Otherwise, we try harder to normalize possibly
648 unchanged. Otherwise, we try harder to normalize possibly
647 existing path components.
649 existing path components.
648
650
649 The normalized case is determined based on the following precedence:
651 The normalized case is determined based on the following precedence:
650
652
651 - version of name already stored in the dirstate
653 - version of name already stored in the dirstate
652 - version of name stored on disk
654 - version of name stored on disk
653 - version provided via command arguments
655 - version provided via command arguments
654 '''
656 '''
655
657
656 if self._checkcase:
658 if self._checkcase:
657 return self._normalize(path, isknown, ignoremissing)
659 return self._normalize(path, isknown, ignoremissing)
658 return path
660 return path
659
661
660 def clear(self):
662 def clear(self):
661 self._map = {}
663 self._map = {}
662 self._nonnormalset = set()
664 self._nonnormalset = set()
663 if "_dirs" in self.__dict__:
665 if "_dirs" in self.__dict__:
664 delattr(self, "_dirs")
666 delattr(self, "_dirs")
665 self._copymap = {}
667 self._copymap = {}
666 self._pl = [nullid, nullid]
668 self._pl = [nullid, nullid]
667 self._lastnormaltime = 0
669 self._lastnormaltime = 0
668 self._dirty = True
670 self._dirty = True
669
671
670 def rebuild(self, parent, allfiles, changedfiles=None):
672 def rebuild(self, parent, allfiles, changedfiles=None):
671 if changedfiles is None:
673 if changedfiles is None:
672 # Rebuild entire dirstate
674 # Rebuild entire dirstate
673 changedfiles = allfiles
675 changedfiles = allfiles
674 lastnormaltime = self._lastnormaltime
676 lastnormaltime = self._lastnormaltime
675 self.clear()
677 self.clear()
676 self._lastnormaltime = lastnormaltime
678 self._lastnormaltime = lastnormaltime
677
679
678 for f in changedfiles:
680 for f in changedfiles:
679 mode = 0o666
681 mode = 0o666
680 if f in allfiles and 'x' in allfiles.flags(f):
682 if f in allfiles and 'x' in allfiles.flags(f):
681 mode = 0o777
683 mode = 0o777
682
684
683 if f in allfiles:
685 if f in allfiles:
684 self._map[f] = dirstatetuple('n', mode, -1, 0)
686 self._map[f] = dirstatetuple('n', mode, -1, 0)
685 else:
687 else:
686 self._map.pop(f, None)
688 self._map.pop(f, None)
687 if f in self._nonnormalset:
689 if f in self._nonnormalset:
688 self._nonnormalset.remove(f)
690 self._nonnormalset.remove(f)
689
691
690 self._pl = (parent, nullid)
692 self._pl = (parent, nullid)
691 self._dirty = True
693 self._dirty = True
692
694
693 def write(self, tr=_token):
695 def write(self, tr=_token):
694 if not self._dirty:
696 if not self._dirty:
695 return
697 return
696
698
697 filename = self._filename
699 filename = self._filename
698 if tr is _token: # not explicitly specified
700 if tr is _token: # not explicitly specified
699 self._ui.deprecwarn('use dirstate.write with '
701 self._ui.deprecwarn('use dirstate.write with '
700 'repo.currenttransaction()',
702 'repo.currenttransaction()',
701 '3.9')
703 '3.9')
702
704
703 if self._opener.lexists(self._pendingfilename):
705 if self._opener.lexists(self._pendingfilename):
704 # if pending file already exists, in-memory changes
706 # if pending file already exists, in-memory changes
705 # should be written into it, because it has priority
707 # should be written into it, because it has priority
706 # to '.hg/dirstate' at reading under HG_PENDING mode
708 # to '.hg/dirstate' at reading under HG_PENDING mode
707 filename = self._pendingfilename
709 filename = self._pendingfilename
708 elif tr:
710 elif tr:
709 # 'dirstate.write()' is not only for writing in-memory
711 # 'dirstate.write()' is not only for writing in-memory
710 # changes out, but also for dropping ambiguous timestamp.
712 # changes out, but also for dropping ambiguous timestamp.
711 # delayed writing re-raise "ambiguous timestamp issue".
713 # delayed writing re-raise "ambiguous timestamp issue".
712 # See also the wiki page below for detail:
714 # See also the wiki page below for detail:
713 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
715 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
714
716
715 # emulate dropping timestamp in 'parsers.pack_dirstate'
717 # emulate dropping timestamp in 'parsers.pack_dirstate'
716 now = _getfsnow(self._opener)
718 now = _getfsnow(self._opener)
717 dmap = self._map
719 dmap = self._map
718 for f, e in dmap.iteritems():
720 for f, e in dmap.iteritems():
719 if e[0] == 'n' and e[3] == now:
721 if e[0] == 'n' and e[3] == now:
720 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
722 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
721 self._nonnormalset.add(f)
723 self._nonnormalset.add(f)
722
724
723 # emulate that all 'dirstate.normal' results are written out
725 # emulate that all 'dirstate.normal' results are written out
724 self._lastnormaltime = 0
726 self._lastnormaltime = 0
725
727
726 # delay writing in-memory changes out
728 # delay writing in-memory changes out
727 tr.addfilegenerator('dirstate', (self._filename,),
729 tr.addfilegenerator('dirstate', (self._filename,),
728 self._writedirstate, location='plain')
730 self._writedirstate, location='plain')
729 return
731 return
730
732
731 st = self._opener(filename, "w", atomictemp=True)
733 st = self._opener(filename, "w", atomictemp=True)
732 self._writedirstate(st)
734 self._writedirstate(st)
733
735
734 def _writedirstate(self, st):
736 def _writedirstate(self, st):
735 # use the modification time of the newly created temporary file as the
737 # use the modification time of the newly created temporary file as the
736 # filesystem's notion of 'now'
738 # filesystem's notion of 'now'
737 now = util.fstat(st).st_mtime & _rangemask
739 now = util.fstat(st).st_mtime & _rangemask
738
740
739 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
741 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
740 # timestamp of each entries in dirstate, because of 'now > mtime'
742 # timestamp of each entries in dirstate, because of 'now > mtime'
741 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
743 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
742 if delaywrite > 0:
744 if delaywrite > 0:
743 # do we have any files to delay for?
745 # do we have any files to delay for?
744 for f, e in self._map.iteritems():
746 for f, e in self._map.iteritems():
745 if e[0] == 'n' and e[3] == now:
747 if e[0] == 'n' and e[3] == now:
746 import time # to avoid useless import
748 import time # to avoid useless import
747 # rather than sleep n seconds, sleep until the next
749 # rather than sleep n seconds, sleep until the next
748 # multiple of n seconds
750 # multiple of n seconds
749 clock = time.time()
751 clock = time.time()
750 start = int(clock) - (int(clock) % delaywrite)
752 start = int(clock) - (int(clock) % delaywrite)
751 end = start + delaywrite
753 end = start + delaywrite
752 time.sleep(end - clock)
754 time.sleep(end - clock)
753 break
755 break
754
756
755 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
757 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
756 self._nonnormalset = nonnormalentries(self._map)
758 self._nonnormalset = nonnormalentries(self._map)
757 st.close()
759 st.close()
758 self._lastnormaltime = 0
760 self._lastnormaltime = 0
759 self._dirty = self._dirtypl = False
761 self._dirty = self._dirtypl = False
760
762
761 def _dirignore(self, f):
763 def _dirignore(self, f):
762 if f == '.':
764 if f == '.':
763 return False
765 return False
764 if self._ignore(f):
766 if self._ignore(f):
765 return True
767 return True
766 for p in util.finddirs(f):
768 for p in util.finddirs(f):
767 if self._ignore(p):
769 if self._ignore(p):
768 return True
770 return True
769 return False
771 return False
770
772
771 def _ignorefiles(self):
773 def _ignorefiles(self):
772 files = []
774 files = []
773 if os.path.exists(self._join('.hgignore')):
775 if os.path.exists(self._join('.hgignore')):
774 files.append(self._join('.hgignore'))
776 files.append(self._join('.hgignore'))
775 for name, path in self._ui.configitems("ui"):
777 for name, path in self._ui.configitems("ui"):
776 if name == 'ignore' or name.startswith('ignore.'):
778 if name == 'ignore' or name.startswith('ignore.'):
777 # we need to use os.path.join here rather than self._join
779 # we need to use os.path.join here rather than self._join
778 # because path is arbitrary and user-specified
780 # because path is arbitrary and user-specified
779 files.append(os.path.join(self._rootdir, util.expandpath(path)))
781 files.append(os.path.join(self._rootdir, util.expandpath(path)))
780 return files
782 return files
781
783
782 def _ignorefileandline(self, f):
784 def _ignorefileandline(self, f):
783 files = collections.deque(self._ignorefiles())
785 files = collections.deque(self._ignorefiles())
784 visited = set()
786 visited = set()
785 while files:
787 while files:
786 i = files.popleft()
788 i = files.popleft()
787 patterns = matchmod.readpatternfile(i, self._ui.warn,
789 patterns = matchmod.readpatternfile(i, self._ui.warn,
788 sourceinfo=True)
790 sourceinfo=True)
789 for pattern, lineno, line in patterns:
791 for pattern, lineno, line in patterns:
790 kind, p = matchmod._patsplit(pattern, 'glob')
792 kind, p = matchmod._patsplit(pattern, 'glob')
791 if kind == "subinclude":
793 if kind == "subinclude":
792 if p not in visited:
794 if p not in visited:
793 files.append(p)
795 files.append(p)
794 continue
796 continue
795 m = matchmod.match(self._root, '', [], [pattern],
797 m = matchmod.match(self._root, '', [], [pattern],
796 warn=self._ui.warn)
798 warn=self._ui.warn)
797 if m(f):
799 if m(f):
798 return (i, lineno, line)
800 return (i, lineno, line)
799 visited.add(i)
801 visited.add(i)
800 return (None, -1, "")
802 return (None, -1, "")
801
803
802 def _walkexplicit(self, match, subrepos):
804 def _walkexplicit(self, match, subrepos):
803 '''Get stat data about the files explicitly specified by match.
805 '''Get stat data about the files explicitly specified by match.
804
806
805 Return a triple (results, dirsfound, dirsnotfound).
807 Return a triple (results, dirsfound, dirsnotfound).
806 - results is a mapping from filename to stat result. It also contains
808 - results is a mapping from filename to stat result. It also contains
807 listings mapping subrepos and .hg to None.
809 listings mapping subrepos and .hg to None.
808 - dirsfound is a list of files found to be directories.
810 - dirsfound is a list of files found to be directories.
809 - dirsnotfound is a list of files that the dirstate thinks are
811 - dirsnotfound is a list of files that the dirstate thinks are
810 directories and that were not found.'''
812 directories and that were not found.'''
811
813
812 def badtype(mode):
814 def badtype(mode):
813 kind = _('unknown')
815 kind = _('unknown')
814 if stat.S_ISCHR(mode):
816 if stat.S_ISCHR(mode):
815 kind = _('character device')
817 kind = _('character device')
816 elif stat.S_ISBLK(mode):
818 elif stat.S_ISBLK(mode):
817 kind = _('block device')
819 kind = _('block device')
818 elif stat.S_ISFIFO(mode):
820 elif stat.S_ISFIFO(mode):
819 kind = _('fifo')
821 kind = _('fifo')
820 elif stat.S_ISSOCK(mode):
822 elif stat.S_ISSOCK(mode):
821 kind = _('socket')
823 kind = _('socket')
822 elif stat.S_ISDIR(mode):
824 elif stat.S_ISDIR(mode):
823 kind = _('directory')
825 kind = _('directory')
824 return _('unsupported file type (type is %s)') % kind
826 return _('unsupported file type (type is %s)') % kind
825
827
826 matchedir = match.explicitdir
828 matchedir = match.explicitdir
827 badfn = match.bad
829 badfn = match.bad
828 dmap = self._map
830 dmap = self._map
829 lstat = os.lstat
831 lstat = os.lstat
830 getkind = stat.S_IFMT
832 getkind = stat.S_IFMT
831 dirkind = stat.S_IFDIR
833 dirkind = stat.S_IFDIR
832 regkind = stat.S_IFREG
834 regkind = stat.S_IFREG
833 lnkkind = stat.S_IFLNK
835 lnkkind = stat.S_IFLNK
834 join = self._join
836 join = self._join
835 dirsfound = []
837 dirsfound = []
836 foundadd = dirsfound.append
838 foundadd = dirsfound.append
837 dirsnotfound = []
839 dirsnotfound = []
838 notfoundadd = dirsnotfound.append
840 notfoundadd = dirsnotfound.append
839
841
840 if not match.isexact() and self._checkcase:
842 if not match.isexact() and self._checkcase:
841 normalize = self._normalize
843 normalize = self._normalize
842 else:
844 else:
843 normalize = None
845 normalize = None
844
846
845 files = sorted(match.files())
847 files = sorted(match.files())
846 subrepos.sort()
848 subrepos.sort()
847 i, j = 0, 0
849 i, j = 0, 0
848 while i < len(files) and j < len(subrepos):
850 while i < len(files) and j < len(subrepos):
849 subpath = subrepos[j] + "/"
851 subpath = subrepos[j] + "/"
850 if files[i] < subpath:
852 if files[i] < subpath:
851 i += 1
853 i += 1
852 continue
854 continue
853 while i < len(files) and files[i].startswith(subpath):
855 while i < len(files) and files[i].startswith(subpath):
854 del files[i]
856 del files[i]
855 j += 1
857 j += 1
856
858
857 if not files or '.' in files:
859 if not files or '.' in files:
858 files = ['.']
860 files = ['.']
859 results = dict.fromkeys(subrepos)
861 results = dict.fromkeys(subrepos)
860 results['.hg'] = None
862 results['.hg'] = None
861
863
862 alldirs = None
864 alldirs = None
863 for ff in files:
865 for ff in files:
864 # constructing the foldmap is expensive, so don't do it for the
866 # constructing the foldmap is expensive, so don't do it for the
865 # common case where files is ['.']
867 # common case where files is ['.']
866 if normalize and ff != '.':
868 if normalize and ff != '.':
867 nf = normalize(ff, False, True)
869 nf = normalize(ff, False, True)
868 else:
870 else:
869 nf = ff
871 nf = ff
870 if nf in results:
872 if nf in results:
871 continue
873 continue
872
874
873 try:
875 try:
874 st = lstat(join(nf))
876 st = lstat(join(nf))
875 kind = getkind(st.st_mode)
877 kind = getkind(st.st_mode)
876 if kind == dirkind:
878 if kind == dirkind:
877 if nf in dmap:
879 if nf in dmap:
878 # file replaced by dir on disk but still in dirstate
880 # file replaced by dir on disk but still in dirstate
879 results[nf] = None
881 results[nf] = None
880 if matchedir:
882 if matchedir:
881 matchedir(nf)
883 matchedir(nf)
882 foundadd((nf, ff))
884 foundadd((nf, ff))
883 elif kind == regkind or kind == lnkkind:
885 elif kind == regkind or kind == lnkkind:
884 results[nf] = st
886 results[nf] = st
885 else:
887 else:
886 badfn(ff, badtype(kind))
888 badfn(ff, badtype(kind))
887 if nf in dmap:
889 if nf in dmap:
888 results[nf] = None
890 results[nf] = None
889 except OSError as inst: # nf not found on disk - it is dirstate only
891 except OSError as inst: # nf not found on disk - it is dirstate only
890 if nf in dmap: # does it exactly match a missing file?
892 if nf in dmap: # does it exactly match a missing file?
891 results[nf] = None
893 results[nf] = None
892 else: # does it match a missing directory?
894 else: # does it match a missing directory?
893 if alldirs is None:
895 if alldirs is None:
894 alldirs = util.dirs(dmap)
896 alldirs = util.dirs(dmap)
895 if nf in alldirs:
897 if nf in alldirs:
896 if matchedir:
898 if matchedir:
897 matchedir(nf)
899 matchedir(nf)
898 notfoundadd(nf)
900 notfoundadd(nf)
899 else:
901 else:
900 badfn(ff, inst.strerror)
902 badfn(ff, inst.strerror)
901
903
902 # Case insensitive filesystems cannot rely on lstat() failing to detect
904 # Case insensitive filesystems cannot rely on lstat() failing to detect
903 # a case-only rename. Prune the stat object for any file that does not
905 # a case-only rename. Prune the stat object for any file that does not
904 # match the case in the filesystem, if there are multiple files that
906 # match the case in the filesystem, if there are multiple files that
905 # normalize to the same path.
907 # normalize to the same path.
906 if match.isexact() and self._checkcase:
908 if match.isexact() and self._checkcase:
907 normed = {}
909 normed = {}
908
910
909 for f, st in results.iteritems():
911 for f, st in results.iteritems():
910 if st is None:
912 if st is None:
911 continue
913 continue
912
914
913 nc = util.normcase(f)
915 nc = util.normcase(f)
914 paths = normed.get(nc)
916 paths = normed.get(nc)
915
917
916 if paths is None:
918 if paths is None:
917 paths = set()
919 paths = set()
918 normed[nc] = paths
920 normed[nc] = paths
919
921
920 paths.add(f)
922 paths.add(f)
921
923
922 for norm, paths in normed.iteritems():
924 for norm, paths in normed.iteritems():
923 if len(paths) > 1:
925 if len(paths) > 1:
924 for path in paths:
926 for path in paths:
925 folded = self._discoverpath(path, norm, True, None,
927 folded = self._discoverpath(path, norm, True, None,
926 self._dirfoldmap)
928 self._dirfoldmap)
927 if path != folded:
929 if path != folded:
928 results[path] = None
930 results[path] = None
929
931
930 return results, dirsfound, dirsnotfound
932 return results, dirsfound, dirsnotfound
931
933
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).
        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = osutil.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd), inst.strerror)
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that traverse doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn, b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = dmap.keys()
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was
                # stat'ed and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                nf = iter(visit).next
                for st in util.statfiles([join(i) for i in visit]):
                    results[nf()] = st
        return results

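    # Illustration only (not part of the changeset under review): a hedged
    # sketch of driving walk() from a caller. The helper name, the 'repo'
    # argument and the matchmod.always(root, cwd) constructor are assumptions
    # for the example; walk() itself is documented above.
    def _walksketch(self, repo):
        m = matchmod.always(repo.root, '')
        found = {}
        for fn, st in self.walk(m, subrepos=[], unknown=True,
                                ignored=False).iteritems():
            # st is a stat-like object for files seen on disk, or None for
            # entries only known to the dirstate (e.g. behind a symlink dir)
            found[fn] = st is not None
        return found
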
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        ladd = lookup.append # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but
            # not written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing
            # the tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))

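    # Illustration only (not part of the changeset under review): a hedged
    # sketch of consuming the (unsure, status) pair returned above. The
    # helper name is hypothetical; callers higher up the stack are the ones
    # expected to re-read the "unsure" files and settle whether they are
    # clean or modified.
    def _statussketch(self, match):
        unsure, s = self.status(match, [], ignored=False, clean=False,
                                unknown=True)
        # s is a scmutil.status object whose fields are plain lists
        return {
            'maybe-modified': unsure,   # same size, mtime differs -> re-read
            'modified': s.modified,
            'added': s.added,
            'removed': s.removed,
            'deleted': s.deleted,
            'unknown': s.unknown,
        }
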
    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files
            # is much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just
            # return that
            return list(files)
        return [f for f in dmap if match(f)]

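    # Illustration only (not part of the changeset under review): a small
    # sketch of the exact-matcher fast path mentioned above. The helper name
    # is hypothetical; matchmod.exact(root, cwd, files) builds a matcher over
    # a fixed file list, so matches() just filters that list against the
    # dirstate map instead of scanning the whole map.
    def _matchessketch(self, repo, files):
        m = matchmod.exact(repo.root, '', files)
        # returns the subset of 'files' known to the dirstate, in any state
        return self.matches(m)
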
    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def savebackup(self, tr, suffix='', prefix=''):
        '''Save current dirstate into backup file with suffix'''
        filename = self._actualfilename(tr)

        # use '_writedirstate' instead of 'write' to make sure changes are
        # always written out, because the latter skips writing while a
        # transaction is running. The output file is then used to create
        # the backup of the dirstate at this point.
        self._writedirstate(self._opener(filename, "w", atomictemp=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that the pending file written above is unlinked on
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        self._opener.write(prefix + filename + suffix,
                           self._opener.tryread(filename))

    def restorebackup(self, tr, suffix='', prefix=''):
        '''Restore dirstate from the backup file with suffix'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from the backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        self._opener.rename(prefix + filename + suffix, filename)

    def clearbackup(self, tr, suffix='', prefix=''):
        '''Clear the backup file with suffix'''
        filename = self._actualfilename(tr)
        self._opener.unlink(prefix + filename + suffix)
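
    # Illustration only (not part of the changeset under review): a hedged
    # sketch of the backup round-trip the three methods above provide. The
    # helper name, the 'operation' callable and the '.example' suffix are
    # assumptions for the example.
    def _backupsketch(self, tr, operation):
        self.savebackup(tr, suffix='.example')      # snapshot the dirstate
        try:
            operation()
        except Exception:
            # on failure, drop in-memory state and put the snapshot back
            self.restorebackup(tr, suffix='.example')
            raise
        else:
            # on success the snapshot is no longer needed
            self.clearbackup(tr, suffix='.example')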