devel: officially deprecate dirstate.write without transaction argument...
Pierre-Yves David
r29149:2e40fada default
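The change below replaces the develwarn() call in dirstate.write() with a versioned deprecation warning: callers are expected to pass the current transaction (which may be None) rather than calling write() with no argument. A minimal sketch of the preferred calling convention, assuming a localrepo instance named 'repo' that is not part of this diff:

# Sketch only: 'repo' is an assumed localrepo instance held under the wlock.
repo.dirstate.normal('some/file')      # mutate the in-memory dirstate
tr = repo.currenttransaction()         # may be None outside a transaction
repo.dirstate.write(tr)                # preferred: pass the transaction explicitly
# repo.dirstate.write()                # deprecated: now triggers ui.deprecwarn(..., '3.9')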
@@ -1,1242 +1,1242 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid
16 from .node import nullid
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 match as matchmod,
20 match as matchmod,
21 osutil,
21 osutil,
22 parsers,
22 parsers,
23 pathutil,
23 pathutil,
24 scmutil,
24 scmutil,
25 util,
25 util,
26 )
26 )
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29 filecache = scmutil.filecache
29 filecache = scmutil.filecache
30 _rangemask = 0x7fffffff
30 _rangemask = 0x7fffffff
31
31
32 dirstatetuple = parsers.dirstatetuple
32 dirstatetuple = parsers.dirstatetuple
33
33
34 class repocache(filecache):
34 class repocache(filecache):
35 """filecache for files in .hg/"""
35 """filecache for files in .hg/"""
36 def join(self, obj, fname):
36 def join(self, obj, fname):
37 return obj._opener.join(fname)
37 return obj._opener.join(fname)
38
38
39 class rootcache(filecache):
39 class rootcache(filecache):
40 """filecache for files in the repository root"""
40 """filecache for files in the repository root"""
41 def join(self, obj, fname):
41 def join(self, obj, fname):
42 return obj._join(fname)
42 return obj._join(fname)
43
43
44 def _getfsnow(vfs):
44 def _getfsnow(vfs):
45 '''Get "now" timestamp on filesystem'''
45 '''Get "now" timestamp on filesystem'''
46 tmpfd, tmpname = vfs.mkstemp()
46 tmpfd, tmpname = vfs.mkstemp()
47 try:
47 try:
48 return os.fstat(tmpfd).st_mtime
48 return os.fstat(tmpfd).st_mtime
49 finally:
49 finally:
50 os.close(tmpfd)
50 os.close(tmpfd)
51 vfs.unlink(tmpname)
51 vfs.unlink(tmpname)
52
52
53 def nonnormalentries(dmap):
53 def nonnormalentries(dmap):
54 '''Compute the nonnormal dirstate entries from the dmap'''
54 '''Compute the nonnormal dirstate entries from the dmap'''
55 try:
55 try:
56 return parsers.nonnormalentries(dmap)
56 return parsers.nonnormalentries(dmap)
57 except AttributeError:
57 except AttributeError:
58 return set(fname for fname, e in dmap.iteritems()
58 return set(fname for fname, e in dmap.iteritems()
59 if e[0] != 'n' or e[3] == -1)
59 if e[0] != 'n' or e[3] == -1)
60
60
61 def _trypending(root, vfs, filename):
61 def _trypending(root, vfs, filename):
62 '''Open file to be read according to HG_PENDING environment variable
62 '''Open file to be read according to HG_PENDING environment variable
63
63
64 This opens '.pending' of specified 'filename' only when HG_PENDING
64 This opens '.pending' of specified 'filename' only when HG_PENDING
65 is equal to 'root'.
65 is equal to 'root'.
66
66
67 This returns '(fp, is_pending_opened)' tuple.
67 This returns '(fp, is_pending_opened)' tuple.
68 '''
68 '''
69 if root == os.environ.get('HG_PENDING'):
69 if root == os.environ.get('HG_PENDING'):
70 try:
70 try:
71 return (vfs('%s.pending' % filename), True)
71 return (vfs('%s.pending' % filename), True)
72 except IOError as inst:
72 except IOError as inst:
73 if inst.errno != errno.ENOENT:
73 if inst.errno != errno.ENOENT:
74 raise
74 raise
75 return (vfs(filename), False)
75 return (vfs(filename), False)
76
76
77 class dirstate(object):
77 class dirstate(object):
78
78
79 def __init__(self, opener, ui, root, validate):
79 def __init__(self, opener, ui, root, validate):
80 '''Create a new dirstate object.
80 '''Create a new dirstate object.
81
81
82 opener is an open()-like callable that can be used to open the
82 opener is an open()-like callable that can be used to open the
83 dirstate file; root is the root of the directory tracked by
83 dirstate file; root is the root of the directory tracked by
84 the dirstate.
84 the dirstate.
85 '''
85 '''
86 self._opener = opener
86 self._opener = opener
87 self._validate = validate
87 self._validate = validate
88 self._root = root
88 self._root = root
89 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
89 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
90 # UNC path pointing to root share (issue4557)
90 # UNC path pointing to root share (issue4557)
91 self._rootdir = pathutil.normasprefix(root)
91 self._rootdir = pathutil.normasprefix(root)
92 # internal config: ui.forcecwd
92 # internal config: ui.forcecwd
93 forcecwd = ui.config('ui', 'forcecwd')
93 forcecwd = ui.config('ui', 'forcecwd')
94 if forcecwd:
94 if forcecwd:
95 self._cwd = forcecwd
95 self._cwd = forcecwd
96 self._dirty = False
96 self._dirty = False
97 self._dirtypl = False
97 self._dirtypl = False
98 self._lastnormaltime = 0
98 self._lastnormaltime = 0
99 self._ui = ui
99 self._ui = ui
100 self._filecache = {}
100 self._filecache = {}
101 self._parentwriters = 0
101 self._parentwriters = 0
102 self._filename = 'dirstate'
102 self._filename = 'dirstate'
103 self._pendingfilename = '%s.pending' % self._filename
103 self._pendingfilename = '%s.pending' % self._filename
104
104
105 # for consistent view between _pl() and _read() invocations
105 # for consistent view between _pl() and _read() invocations
106 self._pendingmode = None
106 self._pendingmode = None
107
107
108 def beginparentchange(self):
108 def beginparentchange(self):
109 '''Marks the beginning of a set of changes that involve changing
109 '''Marks the beginning of a set of changes that involve changing
110 the dirstate parents. If there is an exception during this time,
110 the dirstate parents. If there is an exception during this time,
111 the dirstate will not be written when the wlock is released. This
111 the dirstate will not be written when the wlock is released. This
112 prevents writing an incoherent dirstate where the parent doesn't
112 prevents writing an incoherent dirstate where the parent doesn't
113 match the contents.
113 match the contents.
114 '''
114 '''
115 self._parentwriters += 1
115 self._parentwriters += 1
116
116
117 def endparentchange(self):
117 def endparentchange(self):
118 '''Marks the end of a set of changes that involve changing the
118 '''Marks the end of a set of changes that involve changing the
119 dirstate parents. Once all parent changes have been marked done,
119 dirstate parents. Once all parent changes have been marked done,
120 the wlock will be free to write the dirstate on release.
120 the wlock will be free to write the dirstate on release.
121 '''
121 '''
122 if self._parentwriters > 0:
122 if self._parentwriters > 0:
123 self._parentwriters -= 1
123 self._parentwriters -= 1
124
124
125 def pendingparentchange(self):
125 def pendingparentchange(self):
126 '''Returns true if the dirstate is in the middle of a set of changes
126 '''Returns true if the dirstate is in the middle of a set of changes
127 that modify the dirstate parent.
127 that modify the dirstate parent.
128 '''
128 '''
129 return self._parentwriters > 0
129 return self._parentwriters > 0
130
130
131 @propertycache
131 @propertycache
132 def _map(self):
132 def _map(self):
133 '''Return the dirstate contents as a map from filename to
133 '''Return the dirstate contents as a map from filename to
134 (state, mode, size, time).'''
134 (state, mode, size, time).'''
135 self._read()
135 self._read()
136 return self._map
136 return self._map
137
137
138 @propertycache
138 @propertycache
139 def _copymap(self):
139 def _copymap(self):
140 self._read()
140 self._read()
141 return self._copymap
141 return self._copymap
142
142
143 @propertycache
143 @propertycache
144 def _nonnormalset(self):
144 def _nonnormalset(self):
145 return nonnormalentries(self._map)
145 return nonnormalentries(self._map)
146
146
147 @propertycache
147 @propertycache
148 def _filefoldmap(self):
148 def _filefoldmap(self):
149 try:
149 try:
150 makefilefoldmap = parsers.make_file_foldmap
150 makefilefoldmap = parsers.make_file_foldmap
151 except AttributeError:
151 except AttributeError:
152 pass
152 pass
153 else:
153 else:
154 return makefilefoldmap(self._map, util.normcasespec,
154 return makefilefoldmap(self._map, util.normcasespec,
155 util.normcasefallback)
155 util.normcasefallback)
156
156
157 f = {}
157 f = {}
158 normcase = util.normcase
158 normcase = util.normcase
159 for name, s in self._map.iteritems():
159 for name, s in self._map.iteritems():
160 if s[0] != 'r':
160 if s[0] != 'r':
161 f[normcase(name)] = name
161 f[normcase(name)] = name
162 f['.'] = '.' # prevents useless util.fspath() invocation
162 f['.'] = '.' # prevents useless util.fspath() invocation
163 return f
163 return f
164
164
165 @propertycache
165 @propertycache
166 def _dirfoldmap(self):
166 def _dirfoldmap(self):
167 f = {}
167 f = {}
168 normcase = util.normcase
168 normcase = util.normcase
169 for name in self._dirs:
169 for name in self._dirs:
170 f[normcase(name)] = name
170 f[normcase(name)] = name
171 return f
171 return f
172
172
173 @repocache('branch')
173 @repocache('branch')
174 def _branch(self):
174 def _branch(self):
175 try:
175 try:
176 return self._opener.read("branch").strip() or "default"
176 return self._opener.read("branch").strip() or "default"
177 except IOError as inst:
177 except IOError as inst:
178 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
179 raise
179 raise
180 return "default"
180 return "default"
181
181
182 @propertycache
182 @propertycache
183 def _pl(self):
183 def _pl(self):
184 try:
184 try:
185 fp = self._opendirstatefile()
185 fp = self._opendirstatefile()
186 st = fp.read(40)
186 st = fp.read(40)
187 fp.close()
187 fp.close()
188 l = len(st)
188 l = len(st)
189 if l == 40:
189 if l == 40:
190 return st[:20], st[20:40]
190 return st[:20], st[20:40]
191 elif l > 0 and l < 40:
191 elif l > 0 and l < 40:
192 raise error.Abort(_('working directory state appears damaged!'))
192 raise error.Abort(_('working directory state appears damaged!'))
193 except IOError as err:
193 except IOError as err:
194 if err.errno != errno.ENOENT:
194 if err.errno != errno.ENOENT:
195 raise
195 raise
196 return [nullid, nullid]
196 return [nullid, nullid]
197
197
198 @propertycache
198 @propertycache
199 def _dirs(self):
199 def _dirs(self):
200 return util.dirs(self._map, 'r')
200 return util.dirs(self._map, 'r')
201
201
202 def dirs(self):
202 def dirs(self):
203 return self._dirs
203 return self._dirs
204
204
205 @rootcache('.hgignore')
205 @rootcache('.hgignore')
206 def _ignore(self):
206 def _ignore(self):
207 files = self._ignorefiles()
207 files = self._ignorefiles()
208 if not files:
208 if not files:
209 return util.never
209 return util.never
210
210
211 pats = ['include:%s' % f for f in files]
211 pats = ['include:%s' % f for f in files]
212 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
212 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
213
213
214 @propertycache
214 @propertycache
215 def _slash(self):
215 def _slash(self):
216 return self._ui.configbool('ui', 'slash') and os.sep != '/'
216 return self._ui.configbool('ui', 'slash') and os.sep != '/'
217
217
218 @propertycache
218 @propertycache
219 def _checklink(self):
219 def _checklink(self):
220 return util.checklink(self._root)
220 return util.checklink(self._root)
221
221
222 @propertycache
222 @propertycache
223 def _checkexec(self):
223 def _checkexec(self):
224 return util.checkexec(self._root)
224 return util.checkexec(self._root)
225
225
226 @propertycache
226 @propertycache
227 def _checkcase(self):
227 def _checkcase(self):
228 return not util.checkcase(self._join('.hg'))
228 return not util.checkcase(self._join('.hg'))
229
229
230 def _join(self, f):
230 def _join(self, f):
231 # much faster than os.path.join()
231 # much faster than os.path.join()
232 # it's safe because f is always a relative path
232 # it's safe because f is always a relative path
233 return self._rootdir + f
233 return self._rootdir + f
234
234
235 def flagfunc(self, buildfallback):
235 def flagfunc(self, buildfallback):
236 if self._checklink and self._checkexec:
236 if self._checklink and self._checkexec:
237 def f(x):
237 def f(x):
238 try:
238 try:
239 st = os.lstat(self._join(x))
239 st = os.lstat(self._join(x))
240 if util.statislink(st):
240 if util.statislink(st):
241 return 'l'
241 return 'l'
242 if util.statisexec(st):
242 if util.statisexec(st):
243 return 'x'
243 return 'x'
244 except OSError:
244 except OSError:
245 pass
245 pass
246 return ''
246 return ''
247 return f
247 return f
248
248
249 fallback = buildfallback()
249 fallback = buildfallback()
250 if self._checklink:
250 if self._checklink:
251 def f(x):
251 def f(x):
252 if os.path.islink(self._join(x)):
252 if os.path.islink(self._join(x)):
253 return 'l'
253 return 'l'
254 if 'x' in fallback(x):
254 if 'x' in fallback(x):
255 return 'x'
255 return 'x'
256 return ''
256 return ''
257 return f
257 return f
258 if self._checkexec:
258 if self._checkexec:
259 def f(x):
259 def f(x):
260 if 'l' in fallback(x):
260 if 'l' in fallback(x):
261 return 'l'
261 return 'l'
262 if util.isexec(self._join(x)):
262 if util.isexec(self._join(x)):
263 return 'x'
263 return 'x'
264 return ''
264 return ''
265 return f
265 return f
266 else:
266 else:
267 return fallback
267 return fallback
268
268
269 @propertycache
269 @propertycache
270 def _cwd(self):
270 def _cwd(self):
271 return os.getcwd()
271 return os.getcwd()
272
272
273 def getcwd(self):
273 def getcwd(self):
274 '''Return the path from which a canonical path is calculated.
274 '''Return the path from which a canonical path is calculated.
275
275
276 This path should be used to resolve file patterns or to convert
276 This path should be used to resolve file patterns or to convert
277 canonical paths back to file paths for display. It shouldn't be
277 canonical paths back to file paths for display. It shouldn't be
278 used to get real file paths. Use vfs functions instead.
278 used to get real file paths. Use vfs functions instead.
279 '''
279 '''
280 cwd = self._cwd
280 cwd = self._cwd
281 if cwd == self._root:
281 if cwd == self._root:
282 return ''
282 return ''
283 # self._root ends with a path separator if self._root is '/' or 'C:\'
283 # self._root ends with a path separator if self._root is '/' or 'C:\'
284 rootsep = self._root
284 rootsep = self._root
285 if not util.endswithsep(rootsep):
285 if not util.endswithsep(rootsep):
286 rootsep += os.sep
286 rootsep += os.sep
287 if cwd.startswith(rootsep):
287 if cwd.startswith(rootsep):
288 return cwd[len(rootsep):]
288 return cwd[len(rootsep):]
289 else:
289 else:
290 # we're outside the repo. return an absolute path.
290 # we're outside the repo. return an absolute path.
291 return cwd
291 return cwd
292
292
293 def pathto(self, f, cwd=None):
293 def pathto(self, f, cwd=None):
294 if cwd is None:
294 if cwd is None:
295 cwd = self.getcwd()
295 cwd = self.getcwd()
296 path = util.pathto(self._root, cwd, f)
296 path = util.pathto(self._root, cwd, f)
297 if self._slash:
297 if self._slash:
298 return util.pconvert(path)
298 return util.pconvert(path)
299 return path
299 return path
300
300
301 def __getitem__(self, key):
301 def __getitem__(self, key):
302 '''Return the current state of key (a filename) in the dirstate.
302 '''Return the current state of key (a filename) in the dirstate.
303
303
304 States are:
304 States are:
305 n normal
305 n normal
306 m needs merging
306 m needs merging
307 r marked for removal
307 r marked for removal
308 a marked for addition
308 a marked for addition
309 ? not tracked
309 ? not tracked
310 '''
310 '''
311 return self._map.get(key, ("?",))[0]
311 return self._map.get(key, ("?",))[0]
312
312
313 def __contains__(self, key):
313 def __contains__(self, key):
314 return key in self._map
314 return key in self._map
315
315
316 def __iter__(self):
316 def __iter__(self):
317 for x in sorted(self._map):
317 for x in sorted(self._map):
318 yield x
318 yield x
319
319
320 def iteritems(self):
320 def iteritems(self):
321 return self._map.iteritems()
321 return self._map.iteritems()
322
322
323 def parents(self):
323 def parents(self):
324 return [self._validate(p) for p in self._pl]
324 return [self._validate(p) for p in self._pl]
325
325
326 def p1(self):
326 def p1(self):
327 return self._validate(self._pl[0])
327 return self._validate(self._pl[0])
328
328
329 def p2(self):
329 def p2(self):
330 return self._validate(self._pl[1])
330 return self._validate(self._pl[1])
331
331
332 def branch(self):
332 def branch(self):
333 return encoding.tolocal(self._branch)
333 return encoding.tolocal(self._branch)
334
334
335 def setparents(self, p1, p2=nullid):
335 def setparents(self, p1, p2=nullid):
336 """Set dirstate parents to p1 and p2.
336 """Set dirstate parents to p1 and p2.
337
337
338 When moving from two parents to one, 'm' merged entries are
339 adjusted to normal and previous copy records discarded and
340 returned by the call.
341
341
342 See localrepo.setparents()
342 See localrepo.setparents()
343 """
343 """
344 if self._parentwriters == 0:
344 if self._parentwriters == 0:
345 raise ValueError("cannot set dirstate parent without "
345 raise ValueError("cannot set dirstate parent without "
346 "calling dirstate.beginparentchange")
346 "calling dirstate.beginparentchange")
347
347
348 self._dirty = self._dirtypl = True
348 self._dirty = self._dirtypl = True
349 oldp2 = self._pl[1]
349 oldp2 = self._pl[1]
350 self._pl = p1, p2
350 self._pl = p1, p2
351 copies = {}
351 copies = {}
352 if oldp2 != nullid and p2 == nullid:
352 if oldp2 != nullid and p2 == nullid:
353 for f, s in self._map.iteritems():
353 for f, s in self._map.iteritems():
354 # Discard 'm' markers when moving away from a merge state
354 # Discard 'm' markers when moving away from a merge state
355 if s[0] == 'm':
355 if s[0] == 'm':
356 if f in self._copymap:
356 if f in self._copymap:
357 copies[f] = self._copymap[f]
357 copies[f] = self._copymap[f]
358 self.normallookup(f)
358 self.normallookup(f)
359 # Also fix up otherparent markers
359 # Also fix up otherparent markers
360 elif s[0] == 'n' and s[2] == -2:
360 elif s[0] == 'n' and s[2] == -2:
361 if f in self._copymap:
361 if f in self._copymap:
362 copies[f] = self._copymap[f]
362 copies[f] = self._copymap[f]
363 self.add(f)
363 self.add(f)
364 return copies
364 return copies
365
365
366 def setbranch(self, branch):
366 def setbranch(self, branch):
367 self._branch = encoding.fromlocal(branch)
367 self._branch = encoding.fromlocal(branch)
368 f = self._opener('branch', 'w', atomictemp=True)
368 f = self._opener('branch', 'w', atomictemp=True)
369 try:
369 try:
370 f.write(self._branch + '\n')
370 f.write(self._branch + '\n')
371 f.close()
371 f.close()
372
372
373 # make sure filecache has the correct stat info for _branch after
373 # make sure filecache has the correct stat info for _branch after
374 # replacing the underlying file
374 # replacing the underlying file
375 ce = self._filecache['_branch']
375 ce = self._filecache['_branch']
376 if ce:
376 if ce:
377 ce.refresh()
377 ce.refresh()
378 except: # re-raises
378 except: # re-raises
379 f.discard()
379 f.discard()
380 raise
380 raise
381
381
382 def _opendirstatefile(self):
382 def _opendirstatefile(self):
383 fp, mode = _trypending(self._root, self._opener, self._filename)
383 fp, mode = _trypending(self._root, self._opener, self._filename)
384 if self._pendingmode is not None and self._pendingmode != mode:
384 if self._pendingmode is not None and self._pendingmode != mode:
385 fp.close()
385 fp.close()
386 raise error.Abort(_('working directory state may be '
386 raise error.Abort(_('working directory state may be '
387 'changed parallelly'))
387 'changed parallelly'))
388 self._pendingmode = mode
388 self._pendingmode = mode
389 return fp
389 return fp
390
390
391 def _read(self):
391 def _read(self):
392 self._map = {}
392 self._map = {}
393 self._copymap = {}
393 self._copymap = {}
394 try:
394 try:
395 fp = self._opendirstatefile()
395 fp = self._opendirstatefile()
396 try:
396 try:
397 st = fp.read()
397 st = fp.read()
398 finally:
398 finally:
399 fp.close()
399 fp.close()
400 except IOError as err:
400 except IOError as err:
401 if err.errno != errno.ENOENT:
401 if err.errno != errno.ENOENT:
402 raise
402 raise
403 return
403 return
404 if not st:
404 if not st:
405 return
405 return
406
406
407 if util.safehasattr(parsers, 'dict_new_presized'):
407 if util.safehasattr(parsers, 'dict_new_presized'):
408 # Make an estimate of the number of files in the dirstate based on
408 # Make an estimate of the number of files in the dirstate based on
409 # its size. From a linear regression on a set of real-world repos,
409 # its size. From a linear regression on a set of real-world repos,
410 # all over 10,000 files, the size of a dirstate entry is 85
410 # all over 10,000 files, the size of a dirstate entry is 85
411 # bytes. The cost of resizing is significantly higher than the cost
411 # bytes. The cost of resizing is significantly higher than the cost
412 # of filling in a larger presized dict, so subtract 20% from the
412 # of filling in a larger presized dict, so subtract 20% from the
413 # size.
413 # size.
414 #
414 #
415 # This heuristic is imperfect in many ways, so in a future dirstate
415 # This heuristic is imperfect in many ways, so in a future dirstate
416 # format update it makes sense to just record the number of entries
416 # format update it makes sense to just record the number of entries
417 # on write.
417 # on write.
418 self._map = parsers.dict_new_presized(len(st) / 71)
418 self._map = parsers.dict_new_presized(len(st) / 71)
419
419
420 # Python's garbage collector triggers a GC each time a certain number
420 # Python's garbage collector triggers a GC each time a certain number
421 # of container objects (the number being defined by
421 # of container objects (the number being defined by
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
423 # for each file in the dirstate. The C version then immediately marks
423 # for each file in the dirstate. The C version then immediately marks
424 # them as not to be tracked by the collector. However, this has no
424 # them as not to be tracked by the collector. However, this has no
425 # effect on when GCs are triggered, only on what objects the GC looks
425 # effect on when GCs are triggered, only on what objects the GC looks
426 # into. This means that O(number of files) GCs are unavoidable.
426 # into. This means that O(number of files) GCs are unavoidable.
427 # Depending on when in the process's lifetime the dirstate is parsed,
427 # Depending on when in the process's lifetime the dirstate is parsed,
428 # this can get very expensive. As a workaround, disable GC while
428 # this can get very expensive. As a workaround, disable GC while
429 # parsing the dirstate.
429 # parsing the dirstate.
430 #
430 #
431 # (we cannot decorate the function directly since it is in a C module)
431 # (we cannot decorate the function directly since it is in a C module)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
433 p = parse_dirstate(self._map, self._copymap, st)
433 p = parse_dirstate(self._map, self._copymap, st)
434 if not self._dirtypl:
434 if not self._dirtypl:
435 self._pl = p
435 self._pl = p
436
436
437 def invalidate(self):
437 def invalidate(self):
438 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
438 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
439 "_pl", "_dirs", "_ignore", "_nonnormalset"):
439 "_pl", "_dirs", "_ignore", "_nonnormalset"):
440 if a in self.__dict__:
440 if a in self.__dict__:
441 delattr(self, a)
441 delattr(self, a)
442 self._lastnormaltime = 0
442 self._lastnormaltime = 0
443 self._dirty = False
443 self._dirty = False
444 self._parentwriters = 0
444 self._parentwriters = 0
445
445
446 def copy(self, source, dest):
446 def copy(self, source, dest):
447 """Mark dest as a copy of source. Unmark dest if source is None."""
447 """Mark dest as a copy of source. Unmark dest if source is None."""
448 if source == dest:
448 if source == dest:
449 return
449 return
450 self._dirty = True
450 self._dirty = True
451 if source is not None:
451 if source is not None:
452 self._copymap[dest] = source
452 self._copymap[dest] = source
453 elif dest in self._copymap:
453 elif dest in self._copymap:
454 del self._copymap[dest]
454 del self._copymap[dest]
455
455
456 def copied(self, file):
456 def copied(self, file):
457 return self._copymap.get(file, None)
457 return self._copymap.get(file, None)
458
458
459 def copies(self):
459 def copies(self):
460 return self._copymap
460 return self._copymap
461
461
462 def _droppath(self, f):
462 def _droppath(self, f):
463 if self[f] not in "?r" and "_dirs" in self.__dict__:
463 if self[f] not in "?r" and "_dirs" in self.__dict__:
464 self._dirs.delpath(f)
464 self._dirs.delpath(f)
465
465
466 if "_filefoldmap" in self.__dict__:
466 if "_filefoldmap" in self.__dict__:
467 normed = util.normcase(f)
467 normed = util.normcase(f)
468 if normed in self._filefoldmap:
468 if normed in self._filefoldmap:
469 del self._filefoldmap[normed]
469 del self._filefoldmap[normed]
470
470
471 def _addpath(self, f, state, mode, size, mtime):
471 def _addpath(self, f, state, mode, size, mtime):
472 oldstate = self[f]
472 oldstate = self[f]
473 if state == 'a' or oldstate == 'r':
473 if state == 'a' or oldstate == 'r':
474 scmutil.checkfilename(f)
474 scmutil.checkfilename(f)
475 if f in self._dirs:
475 if f in self._dirs:
476 raise error.Abort(_('directory %r already in dirstate') % f)
476 raise error.Abort(_('directory %r already in dirstate') % f)
477 # shadows
477 # shadows
478 for d in util.finddirs(f):
478 for d in util.finddirs(f):
479 if d in self._dirs:
479 if d in self._dirs:
480 break
480 break
481 if d in self._map and self[d] != 'r':
481 if d in self._map and self[d] != 'r':
482 raise error.Abort(
482 raise error.Abort(
483 _('file %r in dirstate clashes with %r') % (d, f))
483 _('file %r in dirstate clashes with %r') % (d, f))
484 if oldstate in "?r" and "_dirs" in self.__dict__:
484 if oldstate in "?r" and "_dirs" in self.__dict__:
485 self._dirs.addpath(f)
485 self._dirs.addpath(f)
486 self._dirty = True
486 self._dirty = True
487 self._map[f] = dirstatetuple(state, mode, size, mtime)
487 self._map[f] = dirstatetuple(state, mode, size, mtime)
488 if state != 'n' or mtime == -1:
488 if state != 'n' or mtime == -1:
489 self._nonnormalset.add(f)
489 self._nonnormalset.add(f)
490
490
491 def normal(self, f):
491 def normal(self, f):
492 '''Mark a file normal and clean.'''
492 '''Mark a file normal and clean.'''
493 s = os.lstat(self._join(f))
493 s = os.lstat(self._join(f))
494 mtime = s.st_mtime
494 mtime = s.st_mtime
495 self._addpath(f, 'n', s.st_mode,
495 self._addpath(f, 'n', s.st_mode,
496 s.st_size & _rangemask, mtime & _rangemask)
496 s.st_size & _rangemask, mtime & _rangemask)
497 if f in self._copymap:
497 if f in self._copymap:
498 del self._copymap[f]
498 del self._copymap[f]
499 if f in self._nonnormalset:
499 if f in self._nonnormalset:
500 self._nonnormalset.remove(f)
500 self._nonnormalset.remove(f)
501 if mtime > self._lastnormaltime:
501 if mtime > self._lastnormaltime:
502 # Remember the most recent modification timeslot for status(),
502 # Remember the most recent modification timeslot for status(),
503 # to make sure we won't miss future size-preserving file content
503 # to make sure we won't miss future size-preserving file content
504 # modifications that happen within the same timeslot.
504 # modifications that happen within the same timeslot.
505 self._lastnormaltime = mtime
505 self._lastnormaltime = mtime
506
506
507 def normallookup(self, f):
507 def normallookup(self, f):
508 '''Mark a file normal, but possibly dirty.'''
508 '''Mark a file normal, but possibly dirty.'''
509 if self._pl[1] != nullid and f in self._map:
509 if self._pl[1] != nullid and f in self._map:
510 # if there is a merge going on and the file was either
510 # if there is a merge going on and the file was either
511 # in state 'm' (-1) or coming from other parent (-2) before
511 # in state 'm' (-1) or coming from other parent (-2) before
512 # being removed, restore that state.
512 # being removed, restore that state.
513 entry = self._map[f]
513 entry = self._map[f]
514 if entry[0] == 'r' and entry[2] in (-1, -2):
514 if entry[0] == 'r' and entry[2] in (-1, -2):
515 source = self._copymap.get(f)
515 source = self._copymap.get(f)
516 if entry[2] == -1:
516 if entry[2] == -1:
517 self.merge(f)
517 self.merge(f)
518 elif entry[2] == -2:
518 elif entry[2] == -2:
519 self.otherparent(f)
519 self.otherparent(f)
520 if source:
520 if source:
521 self.copy(source, f)
521 self.copy(source, f)
522 return
522 return
523 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
523 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
524 return
524 return
525 self._addpath(f, 'n', 0, -1, -1)
525 self._addpath(f, 'n', 0, -1, -1)
526 if f in self._copymap:
526 if f in self._copymap:
527 del self._copymap[f]
527 del self._copymap[f]
528 if f in self._nonnormalset:
528 if f in self._nonnormalset:
529 self._nonnormalset.remove(f)
529 self._nonnormalset.remove(f)
530
530
531 def otherparent(self, f):
531 def otherparent(self, f):
532 '''Mark as coming from the other parent, always dirty.'''
532 '''Mark as coming from the other parent, always dirty.'''
533 if self._pl[1] == nullid:
533 if self._pl[1] == nullid:
534 raise error.Abort(_("setting %r to other parent "
534 raise error.Abort(_("setting %r to other parent "
535 "only allowed in merges") % f)
535 "only allowed in merges") % f)
536 if f in self and self[f] == 'n':
536 if f in self and self[f] == 'n':
537 # merge-like
537 # merge-like
538 self._addpath(f, 'm', 0, -2, -1)
538 self._addpath(f, 'm', 0, -2, -1)
539 else:
539 else:
540 # add-like
540 # add-like
541 self._addpath(f, 'n', 0, -2, -1)
541 self._addpath(f, 'n', 0, -2, -1)
542
542
543 if f in self._copymap:
543 if f in self._copymap:
544 del self._copymap[f]
544 del self._copymap[f]
545
545
546 def add(self, f):
546 def add(self, f):
547 '''Mark a file added.'''
547 '''Mark a file added.'''
548 self._addpath(f, 'a', 0, -1, -1)
548 self._addpath(f, 'a', 0, -1, -1)
549 if f in self._copymap:
549 if f in self._copymap:
550 del self._copymap[f]
550 del self._copymap[f]
551
551
552 def remove(self, f):
552 def remove(self, f):
553 '''Mark a file removed.'''
553 '''Mark a file removed.'''
554 self._dirty = True
554 self._dirty = True
555 self._droppath(f)
555 self._droppath(f)
556 size = 0
556 size = 0
557 if self._pl[1] != nullid and f in self._map:
557 if self._pl[1] != nullid and f in self._map:
558 # backup the previous state
558 # backup the previous state
559 entry = self._map[f]
559 entry = self._map[f]
560 if entry[0] == 'm': # merge
560 if entry[0] == 'm': # merge
561 size = -1
561 size = -1
562 elif entry[0] == 'n' and entry[2] == -2: # other parent
562 elif entry[0] == 'n' and entry[2] == -2: # other parent
563 size = -2
563 size = -2
564 self._map[f] = dirstatetuple('r', 0, size, 0)
564 self._map[f] = dirstatetuple('r', 0, size, 0)
565 self._nonnormalset.add(f)
565 self._nonnormalset.add(f)
566 if size == 0 and f in self._copymap:
566 if size == 0 and f in self._copymap:
567 del self._copymap[f]
567 del self._copymap[f]
568
568
569 def merge(self, f):
569 def merge(self, f):
570 '''Mark a file merged.'''
570 '''Mark a file merged.'''
571 if self._pl[1] == nullid:
571 if self._pl[1] == nullid:
572 return self.normallookup(f)
572 return self.normallookup(f)
573 return self.otherparent(f)
573 return self.otherparent(f)
574
574
575 def drop(self, f):
575 def drop(self, f):
576 '''Drop a file from the dirstate'''
576 '''Drop a file from the dirstate'''
577 if f in self._map:
577 if f in self._map:
578 self._dirty = True
578 self._dirty = True
579 self._droppath(f)
579 self._droppath(f)
580 del self._map[f]
580 del self._map[f]
581 if f in self._nonnormalset:
581 if f in self._nonnormalset:
582 self._nonnormalset.remove(f)
582 self._nonnormalset.remove(f)
583
583
584 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
584 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
585 if exists is None:
585 if exists is None:
586 exists = os.path.lexists(os.path.join(self._root, path))
586 exists = os.path.lexists(os.path.join(self._root, path))
587 if not exists:
587 if not exists:
588 # Maybe a path component exists
588 # Maybe a path component exists
589 if not ignoremissing and '/' in path:
589 if not ignoremissing and '/' in path:
590 d, f = path.rsplit('/', 1)
590 d, f = path.rsplit('/', 1)
591 d = self._normalize(d, False, ignoremissing, None)
591 d = self._normalize(d, False, ignoremissing, None)
592 folded = d + "/" + f
592 folded = d + "/" + f
593 else:
593 else:
594 # No path components, preserve original case
594 # No path components, preserve original case
595 folded = path
595 folded = path
596 else:
596 else:
597 # recursively normalize leading directory components
597 # recursively normalize leading directory components
598 # against dirstate
598 # against dirstate
599 if '/' in normed:
599 if '/' in normed:
600 d, f = normed.rsplit('/', 1)
600 d, f = normed.rsplit('/', 1)
601 d = self._normalize(d, False, ignoremissing, True)
601 d = self._normalize(d, False, ignoremissing, True)
602 r = self._root + "/" + d
602 r = self._root + "/" + d
603 folded = d + "/" + util.fspath(f, r)
603 folded = d + "/" + util.fspath(f, r)
604 else:
604 else:
605 folded = util.fspath(normed, self._root)
605 folded = util.fspath(normed, self._root)
606 storemap[normed] = folded
606 storemap[normed] = folded
607
607
608 return folded
608 return folded
609
609
610 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
610 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
611 normed = util.normcase(path)
611 normed = util.normcase(path)
612 folded = self._filefoldmap.get(normed, None)
612 folded = self._filefoldmap.get(normed, None)
613 if folded is None:
613 if folded is None:
614 if isknown:
614 if isknown:
615 folded = path
615 folded = path
616 else:
616 else:
617 folded = self._discoverpath(path, normed, ignoremissing, exists,
617 folded = self._discoverpath(path, normed, ignoremissing, exists,
618 self._filefoldmap)
618 self._filefoldmap)
619 return folded
619 return folded
620
620
621 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
621 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
622 normed = util.normcase(path)
622 normed = util.normcase(path)
623 folded = self._filefoldmap.get(normed, None)
623 folded = self._filefoldmap.get(normed, None)
624 if folded is None:
624 if folded is None:
625 folded = self._dirfoldmap.get(normed, None)
625 folded = self._dirfoldmap.get(normed, None)
626 if folded is None:
626 if folded is None:
627 if isknown:
627 if isknown:
628 folded = path
628 folded = path
629 else:
629 else:
630 # store discovered result in dirfoldmap so that future
630 # store discovered result in dirfoldmap so that future
631 # normalizefile calls don't start matching directories
631 # normalizefile calls don't start matching directories
632 folded = self._discoverpath(path, normed, ignoremissing, exists,
632 folded = self._discoverpath(path, normed, ignoremissing, exists,
633 self._dirfoldmap)
633 self._dirfoldmap)
634 return folded
634 return folded
635
635
636 def normalize(self, path, isknown=False, ignoremissing=False):
636 def normalize(self, path, isknown=False, ignoremissing=False):
637 '''
637 '''
638 normalize the case of a pathname when on a casefolding filesystem
638 normalize the case of a pathname when on a casefolding filesystem
639
639
640 isknown specifies whether the filename came from walking the
640 isknown specifies whether the filename came from walking the
641 disk, to avoid extra filesystem access.
641 disk, to avoid extra filesystem access.
642
642
643 If ignoremissing is True, missing paths are returned
644 unchanged. Otherwise, we try harder to normalize possibly
645 existing path components.
646
646
647 The normalized case is determined based on the following precedence:
647 The normalized case is determined based on the following precedence:
648
648
649 - version of name already stored in the dirstate
649 - version of name already stored in the dirstate
650 - version of name stored on disk
650 - version of name stored on disk
651 - version provided via command arguments
651 - version provided via command arguments
652 '''
652 '''
653
653
654 if self._checkcase:
654 if self._checkcase:
655 return self._normalize(path, isknown, ignoremissing)
655 return self._normalize(path, isknown, ignoremissing)
656 return path
656 return path
657
657
658 def clear(self):
658 def clear(self):
659 self._map = {}
659 self._map = {}
660 self._nonnormalset = set()
660 self._nonnormalset = set()
661 if "_dirs" in self.__dict__:
661 if "_dirs" in self.__dict__:
662 delattr(self, "_dirs")
662 delattr(self, "_dirs")
663 self._copymap = {}
663 self._copymap = {}
664 self._pl = [nullid, nullid]
664 self._pl = [nullid, nullid]
665 self._lastnormaltime = 0
665 self._lastnormaltime = 0
666 self._dirty = True
666 self._dirty = True
667
667
668 def rebuild(self, parent, allfiles, changedfiles=None):
668 def rebuild(self, parent, allfiles, changedfiles=None):
669 if changedfiles is None:
669 if changedfiles is None:
670 # Rebuild entire dirstate
670 # Rebuild entire dirstate
671 changedfiles = allfiles
671 changedfiles = allfiles
672 lastnormaltime = self._lastnormaltime
672 lastnormaltime = self._lastnormaltime
673 self.clear()
673 self.clear()
674 self._lastnormaltime = lastnormaltime
674 self._lastnormaltime = lastnormaltime
675
675
676 for f in changedfiles:
676 for f in changedfiles:
677 mode = 0o666
677 mode = 0o666
678 if f in allfiles and 'x' in allfiles.flags(f):
678 if f in allfiles and 'x' in allfiles.flags(f):
679 mode = 0o777
679 mode = 0o777
680
680
681 if f in allfiles:
681 if f in allfiles:
682 self._map[f] = dirstatetuple('n', mode, -1, 0)
682 self._map[f] = dirstatetuple('n', mode, -1, 0)
683 else:
683 else:
684 self._map.pop(f, None)
684 self._map.pop(f, None)
685 if f in self._nonnormalset:
685 if f in self._nonnormalset:
686 self._nonnormalset.remove(f)
686 self._nonnormalset.remove(f)
687
687
688 self._pl = (parent, nullid)
688 self._pl = (parent, nullid)
689 self._dirty = True
689 self._dirty = True
690
690
691 def write(self, tr=False):
692 if not self._dirty:
693 return
694
695 filename = self._filename
696 if tr is False: # not explicitly specified
697 - self._ui.develwarn('use dirstate.write with '
697 + self._ui.deprecwarn('use dirstate.write with '
698 'repo.currenttransaction()',
699 - config='check-dirstate-write')
699 + '3.9')
700
700
701 if self._opener.lexists(self._pendingfilename):
701 if self._opener.lexists(self._pendingfilename):
702 # if pending file already exists, in-memory changes
702 # if pending file already exists, in-memory changes
703 # should be written into it, because it has priority
703 # should be written into it, because it has priority
704 # to '.hg/dirstate' at reading under HG_PENDING mode
704 # to '.hg/dirstate' at reading under HG_PENDING mode
705 filename = self._pendingfilename
705 filename = self._pendingfilename
706 elif tr:
706 elif tr:
707 # 'dirstate.write()' is not only for writing in-memory
707 # 'dirstate.write()' is not only for writing in-memory
708 # changes out, but also for dropping ambiguous timestamp.
708 # changes out, but also for dropping ambiguous timestamp.
709 # delayed writing re-raises the "ambiguous timestamp issue".
710 # See also the wiki page below for detail:
710 # See also the wiki page below for detail:
711 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
711 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
712
712
713 # emulate dropping timestamp in 'parsers.pack_dirstate'
713 # emulate dropping timestamp in 'parsers.pack_dirstate'
714 now = _getfsnow(self._opener)
714 now = _getfsnow(self._opener)
715 dmap = self._map
715 dmap = self._map
716 for f, e in dmap.iteritems():
716 for f, e in dmap.iteritems():
717 if e[0] == 'n' and e[3] == now:
717 if e[0] == 'n' and e[3] == now:
718 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
718 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
719 self._nonnormalset.add(f)
719 self._nonnormalset.add(f)
720
720
721 # emulate that all 'dirstate.normal' results are written out
721 # emulate that all 'dirstate.normal' results are written out
722 self._lastnormaltime = 0
722 self._lastnormaltime = 0
723
723
724 # delay writing in-memory changes out
724 # delay writing in-memory changes out
725 tr.addfilegenerator('dirstate', (self._filename,),
725 tr.addfilegenerator('dirstate', (self._filename,),
726 self._writedirstate, location='plain')
726 self._writedirstate, location='plain')
727 return
727 return
728
728
729 st = self._opener(filename, "w", atomictemp=True)
729 st = self._opener(filename, "w", atomictemp=True)
730 self._writedirstate(st)
730 self._writedirstate(st)
731
731
732 def _writedirstate(self, st):
732 def _writedirstate(self, st):
733 # use the modification time of the newly created temporary file as the
733 # use the modification time of the newly created temporary file as the
734 # filesystem's notion of 'now'
734 # filesystem's notion of 'now'
735 now = util.fstat(st).st_mtime & _rangemask
735 now = util.fstat(st).st_mtime & _rangemask
736
736
737 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
737 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
738 # timestamp of each entry in dirstate, because of 'now > mtime'
739 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
739 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
740 if delaywrite > 0:
740 if delaywrite > 0:
741 # do we have any files to delay for?
741 # do we have any files to delay for?
742 for f, e in self._map.iteritems():
742 for f, e in self._map.iteritems():
743 if e[0] == 'n' and e[3] == now:
743 if e[0] == 'n' and e[3] == now:
744 import time # to avoid useless import
744 import time # to avoid useless import
745 # rather than sleep n seconds, sleep until the next
745 # rather than sleep n seconds, sleep until the next
746 # multiple of n seconds
746 # multiple of n seconds
747 clock = time.time()
747 clock = time.time()
748 start = int(clock) - (int(clock) % delaywrite)
748 start = int(clock) - (int(clock) % delaywrite)
749 end = start + delaywrite
749 end = start + delaywrite
750 time.sleep(end - clock)
750 time.sleep(end - clock)
751 break
751 break
752
752
753 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
753 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
754 self._nonnormalset = nonnormalentries(self._map)
754 self._nonnormalset = nonnormalentries(self._map)
755 st.close()
755 st.close()
756 self._lastnormaltime = 0
756 self._lastnormaltime = 0
757 self._dirty = self._dirtypl = False
757 self._dirty = self._dirtypl = False
758
758
759 def _dirignore(self, f):
759 def _dirignore(self, f):
760 if f == '.':
760 if f == '.':
761 return False
761 return False
762 if self._ignore(f):
762 if self._ignore(f):
763 return True
763 return True
764 for p in util.finddirs(f):
764 for p in util.finddirs(f):
765 if self._ignore(p):
765 if self._ignore(p):
766 return True
766 return True
767 return False
767 return False
768
768
769 def _ignorefiles(self):
769 def _ignorefiles(self):
770 files = []
770 files = []
771 if os.path.exists(self._join('.hgignore')):
771 if os.path.exists(self._join('.hgignore')):
772 files.append(self._join('.hgignore'))
772 files.append(self._join('.hgignore'))
773 for name, path in self._ui.configitems("ui"):
773 for name, path in self._ui.configitems("ui"):
774 if name == 'ignore' or name.startswith('ignore.'):
774 if name == 'ignore' or name.startswith('ignore.'):
775 # we need to use os.path.join here rather than self._join
775 # we need to use os.path.join here rather than self._join
776 # because path is arbitrary and user-specified
776 # because path is arbitrary and user-specified
777 files.append(os.path.join(self._rootdir, util.expandpath(path)))
777 files.append(os.path.join(self._rootdir, util.expandpath(path)))
778 return files
778 return files
779
779
780 def _ignorefileandline(self, f):
780 def _ignorefileandline(self, f):
781 files = collections.deque(self._ignorefiles())
781 files = collections.deque(self._ignorefiles())
782 visited = set()
782 visited = set()
783 while files:
783 while files:
784 i = files.popleft()
784 i = files.popleft()
785 patterns = matchmod.readpatternfile(i, self._ui.warn,
785 patterns = matchmod.readpatternfile(i, self._ui.warn,
786 sourceinfo=True)
786 sourceinfo=True)
787 for pattern, lineno, line in patterns:
787 for pattern, lineno, line in patterns:
788 kind, p = matchmod._patsplit(pattern, 'glob')
788 kind, p = matchmod._patsplit(pattern, 'glob')
789 if kind == "subinclude":
789 if kind == "subinclude":
790 if p not in visited:
790 if p not in visited:
791 files.append(p)
791 files.append(p)
792 continue
792 continue
793 m = matchmod.match(self._root, '', [], [pattern],
793 m = matchmod.match(self._root, '', [], [pattern],
794 warn=self._ui.warn)
794 warn=self._ui.warn)
795 if m(f):
795 if m(f):
796 return (i, lineno, line)
796 return (i, lineno, line)
797 visited.add(i)
797 visited.add(i)
798 return (None, -1, "")
798 return (None, -1, "")
799
799
800 def _walkexplicit(self, match, subrepos):
800 def _walkexplicit(self, match, subrepos):
801 '''Get stat data about the files explicitly specified by match.
801 '''Get stat data about the files explicitly specified by match.
802
802
803 Return a triple (results, dirsfound, dirsnotfound).
803 Return a triple (results, dirsfound, dirsnotfound).
804 - results is a mapping from filename to stat result. It also contains
804 - results is a mapping from filename to stat result. It also contains
805 listings mapping subrepos and .hg to None.
805 listings mapping subrepos and .hg to None.
806 - dirsfound is a list of files found to be directories.
806 - dirsfound is a list of files found to be directories.
807 - dirsnotfound is a list of files that the dirstate thinks are
807 - dirsnotfound is a list of files that the dirstate thinks are
808 directories and that were not found.'''
808 directories and that were not found.'''
809
809
810 def badtype(mode):
810 def badtype(mode):
811 kind = _('unknown')
811 kind = _('unknown')
812 if stat.S_ISCHR(mode):
812 if stat.S_ISCHR(mode):
813 kind = _('character device')
813 kind = _('character device')
814 elif stat.S_ISBLK(mode):
814 elif stat.S_ISBLK(mode):
815 kind = _('block device')
815 kind = _('block device')
816 elif stat.S_ISFIFO(mode):
816 elif stat.S_ISFIFO(mode):
817 kind = _('fifo')
817 kind = _('fifo')
818 elif stat.S_ISSOCK(mode):
818 elif stat.S_ISSOCK(mode):
819 kind = _('socket')
819 kind = _('socket')
820 elif stat.S_ISDIR(mode):
820 elif stat.S_ISDIR(mode):
821 kind = _('directory')
821 kind = _('directory')
822 return _('unsupported file type (type is %s)') % kind
822 return _('unsupported file type (type is %s)') % kind
823
823
824 matchedir = match.explicitdir
824 matchedir = match.explicitdir
825 badfn = match.bad
825 badfn = match.bad
826 dmap = self._map
826 dmap = self._map
827 lstat = os.lstat
827 lstat = os.lstat
828 getkind = stat.S_IFMT
828 getkind = stat.S_IFMT
829 dirkind = stat.S_IFDIR
829 dirkind = stat.S_IFDIR
830 regkind = stat.S_IFREG
830 regkind = stat.S_IFREG
831 lnkkind = stat.S_IFLNK
831 lnkkind = stat.S_IFLNK
832 join = self._join
832 join = self._join
833 dirsfound = []
833 dirsfound = []
834 foundadd = dirsfound.append
834 foundadd = dirsfound.append
835 dirsnotfound = []
835 dirsnotfound = []
836 notfoundadd = dirsnotfound.append
836 notfoundadd = dirsnotfound.append
837
837
838 if not match.isexact() and self._checkcase:
838 if not match.isexact() and self._checkcase:
839 normalize = self._normalize
839 normalize = self._normalize
840 else:
840 else:
841 normalize = None
841 normalize = None
842
842
843 files = sorted(match.files())
843 files = sorted(match.files())
844 subrepos.sort()
844 subrepos.sort()
845 i, j = 0, 0
845 i, j = 0, 0
846 while i < len(files) and j < len(subrepos):
846 while i < len(files) and j < len(subrepos):
847 subpath = subrepos[j] + "/"
847 subpath = subrepos[j] + "/"
848 if files[i] < subpath:
848 if files[i] < subpath:
849 i += 1
849 i += 1
850 continue
850 continue
851 while i < len(files) and files[i].startswith(subpath):
851 while i < len(files) and files[i].startswith(subpath):
852 del files[i]
852 del files[i]
853 j += 1
853 j += 1
854
854
855 if not files or '.' in files:
855 if not files or '.' in files:
856 files = ['.']
856 files = ['.']
857 results = dict.fromkeys(subrepos)
857 results = dict.fromkeys(subrepos)
858 results['.hg'] = None
858 results['.hg'] = None
859
859
860 alldirs = None
860 alldirs = None
861 for ff in files:
861 for ff in files:
862 # constructing the foldmap is expensive, so don't do it for the
862 # constructing the foldmap is expensive, so don't do it for the
863 # common case where files is ['.']
863 # common case where files is ['.']
864 if normalize and ff != '.':
864 if normalize and ff != '.':
865 nf = normalize(ff, False, True)
865 nf = normalize(ff, False, True)
866 else:
866 else:
867 nf = ff
867 nf = ff
868 if nf in results:
868 if nf in results:
869 continue
869 continue
870
870
871 try:
871 try:
872 st = lstat(join(nf))
872 st = lstat(join(nf))
873 kind = getkind(st.st_mode)
873 kind = getkind(st.st_mode)
874 if kind == dirkind:
874 if kind == dirkind:
875 if nf in dmap:
875 if nf in dmap:
876 # file replaced by dir on disk but still in dirstate
876 # file replaced by dir on disk but still in dirstate
877 results[nf] = None
877 results[nf] = None
878 if matchedir:
878 if matchedir:
879 matchedir(nf)
879 matchedir(nf)
880 foundadd((nf, ff))
880 foundadd((nf, ff))
881 elif kind == regkind or kind == lnkkind:
881 elif kind == regkind or kind == lnkkind:
882 results[nf] = st
882 results[nf] = st
883 else:
883 else:
884 badfn(ff, badtype(kind))
884 badfn(ff, badtype(kind))
885 if nf in dmap:
885 if nf in dmap:
886 results[nf] = None
886 results[nf] = None
887 except OSError as inst: # nf not found on disk - it is dirstate only
887 except OSError as inst: # nf not found on disk - it is dirstate only
888 if nf in dmap: # does it exactly match a missing file?
888 if nf in dmap: # does it exactly match a missing file?
889 results[nf] = None
889 results[nf] = None
890 else: # does it match a missing directory?
890 else: # does it match a missing directory?
891 if alldirs is None:
891 if alldirs is None:
892 alldirs = util.dirs(dmap)
892 alldirs = util.dirs(dmap)
893 if nf in alldirs:
893 if nf in alldirs:
894 if matchedir:
894 if matchedir:
895 matchedir(nf)
895 matchedir(nf)
896 notfoundadd(nf)
896 notfoundadd(nf)
897 else:
897 else:
898 badfn(ff, inst.strerror)
898 badfn(ff, inst.strerror)
899
899
900 # Case insensitive filesystems cannot rely on lstat() failing to detect
900 # Case insensitive filesystems cannot rely on lstat() failing to detect
901 # a case-only rename. Prune the stat object for any file that does not
901 # a case-only rename. Prune the stat object for any file that does not
902 # match the case in the filesystem, if there are multiple files that
902 # match the case in the filesystem, if there are multiple files that
903 # normalize to the same path.
903 # normalize to the same path.
904 if match.isexact() and self._checkcase:
904 if match.isexact() and self._checkcase:
905 normed = {}
905 normed = {}
906
906
907 for f, st in results.iteritems():
907 for f, st in results.iteritems():
908 if st is None:
908 if st is None:
909 continue
909 continue
910
910
911 nc = util.normcase(f)
911 nc = util.normcase(f)
912 paths = normed.get(nc)
912 paths = normed.get(nc)
913
913
914 if paths is None:
914 if paths is None:
915 paths = set()
915 paths = set()
916 normed[nc] = paths
916 normed[nc] = paths
917
917
918 paths.add(f)
918 paths.add(f)
919
919
920 for norm, paths in normed.iteritems():
920 for norm, paths in normed.iteritems():
921 if len(paths) > 1:
921 if len(paths) > 1:
922 for path in paths:
922 for path in paths:
923 folded = self._discoverpath(path, norm, True, None,
923 folded = self._discoverpath(path, norm, True, None,
924 self._dirfoldmap)
924 self._dirfoldmap)
925 if path != folded:
925 if path != folded:
926 results[path] = None
926 results[path] = None
927
927
928 return results, dirsfound, dirsnotfound
928 return results, dirsfound, dirsnotfound
929
929
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = osutil.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd), inst.strerror)
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn, b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = dmap.keys()
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                nf = iter(visit).next
                for st in util.statfiles([join(i) for i in visit]):
                    results[nf()] = st
        return results

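    # Illustrative sketch (not part of the original file): a typical walk()
    # call passes a matcher and iterates the returned mapping of
    # repository-relative names to stat-like objects (or None when no stat is
    # available).  'repo' and the matcher below are assumptions made for the
    # example only.
    #
    #   from mercurial import match as matchmod
    #
    #   m = matchmod.always(repo.root, repo.root)
    #   walked = repo.dirstate.walk(m, subrepos=[], unknown=True,
    #                               ignored=False)
    #   for fn, st in walked.iteritems():
    #       if st is None:
    #           print '%s (no stat)' % fn
    #       else:
    #           print '%s: %d bytes' % (fn, st.st_size)
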
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        ladd = lookup.append            # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))

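    # Illustrative sketch (not part of the original file): how the
    # (unsure, status) pair returned by status() is usually consumed.  Files
    # in 'unsure' still need a content comparison before they can be
    # classified as clean or modified.  'repo' and the matcher are
    # assumptions made for the example only.
    #
    #   from mercurial import match as matchmod
    #
    #   m = matchmod.always(repo.root, repo.root)
    #   unsure, s = repo.dirstate.status(m, subrepos=[], ignored=False,
    #                                    clean=False, unknown=True)
    #   print 'modified:', s.modified
    #   print 'added:   ', s.added
    #   print 'removed: ', s.removed
    #   print 'lookup:  ', unsure
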
    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]

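    # Illustrative sketch (not part of the original file): matches() only
    # consults the dirstate, never the filesystem, so it also returns tracked
    # files that are currently missing from the working directory.  'repo' is
    # an assumption made for the example only.
    #
    #   from mercurial import match as matchmod
    #
    #   m = matchmod.match(repo.root, repo.root, ['glob:*.py'])
    #   tracked_python_files = repo.dirstate.matches(m)
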
    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

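    # Illustrative sketch (not part of the original file), assuming the
    # default 'dirstate' filename: while a transaction is running, writes go
    # to the pending file so that hooks honoring HG_PENDING can see the
    # in-progress state, and '.hg/dirstate' itself is only replaced when the
    # transaction commits.
    #
    #   self._actualfilename(None)  -> 'dirstate'
    #   self._actualfilename(tr)    -> 'dirstate.pending'
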
    def savebackup(self, tr, suffix):
        '''Save current dirstate into backup file with suffix'''
        filename = self._actualfilename(tr)

        # use '_writedirstate' instead of 'write' so that changes are always
        # written out, because the latter skips writing when a transaction is
        # running. The output file is used to create the dirstate backup below.
        self._writedirstate(self._opener(filename, "w", atomictemp=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        self._opener.write(filename + suffix, self._opener.tryread(filename))

    def restorebackup(self, tr, suffix):
        '''Restore dirstate from backup file with suffix'''
        # this "invalidate()" prevents "wlock.release()" from writing out
        # dirstate changes after the backup file has been restored
        self.invalidate()
        filename = self._actualfilename(tr)
        self._opener.rename(filename + suffix, filename)

    def clearbackup(self, tr, suffix):
        '''Clear backup file with suffix'''
        filename = self._actualfilename(tr)
        self._opener.unlink(filename + suffix)
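    # Illustrative sketch (not part of the original file): how the three
    # backup helpers above are typically combined by a caller that may need
    # to roll the dirstate back.  'repo', the '.example' suffix and
    # riskyworkingcopyoperation() are assumptions made for the example only.
    #
    #   wlock = repo.wlock()
    #   try:
    #       tr = repo.currenttransaction()
    #       repo.dirstate.savebackup(tr, '.example')
    #       try:
    #           riskyworkingcopyoperation()   # hypothetical helper
    #           repo.dirstate.clearbackup(tr, '.example')
    #       except Exception:
    #           repo.dirstate.restorebackup(tr, '.example')
    #           raise
    #   finally:
    #       wlock.release()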