cleanup: replace False identity testing with an explicit token object...
Pierre-Yves David - r29150:0e9ed09f default
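The change in this diff replaces False with a private sentinel object as the default value of dirstate.write(tr=...), so that an omitted argument can be distinguished from any value a caller might pass deliberately (None, False, or a transaction). Below is a minimal standalone sketch of that pattern; the names and messages are illustrative only, not the actual Mercurial code paths:

    _token = object()   # module-private sentinel; no caller can pass it by accident

    def write(tr=_token):
        if tr is _token:    # the argument was genuinely omitted
            print("deprecated: call write() with repo.currenttransaction()")
            tr = None
        if tr is not None:
            print("delaying the dirstate write through transaction %r" % (tr,))
        else:
            print("writing the dirstate immediately")

    write()         # omitted argument: takes the deprecation branch
    write(tr=None)  # explicit None: no warning, immediate write

Since object() yields a value that is identical only to itself, 'tr is _token' can only be true when the caller left the argument out, which is exactly the condition the deprecation warning in the diff keys on.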
@@ -1,1242 +1,1244 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid
16 from .node import nullid
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 match as matchmod,
20 match as matchmod,
21 osutil,
21 osutil,
22 parsers,
22 parsers,
23 pathutil,
23 pathutil,
24 scmutil,
24 scmutil,
25 util,
25 util,
26 )
26 )
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29 filecache = scmutil.filecache
29 filecache = scmutil.filecache
30 _rangemask = 0x7fffffff
30 _rangemask = 0x7fffffff
31
31
32 dirstatetuple = parsers.dirstatetuple
32 dirstatetuple = parsers.dirstatetuple
33
33
34 class repocache(filecache):
34 class repocache(filecache):
35 """filecache for files in .hg/"""
35 """filecache for files in .hg/"""
36 def join(self, obj, fname):
36 def join(self, obj, fname):
37 return obj._opener.join(fname)
37 return obj._opener.join(fname)
38
38
39 class rootcache(filecache):
39 class rootcache(filecache):
40 """filecache for files in the repository root"""
40 """filecache for files in the repository root"""
41 def join(self, obj, fname):
41 def join(self, obj, fname):
42 return obj._join(fname)
42 return obj._join(fname)
43
43
44 def _getfsnow(vfs):
44 def _getfsnow(vfs):
45 '''Get "now" timestamp on filesystem'''
45 '''Get "now" timestamp on filesystem'''
46 tmpfd, tmpname = vfs.mkstemp()
46 tmpfd, tmpname = vfs.mkstemp()
47 try:
47 try:
48 return os.fstat(tmpfd).st_mtime
48 return os.fstat(tmpfd).st_mtime
49 finally:
49 finally:
50 os.close(tmpfd)
50 os.close(tmpfd)
51 vfs.unlink(tmpname)
51 vfs.unlink(tmpname)
52
52
53 def nonnormalentries(dmap):
53 def nonnormalentries(dmap):
54 '''Compute the nonnormal dirstate entries from the dmap'''
54 '''Compute the nonnormal dirstate entries from the dmap'''
55 try:
55 try:
56 return parsers.nonnormalentries(dmap)
56 return parsers.nonnormalentries(dmap)
57 except AttributeError:
57 except AttributeError:
58 return set(fname for fname, e in dmap.iteritems()
58 return set(fname for fname, e in dmap.iteritems()
59 if e[0] != 'n' or e[3] == -1)
59 if e[0] != 'n' or e[3] == -1)
60
60
61 def _trypending(root, vfs, filename):
61 def _trypending(root, vfs, filename):
62 '''Open file to be read according to HG_PENDING environment variable
62 '''Open file to be read according to HG_PENDING environment variable
63
63
64 This opens '.pending' of specified 'filename' only when HG_PENDING
64 This opens '.pending' of specified 'filename' only when HG_PENDING
65 is equal to 'root'.
65 is equal to 'root'.
66
66
67 This returns '(fp, is_pending_opened)' tuple.
67 This returns '(fp, is_pending_opened)' tuple.
68 '''
68 '''
69 if root == os.environ.get('HG_PENDING'):
69 if root == os.environ.get('HG_PENDING'):
70 try:
70 try:
71 return (vfs('%s.pending' % filename), True)
71 return (vfs('%s.pending' % filename), True)
72 except IOError as inst:
72 except IOError as inst:
73 if inst.errno != errno.ENOENT:
73 if inst.errno != errno.ENOENT:
74 raise
74 raise
75 return (vfs(filename), False)
75 return (vfs(filename), False)
76
76
77 _token = object()
78
77 class dirstate(object):
79 class dirstate(object):
78
80
79 def __init__(self, opener, ui, root, validate):
81 def __init__(self, opener, ui, root, validate):
80 '''Create a new dirstate object.
82 '''Create a new dirstate object.
81
83
82 opener is an open()-like callable that can be used to open the
84 opener is an open()-like callable that can be used to open the
83 dirstate file; root is the root of the directory tracked by
85 dirstate file; root is the root of the directory tracked by
84 the dirstate.
86 the dirstate.
85 '''
87 '''
86 self._opener = opener
88 self._opener = opener
87 self._validate = validate
89 self._validate = validate
88 self._root = root
90 self._root = root
89 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
91 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
90 # UNC path pointing to root share (issue4557)
92 # UNC path pointing to root share (issue4557)
91 self._rootdir = pathutil.normasprefix(root)
93 self._rootdir = pathutil.normasprefix(root)
92 # internal config: ui.forcecwd
94 # internal config: ui.forcecwd
93 forcecwd = ui.config('ui', 'forcecwd')
95 forcecwd = ui.config('ui', 'forcecwd')
94 if forcecwd:
96 if forcecwd:
95 self._cwd = forcecwd
97 self._cwd = forcecwd
96 self._dirty = False
98 self._dirty = False
97 self._dirtypl = False
99 self._dirtypl = False
98 self._lastnormaltime = 0
100 self._lastnormaltime = 0
99 self._ui = ui
101 self._ui = ui
100 self._filecache = {}
102 self._filecache = {}
101 self._parentwriters = 0
103 self._parentwriters = 0
102 self._filename = 'dirstate'
104 self._filename = 'dirstate'
103 self._pendingfilename = '%s.pending' % self._filename
105 self._pendingfilename = '%s.pending' % self._filename
104
106
105 # for consistent view between _pl() and _read() invocations
107 # for consistent view between _pl() and _read() invocations
106 self._pendingmode = None
108 self._pendingmode = None
107
109
108 def beginparentchange(self):
110 def beginparentchange(self):
109 '''Marks the beginning of a set of changes that involve changing
111 '''Marks the beginning of a set of changes that involve changing
110 the dirstate parents. If there is an exception during this time,
112 the dirstate parents. If there is an exception during this time,
111 the dirstate will not be written when the wlock is released. This
113 the dirstate will not be written when the wlock is released. This
112 prevents writing an incoherent dirstate where the parent doesn't
114 prevents writing an incoherent dirstate where the parent doesn't
113 match the contents.
115 match the contents.
114 '''
116 '''
115 self._parentwriters += 1
117 self._parentwriters += 1
116
118
117 def endparentchange(self):
119 def endparentchange(self):
118 '''Marks the end of a set of changes that involve changing the
120 '''Marks the end of a set of changes that involve changing the
119 dirstate parents. Once all parent changes have been marked done,
121 dirstate parents. Once all parent changes have been marked done,
120 the wlock will be free to write the dirstate on release.
122 the wlock will be free to write the dirstate on release.
121 '''
123 '''
122 if self._parentwriters > 0:
124 if self._parentwriters > 0:
123 self._parentwriters -= 1
125 self._parentwriters -= 1
124
126
125 def pendingparentchange(self):
127 def pendingparentchange(self):
126 '''Returns true if the dirstate is in the middle of a set of changes
128 '''Returns true if the dirstate is in the middle of a set of changes
127 that modify the dirstate parent.
129 that modify the dirstate parent.
128 '''
130 '''
129 return self._parentwriters > 0
131 return self._parentwriters > 0
130
132
131 @propertycache
133 @propertycache
132 def _map(self):
134 def _map(self):
133 '''Return the dirstate contents as a map from filename to
135 '''Return the dirstate contents as a map from filename to
134 (state, mode, size, time).'''
136 (state, mode, size, time).'''
135 self._read()
137 self._read()
136 return self._map
138 return self._map
137
139
138 @propertycache
140 @propertycache
139 def _copymap(self):
141 def _copymap(self):
140 self._read()
142 self._read()
141 return self._copymap
143 return self._copymap
142
144
143 @propertycache
145 @propertycache
144 def _nonnormalset(self):
146 def _nonnormalset(self):
145 return nonnormalentries(self._map)
147 return nonnormalentries(self._map)
146
148
147 @propertycache
149 @propertycache
148 def _filefoldmap(self):
150 def _filefoldmap(self):
149 try:
151 try:
150 makefilefoldmap = parsers.make_file_foldmap
152 makefilefoldmap = parsers.make_file_foldmap
151 except AttributeError:
153 except AttributeError:
152 pass
154 pass
153 else:
155 else:
154 return makefilefoldmap(self._map, util.normcasespec,
156 return makefilefoldmap(self._map, util.normcasespec,
155 util.normcasefallback)
157 util.normcasefallback)
156
158
157 f = {}
159 f = {}
158 normcase = util.normcase
160 normcase = util.normcase
159 for name, s in self._map.iteritems():
161 for name, s in self._map.iteritems():
160 if s[0] != 'r':
162 if s[0] != 'r':
161 f[normcase(name)] = name
163 f[normcase(name)] = name
162 f['.'] = '.' # prevents useless util.fspath() invocation
164 f['.'] = '.' # prevents useless util.fspath() invocation
163 return f
165 return f
164
166
165 @propertycache
167 @propertycache
166 def _dirfoldmap(self):
168 def _dirfoldmap(self):
167 f = {}
169 f = {}
168 normcase = util.normcase
170 normcase = util.normcase
169 for name in self._dirs:
171 for name in self._dirs:
170 f[normcase(name)] = name
172 f[normcase(name)] = name
171 return f
173 return f
172
174
173 @repocache('branch')
175 @repocache('branch')
174 def _branch(self):
176 def _branch(self):
175 try:
177 try:
176 return self._opener.read("branch").strip() or "default"
178 return self._opener.read("branch").strip() or "default"
177 except IOError as inst:
179 except IOError as inst:
178 if inst.errno != errno.ENOENT:
180 if inst.errno != errno.ENOENT:
179 raise
181 raise
180 return "default"
182 return "default"
181
183
182 @propertycache
184 @propertycache
183 def _pl(self):
185 def _pl(self):
184 try:
186 try:
185 fp = self._opendirstatefile()
187 fp = self._opendirstatefile()
186 st = fp.read(40)
188 st = fp.read(40)
187 fp.close()
189 fp.close()
188 l = len(st)
190 l = len(st)
189 if l == 40:
191 if l == 40:
190 return st[:20], st[20:40]
192 return st[:20], st[20:40]
191 elif l > 0 and l < 40:
193 elif l > 0 and l < 40:
192 raise error.Abort(_('working directory state appears damaged!'))
194 raise error.Abort(_('working directory state appears damaged!'))
193 except IOError as err:
195 except IOError as err:
194 if err.errno != errno.ENOENT:
196 if err.errno != errno.ENOENT:
195 raise
197 raise
196 return [nullid, nullid]
198 return [nullid, nullid]
197
199
198 @propertycache
200 @propertycache
199 def _dirs(self):
201 def _dirs(self):
200 return util.dirs(self._map, 'r')
202 return util.dirs(self._map, 'r')
201
203
202 def dirs(self):
204 def dirs(self):
203 return self._dirs
205 return self._dirs
204
206
205 @rootcache('.hgignore')
207 @rootcache('.hgignore')
206 def _ignore(self):
208 def _ignore(self):
207 files = self._ignorefiles()
209 files = self._ignorefiles()
208 if not files:
210 if not files:
209 return util.never
211 return util.never
210
212
211 pats = ['include:%s' % f for f in files]
213 pats = ['include:%s' % f for f in files]
212 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
214 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
213
215
214 @propertycache
216 @propertycache
215 def _slash(self):
217 def _slash(self):
216 return self._ui.configbool('ui', 'slash') and os.sep != '/'
218 return self._ui.configbool('ui', 'slash') and os.sep != '/'
217
219
218 @propertycache
220 @propertycache
219 def _checklink(self):
221 def _checklink(self):
220 return util.checklink(self._root)
222 return util.checklink(self._root)
221
223
222 @propertycache
224 @propertycache
223 def _checkexec(self):
225 def _checkexec(self):
224 return util.checkexec(self._root)
226 return util.checkexec(self._root)
225
227
226 @propertycache
228 @propertycache
227 def _checkcase(self):
229 def _checkcase(self):
228 return not util.checkcase(self._join('.hg'))
230 return not util.checkcase(self._join('.hg'))
229
231
230 def _join(self, f):
232 def _join(self, f):
231 # much faster than os.path.join()
233 # much faster than os.path.join()
232 # it's safe because f is always a relative path
234 # it's safe because f is always a relative path
233 return self._rootdir + f
235 return self._rootdir + f
234
236
235 def flagfunc(self, buildfallback):
237 def flagfunc(self, buildfallback):
236 if self._checklink and self._checkexec:
238 if self._checklink and self._checkexec:
237 def f(x):
239 def f(x):
238 try:
240 try:
239 st = os.lstat(self._join(x))
241 st = os.lstat(self._join(x))
240 if util.statislink(st):
242 if util.statislink(st):
241 return 'l'
243 return 'l'
242 if util.statisexec(st):
244 if util.statisexec(st):
243 return 'x'
245 return 'x'
244 except OSError:
246 except OSError:
245 pass
247 pass
246 return ''
248 return ''
247 return f
249 return f
248
250
249 fallback = buildfallback()
251 fallback = buildfallback()
250 if self._checklink:
252 if self._checklink:
251 def f(x):
253 def f(x):
252 if os.path.islink(self._join(x)):
254 if os.path.islink(self._join(x)):
253 return 'l'
255 return 'l'
254 if 'x' in fallback(x):
256 if 'x' in fallback(x):
255 return 'x'
257 return 'x'
256 return ''
258 return ''
257 return f
259 return f
258 if self._checkexec:
260 if self._checkexec:
259 def f(x):
261 def f(x):
260 if 'l' in fallback(x):
262 if 'l' in fallback(x):
261 return 'l'
263 return 'l'
262 if util.isexec(self._join(x)):
264 if util.isexec(self._join(x)):
263 return 'x'
265 return 'x'
264 return ''
266 return ''
265 return f
267 return f
266 else:
268 else:
267 return fallback
269 return fallback
268
270
269 @propertycache
271 @propertycache
270 def _cwd(self):
272 def _cwd(self):
271 return os.getcwd()
273 return os.getcwd()
272
274
273 def getcwd(self):
275 def getcwd(self):
274 '''Return the path from which a canonical path is calculated.
276 '''Return the path from which a canonical path is calculated.
275
277
276 This path should be used to resolve file patterns or to convert
278 This path should be used to resolve file patterns or to convert
277 canonical paths back to file paths for display. It shouldn't be
279 canonical paths back to file paths for display. It shouldn't be
278 used to get real file paths. Use vfs functions instead.
280 used to get real file paths. Use vfs functions instead.
279 '''
281 '''
280 cwd = self._cwd
282 cwd = self._cwd
281 if cwd == self._root:
283 if cwd == self._root:
282 return ''
284 return ''
283 # self._root ends with a path separator if self._root is '/' or 'C:\'
285 # self._root ends with a path separator if self._root is '/' or 'C:\'
284 rootsep = self._root
286 rootsep = self._root
285 if not util.endswithsep(rootsep):
287 if not util.endswithsep(rootsep):
286 rootsep += os.sep
288 rootsep += os.sep
287 if cwd.startswith(rootsep):
289 if cwd.startswith(rootsep):
288 return cwd[len(rootsep):]
290 return cwd[len(rootsep):]
289 else:
291 else:
290 # we're outside the repo. return an absolute path.
292 # we're outside the repo. return an absolute path.
291 return cwd
293 return cwd
292
294
293 def pathto(self, f, cwd=None):
295 def pathto(self, f, cwd=None):
294 if cwd is None:
296 if cwd is None:
295 cwd = self.getcwd()
297 cwd = self.getcwd()
296 path = util.pathto(self._root, cwd, f)
298 path = util.pathto(self._root, cwd, f)
297 if self._slash:
299 if self._slash:
298 return util.pconvert(path)
300 return util.pconvert(path)
299 return path
301 return path
300
302
301 def __getitem__(self, key):
303 def __getitem__(self, key):
302 '''Return the current state of key (a filename) in the dirstate.
304 '''Return the current state of key (a filename) in the dirstate.
303
305
304 States are:
306 States are:
305 n normal
307 n normal
306 m needs merging
308 m needs merging
307 r marked for removal
309 r marked for removal
308 a marked for addition
310 a marked for addition
309 ? not tracked
311 ? not tracked
310 '''
312 '''
311 return self._map.get(key, ("?",))[0]
313 return self._map.get(key, ("?",))[0]
312
314
313 def __contains__(self, key):
315 def __contains__(self, key):
314 return key in self._map
316 return key in self._map
315
317
316 def __iter__(self):
318 def __iter__(self):
317 for x in sorted(self._map):
319 for x in sorted(self._map):
318 yield x
320 yield x
319
321
320 def iteritems(self):
322 def iteritems(self):
321 return self._map.iteritems()
323 return self._map.iteritems()
322
324
323 def parents(self):
325 def parents(self):
324 return [self._validate(p) for p in self._pl]
326 return [self._validate(p) for p in self._pl]
325
327
326 def p1(self):
328 def p1(self):
327 return self._validate(self._pl[0])
329 return self._validate(self._pl[0])
328
330
329 def p2(self):
331 def p2(self):
330 return self._validate(self._pl[1])
332 return self._validate(self._pl[1])
331
333
332 def branch(self):
334 def branch(self):
333 return encoding.tolocal(self._branch)
335 return encoding.tolocal(self._branch)
334
336
335 def setparents(self, p1, p2=nullid):
337 def setparents(self, p1, p2=nullid):
336 """Set dirstate parents to p1 and p2.
338 """Set dirstate parents to p1 and p2.
337
339
338 When moving from two parents to one, 'm' merged entries are
340 When moving from two parents to one, 'm' merged entries are
339 adjusted to normal and previous copy records discarded and
341 adjusted to normal and previous copy records discarded and
340 returned by the call.
342 returned by the call.
341
343
342 See localrepo.setparents()
344 See localrepo.setparents()
343 """
345 """
344 if self._parentwriters == 0:
346 if self._parentwriters == 0:
345 raise ValueError("cannot set dirstate parent without "
347 raise ValueError("cannot set dirstate parent without "
346 "calling dirstate.beginparentchange")
348 "calling dirstate.beginparentchange")
347
349
348 self._dirty = self._dirtypl = True
350 self._dirty = self._dirtypl = True
349 oldp2 = self._pl[1]
351 oldp2 = self._pl[1]
350 self._pl = p1, p2
352 self._pl = p1, p2
351 copies = {}
353 copies = {}
352 if oldp2 != nullid and p2 == nullid:
354 if oldp2 != nullid and p2 == nullid:
353 for f, s in self._map.iteritems():
355 for f, s in self._map.iteritems():
354 # Discard 'm' markers when moving away from a merge state
356 # Discard 'm' markers when moving away from a merge state
355 if s[0] == 'm':
357 if s[0] == 'm':
356 if f in self._copymap:
358 if f in self._copymap:
357 copies[f] = self._copymap[f]
359 copies[f] = self._copymap[f]
358 self.normallookup(f)
360 self.normallookup(f)
359 # Also fix up otherparent markers
361 # Also fix up otherparent markers
360 elif s[0] == 'n' and s[2] == -2:
362 elif s[0] == 'n' and s[2] == -2:
361 if f in self._copymap:
363 if f in self._copymap:
362 copies[f] = self._copymap[f]
364 copies[f] = self._copymap[f]
363 self.add(f)
365 self.add(f)
364 return copies
366 return copies
365
367
366 def setbranch(self, branch):
368 def setbranch(self, branch):
367 self._branch = encoding.fromlocal(branch)
369 self._branch = encoding.fromlocal(branch)
368 f = self._opener('branch', 'w', atomictemp=True)
370 f = self._opener('branch', 'w', atomictemp=True)
369 try:
371 try:
370 f.write(self._branch + '\n')
372 f.write(self._branch + '\n')
371 f.close()
373 f.close()
372
374
373 # make sure filecache has the correct stat info for _branch after
375 # make sure filecache has the correct stat info for _branch after
374 # replacing the underlying file
376 # replacing the underlying file
375 ce = self._filecache['_branch']
377 ce = self._filecache['_branch']
376 if ce:
378 if ce:
377 ce.refresh()
379 ce.refresh()
378 except: # re-raises
380 except: # re-raises
379 f.discard()
381 f.discard()
380 raise
382 raise
381
383
382 def _opendirstatefile(self):
384 def _opendirstatefile(self):
383 fp, mode = _trypending(self._root, self._opener, self._filename)
385 fp, mode = _trypending(self._root, self._opener, self._filename)
384 if self._pendingmode is not None and self._pendingmode != mode:
386 if self._pendingmode is not None and self._pendingmode != mode:
385 fp.close()
387 fp.close()
386 raise error.Abort(_('working directory state may be '
388 raise error.Abort(_('working directory state may be '
387 'changed parallelly'))
389 'changed parallelly'))
388 self._pendingmode = mode
390 self._pendingmode = mode
389 return fp
391 return fp
390
392
391 def _read(self):
393 def _read(self):
392 self._map = {}
394 self._map = {}
393 self._copymap = {}
395 self._copymap = {}
394 try:
396 try:
395 fp = self._opendirstatefile()
397 fp = self._opendirstatefile()
396 try:
398 try:
397 st = fp.read()
399 st = fp.read()
398 finally:
400 finally:
399 fp.close()
401 fp.close()
400 except IOError as err:
402 except IOError as err:
401 if err.errno != errno.ENOENT:
403 if err.errno != errno.ENOENT:
402 raise
404 raise
403 return
405 return
404 if not st:
406 if not st:
405 return
407 return
406
408
407 if util.safehasattr(parsers, 'dict_new_presized'):
409 if util.safehasattr(parsers, 'dict_new_presized'):
408 # Make an estimate of the number of files in the dirstate based on
410 # Make an estimate of the number of files in the dirstate based on
409 # its size. From a linear regression on a set of real-world repos,
411 # its size. From a linear regression on a set of real-world repos,
410 # all over 10,000 files, the size of a dirstate entry is 85
412 # all over 10,000 files, the size of a dirstate entry is 85
411 # bytes. The cost of resizing is significantly higher than the cost
413 # bytes. The cost of resizing is significantly higher than the cost
412 # of filling in a larger presized dict, so subtract 20% from the
414 # of filling in a larger presized dict, so subtract 20% from the
413 # size.
415 # size.
414 #
416 #
415 # This heuristic is imperfect in many ways, so in a future dirstate
417 # This heuristic is imperfect in many ways, so in a future dirstate
416 # format update it makes sense to just record the number of entries
418 # format update it makes sense to just record the number of entries
417 # on write.
419 # on write.
418 self._map = parsers.dict_new_presized(len(st) / 71)
420 self._map = parsers.dict_new_presized(len(st) / 71)
419
421
420 # Python's garbage collector triggers a GC each time a certain number
422 # Python's garbage collector triggers a GC each time a certain number
421 # of container objects (the number being defined by
423 # of container objects (the number being defined by
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
424 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
423 # for each file in the dirstate. The C version then immediately marks
425 # for each file in the dirstate. The C version then immediately marks
424 # them as not to be tracked by the collector. However, this has no
426 # them as not to be tracked by the collector. However, this has no
425 # effect on when GCs are triggered, only on what objects the GC looks
427 # effect on when GCs are triggered, only on what objects the GC looks
426 # into. This means that O(number of files) GCs are unavoidable.
428 # into. This means that O(number of files) GCs are unavoidable.
427 # Depending on when in the process's lifetime the dirstate is parsed,
429 # Depending on when in the process's lifetime the dirstate is parsed,
428 # this can get very expensive. As a workaround, disable GC while
430 # this can get very expensive. As a workaround, disable GC while
429 # parsing the dirstate.
431 # parsing the dirstate.
430 #
432 #
431 # (we cannot decorate the function directly since it is in a C module)
433 # (we cannot decorate the function directly since it is in a C module)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
434 parse_dirstate = util.nogc(parsers.parse_dirstate)
433 p = parse_dirstate(self._map, self._copymap, st)
435 p = parse_dirstate(self._map, self._copymap, st)
434 if not self._dirtypl:
436 if not self._dirtypl:
435 self._pl = p
437 self._pl = p
436
438
437 def invalidate(self):
439 def invalidate(self):
438 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
440 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
439 "_pl", "_dirs", "_ignore", "_nonnormalset"):
441 "_pl", "_dirs", "_ignore", "_nonnormalset"):
440 if a in self.__dict__:
442 if a in self.__dict__:
441 delattr(self, a)
443 delattr(self, a)
442 self._lastnormaltime = 0
444 self._lastnormaltime = 0
443 self._dirty = False
445 self._dirty = False
444 self._parentwriters = 0
446 self._parentwriters = 0
445
447
446 def copy(self, source, dest):
448 def copy(self, source, dest):
447 """Mark dest as a copy of source. Unmark dest if source is None."""
449 """Mark dest as a copy of source. Unmark dest if source is None."""
448 if source == dest:
450 if source == dest:
449 return
451 return
450 self._dirty = True
452 self._dirty = True
451 if source is not None:
453 if source is not None:
452 self._copymap[dest] = source
454 self._copymap[dest] = source
453 elif dest in self._copymap:
455 elif dest in self._copymap:
454 del self._copymap[dest]
456 del self._copymap[dest]
455
457
456 def copied(self, file):
458 def copied(self, file):
457 return self._copymap.get(file, None)
459 return self._copymap.get(file, None)
458
460
459 def copies(self):
461 def copies(self):
460 return self._copymap
462 return self._copymap
461
463
462 def _droppath(self, f):
464 def _droppath(self, f):
463 if self[f] not in "?r" and "_dirs" in self.__dict__:
465 if self[f] not in "?r" and "_dirs" in self.__dict__:
464 self._dirs.delpath(f)
466 self._dirs.delpath(f)
465
467
466 if "_filefoldmap" in self.__dict__:
468 if "_filefoldmap" in self.__dict__:
467 normed = util.normcase(f)
469 normed = util.normcase(f)
468 if normed in self._filefoldmap:
470 if normed in self._filefoldmap:
469 del self._filefoldmap[normed]
471 del self._filefoldmap[normed]
470
472
471 def _addpath(self, f, state, mode, size, mtime):
473 def _addpath(self, f, state, mode, size, mtime):
472 oldstate = self[f]
474 oldstate = self[f]
473 if state == 'a' or oldstate == 'r':
475 if state == 'a' or oldstate == 'r':
474 scmutil.checkfilename(f)
476 scmutil.checkfilename(f)
475 if f in self._dirs:
477 if f in self._dirs:
476 raise error.Abort(_('directory %r already in dirstate') % f)
478 raise error.Abort(_('directory %r already in dirstate') % f)
477 # shadows
479 # shadows
478 for d in util.finddirs(f):
480 for d in util.finddirs(f):
479 if d in self._dirs:
481 if d in self._dirs:
480 break
482 break
481 if d in self._map and self[d] != 'r':
483 if d in self._map and self[d] != 'r':
482 raise error.Abort(
484 raise error.Abort(
483 _('file %r in dirstate clashes with %r') % (d, f))
485 _('file %r in dirstate clashes with %r') % (d, f))
484 if oldstate in "?r" and "_dirs" in self.__dict__:
486 if oldstate in "?r" and "_dirs" in self.__dict__:
485 self._dirs.addpath(f)
487 self._dirs.addpath(f)
486 self._dirty = True
488 self._dirty = True
487 self._map[f] = dirstatetuple(state, mode, size, mtime)
489 self._map[f] = dirstatetuple(state, mode, size, mtime)
488 if state != 'n' or mtime == -1:
490 if state != 'n' or mtime == -1:
489 self._nonnormalset.add(f)
491 self._nonnormalset.add(f)
490
492
491 def normal(self, f):
493 def normal(self, f):
492 '''Mark a file normal and clean.'''
494 '''Mark a file normal and clean.'''
493 s = os.lstat(self._join(f))
495 s = os.lstat(self._join(f))
494 mtime = s.st_mtime
496 mtime = s.st_mtime
495 self._addpath(f, 'n', s.st_mode,
497 self._addpath(f, 'n', s.st_mode,
496 s.st_size & _rangemask, mtime & _rangemask)
498 s.st_size & _rangemask, mtime & _rangemask)
497 if f in self._copymap:
499 if f in self._copymap:
498 del self._copymap[f]
500 del self._copymap[f]
499 if f in self._nonnormalset:
501 if f in self._nonnormalset:
500 self._nonnormalset.remove(f)
502 self._nonnormalset.remove(f)
501 if mtime > self._lastnormaltime:
503 if mtime > self._lastnormaltime:
502 # Remember the most recent modification timeslot for status(),
504 # Remember the most recent modification timeslot for status(),
503 # to make sure we won't miss future size-preserving file content
505 # to make sure we won't miss future size-preserving file content
504 # modifications that happen within the same timeslot.
506 # modifications that happen within the same timeslot.
505 self._lastnormaltime = mtime
507 self._lastnormaltime = mtime
506
508
507 def normallookup(self, f):
509 def normallookup(self, f):
508 '''Mark a file normal, but possibly dirty.'''
510 '''Mark a file normal, but possibly dirty.'''
509 if self._pl[1] != nullid and f in self._map:
511 if self._pl[1] != nullid and f in self._map:
510 # if there is a merge going on and the file was either
512 # if there is a merge going on and the file was either
511 # in state 'm' (-1) or coming from other parent (-2) before
513 # in state 'm' (-1) or coming from other parent (-2) before
512 # being removed, restore that state.
514 # being removed, restore that state.
513 entry = self._map[f]
515 entry = self._map[f]
514 if entry[0] == 'r' and entry[2] in (-1, -2):
516 if entry[0] == 'r' and entry[2] in (-1, -2):
515 source = self._copymap.get(f)
517 source = self._copymap.get(f)
516 if entry[2] == -1:
518 if entry[2] == -1:
517 self.merge(f)
519 self.merge(f)
518 elif entry[2] == -2:
520 elif entry[2] == -2:
519 self.otherparent(f)
521 self.otherparent(f)
520 if source:
522 if source:
521 self.copy(source, f)
523 self.copy(source, f)
522 return
524 return
523 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
525 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
524 return
526 return
525 self._addpath(f, 'n', 0, -1, -1)
527 self._addpath(f, 'n', 0, -1, -1)
526 if f in self._copymap:
528 if f in self._copymap:
527 del self._copymap[f]
529 del self._copymap[f]
528 if f in self._nonnormalset:
530 if f in self._nonnormalset:
529 self._nonnormalset.remove(f)
531 self._nonnormalset.remove(f)
530
532
531 def otherparent(self, f):
533 def otherparent(self, f):
532 '''Mark as coming from the other parent, always dirty.'''
534 '''Mark as coming from the other parent, always dirty.'''
533 if self._pl[1] == nullid:
535 if self._pl[1] == nullid:
534 raise error.Abort(_("setting %r to other parent "
536 raise error.Abort(_("setting %r to other parent "
535 "only allowed in merges") % f)
537 "only allowed in merges") % f)
536 if f in self and self[f] == 'n':
538 if f in self and self[f] == 'n':
537 # merge-like
539 # merge-like
538 self._addpath(f, 'm', 0, -2, -1)
540 self._addpath(f, 'm', 0, -2, -1)
539 else:
541 else:
540 # add-like
542 # add-like
541 self._addpath(f, 'n', 0, -2, -1)
543 self._addpath(f, 'n', 0, -2, -1)
542
544
543 if f in self._copymap:
545 if f in self._copymap:
544 del self._copymap[f]
546 del self._copymap[f]
545
547
546 def add(self, f):
548 def add(self, f):
547 '''Mark a file added.'''
549 '''Mark a file added.'''
548 self._addpath(f, 'a', 0, -1, -1)
550 self._addpath(f, 'a', 0, -1, -1)
549 if f in self._copymap:
551 if f in self._copymap:
550 del self._copymap[f]
552 del self._copymap[f]
551
553
552 def remove(self, f):
554 def remove(self, f):
553 '''Mark a file removed.'''
555 '''Mark a file removed.'''
554 self._dirty = True
556 self._dirty = True
555 self._droppath(f)
557 self._droppath(f)
556 size = 0
558 size = 0
557 if self._pl[1] != nullid and f in self._map:
559 if self._pl[1] != nullid and f in self._map:
558 # backup the previous state
560 # backup the previous state
559 entry = self._map[f]
561 entry = self._map[f]
560 if entry[0] == 'm': # merge
562 if entry[0] == 'm': # merge
561 size = -1
563 size = -1
562 elif entry[0] == 'n' and entry[2] == -2: # other parent
564 elif entry[0] == 'n' and entry[2] == -2: # other parent
563 size = -2
565 size = -2
564 self._map[f] = dirstatetuple('r', 0, size, 0)
566 self._map[f] = dirstatetuple('r', 0, size, 0)
565 self._nonnormalset.add(f)
567 self._nonnormalset.add(f)
566 if size == 0 and f in self._copymap:
568 if size == 0 and f in self._copymap:
567 del self._copymap[f]
569 del self._copymap[f]
568
570
569 def merge(self, f):
571 def merge(self, f):
570 '''Mark a file merged.'''
572 '''Mark a file merged.'''
571 if self._pl[1] == nullid:
573 if self._pl[1] == nullid:
572 return self.normallookup(f)
574 return self.normallookup(f)
573 return self.otherparent(f)
575 return self.otherparent(f)
574
576
575 def drop(self, f):
577 def drop(self, f):
576 '''Drop a file from the dirstate'''
578 '''Drop a file from the dirstate'''
577 if f in self._map:
579 if f in self._map:
578 self._dirty = True
580 self._dirty = True
579 self._droppath(f)
581 self._droppath(f)
580 del self._map[f]
582 del self._map[f]
581 if f in self._nonnormalset:
583 if f in self._nonnormalset:
582 self._nonnormalset.remove(f)
584 self._nonnormalset.remove(f)
583
585
584 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
586 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
585 if exists is None:
587 if exists is None:
586 exists = os.path.lexists(os.path.join(self._root, path))
588 exists = os.path.lexists(os.path.join(self._root, path))
587 if not exists:
589 if not exists:
588 # Maybe a path component exists
590 # Maybe a path component exists
589 if not ignoremissing and '/' in path:
591 if not ignoremissing and '/' in path:
590 d, f = path.rsplit('/', 1)
592 d, f = path.rsplit('/', 1)
591 d = self._normalize(d, False, ignoremissing, None)
593 d = self._normalize(d, False, ignoremissing, None)
592 folded = d + "/" + f
594 folded = d + "/" + f
593 else:
595 else:
594 # No path components, preserve original case
596 # No path components, preserve original case
595 folded = path
597 folded = path
596 else:
598 else:
597 # recursively normalize leading directory components
599 # recursively normalize leading directory components
598 # against dirstate
600 # against dirstate
599 if '/' in normed:
601 if '/' in normed:
600 d, f = normed.rsplit('/', 1)
602 d, f = normed.rsplit('/', 1)
601 d = self._normalize(d, False, ignoremissing, True)
603 d = self._normalize(d, False, ignoremissing, True)
602 r = self._root + "/" + d
604 r = self._root + "/" + d
603 folded = d + "/" + util.fspath(f, r)
605 folded = d + "/" + util.fspath(f, r)
604 else:
606 else:
605 folded = util.fspath(normed, self._root)
607 folded = util.fspath(normed, self._root)
606 storemap[normed] = folded
608 storemap[normed] = folded
607
609
608 return folded
610 return folded
609
611
610 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
612 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
611 normed = util.normcase(path)
613 normed = util.normcase(path)
612 folded = self._filefoldmap.get(normed, None)
614 folded = self._filefoldmap.get(normed, None)
613 if folded is None:
615 if folded is None:
614 if isknown:
616 if isknown:
615 folded = path
617 folded = path
616 else:
618 else:
617 folded = self._discoverpath(path, normed, ignoremissing, exists,
619 folded = self._discoverpath(path, normed, ignoremissing, exists,
618 self._filefoldmap)
620 self._filefoldmap)
619 return folded
621 return folded
620
622
621 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
623 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
622 normed = util.normcase(path)
624 normed = util.normcase(path)
623 folded = self._filefoldmap.get(normed, None)
625 folded = self._filefoldmap.get(normed, None)
624 if folded is None:
626 if folded is None:
625 folded = self._dirfoldmap.get(normed, None)
627 folded = self._dirfoldmap.get(normed, None)
626 if folded is None:
628 if folded is None:
627 if isknown:
629 if isknown:
628 folded = path
630 folded = path
629 else:
631 else:
630 # store discovered result in dirfoldmap so that future
632 # store discovered result in dirfoldmap so that future
631 # normalizefile calls don't start matching directories
633 # normalizefile calls don't start matching directories
632 folded = self._discoverpath(path, normed, ignoremissing, exists,
634 folded = self._discoverpath(path, normed, ignoremissing, exists,
633 self._dirfoldmap)
635 self._dirfoldmap)
634 return folded
636 return folded
635
637
636 def normalize(self, path, isknown=False, ignoremissing=False):
638 def normalize(self, path, isknown=False, ignoremissing=False):
637 '''
639 '''
638 normalize the case of a pathname when on a casefolding filesystem
640 normalize the case of a pathname when on a casefolding filesystem
639
641
640 isknown specifies whether the filename came from walking the
642 isknown specifies whether the filename came from walking the
641 disk, to avoid extra filesystem access.
643 disk, to avoid extra filesystem access.
642
644
643 If ignoremissing is True, missing paths are returned
645 If ignoremissing is True, missing paths are returned
644 unchanged. Otherwise, we try harder to normalize possibly
646 unchanged. Otherwise, we try harder to normalize possibly
645 existing path components.
647 existing path components.
646
648
647 The normalized case is determined based on the following precedence:
649 The normalized case is determined based on the following precedence:
648
650
649 - version of name already stored in the dirstate
651 - version of name already stored in the dirstate
650 - version of name stored on disk
652 - version of name stored on disk
651 - version provided via command arguments
653 - version provided via command arguments
652 '''
654 '''
653
655
654 if self._checkcase:
656 if self._checkcase:
655 return self._normalize(path, isknown, ignoremissing)
657 return self._normalize(path, isknown, ignoremissing)
656 return path
658 return path
657
659
658 def clear(self):
660 def clear(self):
659 self._map = {}
661 self._map = {}
660 self._nonnormalset = set()
662 self._nonnormalset = set()
661 if "_dirs" in self.__dict__:
663 if "_dirs" in self.__dict__:
662 delattr(self, "_dirs")
664 delattr(self, "_dirs")
663 self._copymap = {}
665 self._copymap = {}
664 self._pl = [nullid, nullid]
666 self._pl = [nullid, nullid]
665 self._lastnormaltime = 0
667 self._lastnormaltime = 0
666 self._dirty = True
668 self._dirty = True
667
669
668 def rebuild(self, parent, allfiles, changedfiles=None):
670 def rebuild(self, parent, allfiles, changedfiles=None):
669 if changedfiles is None:
671 if changedfiles is None:
670 # Rebuild entire dirstate
672 # Rebuild entire dirstate
671 changedfiles = allfiles
673 changedfiles = allfiles
672 lastnormaltime = self._lastnormaltime
674 lastnormaltime = self._lastnormaltime
673 self.clear()
675 self.clear()
674 self._lastnormaltime = lastnormaltime
676 self._lastnormaltime = lastnormaltime
675
677
676 for f in changedfiles:
678 for f in changedfiles:
677 mode = 0o666
679 mode = 0o666
678 if f in allfiles and 'x' in allfiles.flags(f):
680 if f in allfiles and 'x' in allfiles.flags(f):
679 mode = 0o777
681 mode = 0o777
680
682
681 if f in allfiles:
683 if f in allfiles:
682 self._map[f] = dirstatetuple('n', mode, -1, 0)
684 self._map[f] = dirstatetuple('n', mode, -1, 0)
683 else:
685 else:
684 self._map.pop(f, None)
686 self._map.pop(f, None)
685 if f in self._nonnormalset:
687 if f in self._nonnormalset:
686 self._nonnormalset.remove(f)
688 self._nonnormalset.remove(f)
687
689
688 self._pl = (parent, nullid)
690 self._pl = (parent, nullid)
689 self._dirty = True
691 self._dirty = True
690
692
691 def write(self, tr=False):
693 def write(self, tr=_token):
692 if not self._dirty:
694 if not self._dirty:
693 return
695 return
694
696
695 filename = self._filename
697 filename = self._filename
696 if tr is False: # not explicitly specified
698 if tr is _token: # not explicitly specified
697 self._ui.deprecwarn('use dirstate.write with '
699 self._ui.deprecwarn('use dirstate.write with '
698 'repo.currenttransaction()',
700 'repo.currenttransaction()',
699 '3.9')
701 '3.9')
700
702
701 if self._opener.lexists(self._pendingfilename):
703 if self._opener.lexists(self._pendingfilename):
702 # if pending file already exists, in-memory changes
704 # if pending file already exists, in-memory changes
703 # should be written into it, because it has priority
705 # should be written into it, because it has priority
704 # to '.hg/dirstate' at reading under HG_PENDING mode
706 # to '.hg/dirstate' at reading under HG_PENDING mode
705 filename = self._pendingfilename
707 filename = self._pendingfilename
706 elif tr:
708 elif tr:
707 # 'dirstate.write()' is not only for writing in-memory
709 # 'dirstate.write()' is not only for writing in-memory
708 # changes out, but also for dropping ambiguous timestamp.
710 # changes out, but also for dropping ambiguous timestamp.
709 # delayed writing re-raises the "ambiguous timestamp issue".
711 # delayed writing re-raises the "ambiguous timestamp issue".
710 # See also the wiki page below for detail:
712 # See also the wiki page below for detail:
711 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
713 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
712
714
713 # emulate dropping timestamp in 'parsers.pack_dirstate'
715 # emulate dropping timestamp in 'parsers.pack_dirstate'
714 now = _getfsnow(self._opener)
716 now = _getfsnow(self._opener)
715 dmap = self._map
717 dmap = self._map
716 for f, e in dmap.iteritems():
718 for f, e in dmap.iteritems():
717 if e[0] == 'n' and e[3] == now:
719 if e[0] == 'n' and e[3] == now:
718 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
720 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
719 self._nonnormalset.add(f)
721 self._nonnormalset.add(f)
720
722
721 # emulate that all 'dirstate.normal' results are written out
723 # emulate that all 'dirstate.normal' results are written out
722 self._lastnormaltime = 0
724 self._lastnormaltime = 0
723
725
724 # delay writing in-memory changes out
726 # delay writing in-memory changes out
725 tr.addfilegenerator('dirstate', (self._filename,),
727 tr.addfilegenerator('dirstate', (self._filename,),
726 self._writedirstate, location='plain')
728 self._writedirstate, location='plain')
727 return
729 return
728
730
729 st = self._opener(filename, "w", atomictemp=True)
731 st = self._opener(filename, "w", atomictemp=True)
730 self._writedirstate(st)
732 self._writedirstate(st)
731
733
732 def _writedirstate(self, st):
734 def _writedirstate(self, st):
733 # use the modification time of the newly created temporary file as the
735 # use the modification time of the newly created temporary file as the
734 # filesystem's notion of 'now'
736 # filesystem's notion of 'now'
735 now = util.fstat(st).st_mtime & _rangemask
737 now = util.fstat(st).st_mtime & _rangemask
736
738
737 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
739 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
738 # timestamp of each entry in dirstate, because of 'now > mtime'
740 # timestamp of each entry in dirstate, because of 'now > mtime'
739 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
741 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
740 if delaywrite > 0:
742 if delaywrite > 0:
741 # do we have any files to delay for?
743 # do we have any files to delay for?
742 for f, e in self._map.iteritems():
744 for f, e in self._map.iteritems():
743 if e[0] == 'n' and e[3] == now:
745 if e[0] == 'n' and e[3] == now:
744 import time # to avoid useless import
746 import time # to avoid useless import
745 # rather than sleep n seconds, sleep until the next
747 # rather than sleep n seconds, sleep until the next
746 # multiple of n seconds
748 # multiple of n seconds
747 clock = time.time()
749 clock = time.time()
748 start = int(clock) - (int(clock) % delaywrite)
750 start = int(clock) - (int(clock) % delaywrite)
749 end = start + delaywrite
751 end = start + delaywrite
750 time.sleep(end - clock)
752 time.sleep(end - clock)
751 break
753 break
752
754
753 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
755 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
754 self._nonnormalset = nonnormalentries(self._map)
756 self._nonnormalset = nonnormalentries(self._map)
755 st.close()
757 st.close()
756 self._lastnormaltime = 0
758 self._lastnormaltime = 0
757 self._dirty = self._dirtypl = False
759 self._dirty = self._dirtypl = False
758
760
759 def _dirignore(self, f):
761 def _dirignore(self, f):
760 if f == '.':
762 if f == '.':
761 return False
763 return False
762 if self._ignore(f):
764 if self._ignore(f):
763 return True
765 return True
764 for p in util.finddirs(f):
766 for p in util.finddirs(f):
765 if self._ignore(p):
767 if self._ignore(p):
766 return True
768 return True
767 return False
769 return False
768
770
769 def _ignorefiles(self):
771 def _ignorefiles(self):
770 files = []
772 files = []
771 if os.path.exists(self._join('.hgignore')):
773 if os.path.exists(self._join('.hgignore')):
772 files.append(self._join('.hgignore'))
774 files.append(self._join('.hgignore'))
773 for name, path in self._ui.configitems("ui"):
775 for name, path in self._ui.configitems("ui"):
774 if name == 'ignore' or name.startswith('ignore.'):
776 if name == 'ignore' or name.startswith('ignore.'):
775 # we need to use os.path.join here rather than self._join
777 # we need to use os.path.join here rather than self._join
776 # because path is arbitrary and user-specified
778 # because path is arbitrary and user-specified
777 files.append(os.path.join(self._rootdir, util.expandpath(path)))
779 files.append(os.path.join(self._rootdir, util.expandpath(path)))
778 return files
780 return files
779
781
780 def _ignorefileandline(self, f):
782 def _ignorefileandline(self, f):
781 files = collections.deque(self._ignorefiles())
783 files = collections.deque(self._ignorefiles())
782 visited = set()
784 visited = set()
783 while files:
785 while files:
784 i = files.popleft()
786 i = files.popleft()
785 patterns = matchmod.readpatternfile(i, self._ui.warn,
787 patterns = matchmod.readpatternfile(i, self._ui.warn,
786 sourceinfo=True)
788 sourceinfo=True)
787 for pattern, lineno, line in patterns:
789 for pattern, lineno, line in patterns:
788 kind, p = matchmod._patsplit(pattern, 'glob')
790 kind, p = matchmod._patsplit(pattern, 'glob')
789 if kind == "subinclude":
791 if kind == "subinclude":
790 if p not in visited:
792 if p not in visited:
791 files.append(p)
793 files.append(p)
792 continue
794 continue
793 m = matchmod.match(self._root, '', [], [pattern],
795 m = matchmod.match(self._root, '', [], [pattern],
794 warn=self._ui.warn)
796 warn=self._ui.warn)
795 if m(f):
797 if m(f):
796 return (i, lineno, line)
798 return (i, lineno, line)
797 visited.add(i)
799 visited.add(i)
798 return (None, -1, "")
800 return (None, -1, "")
799
801
800 def _walkexplicit(self, match, subrepos):
802 def _walkexplicit(self, match, subrepos):
801 '''Get stat data about the files explicitly specified by match.
803 '''Get stat data about the files explicitly specified by match.
802
804
803 Return a triple (results, dirsfound, dirsnotfound).
805 Return a triple (results, dirsfound, dirsnotfound).
804 - results is a mapping from filename to stat result. It also contains
806 - results is a mapping from filename to stat result. It also contains
805 listings mapping subrepos and .hg to None.
807 listings mapping subrepos and .hg to None.
806 - dirsfound is a list of files found to be directories.
808 - dirsfound is a list of files found to be directories.
807 - dirsnotfound is a list of files that the dirstate thinks are
809 - dirsnotfound is a list of files that the dirstate thinks are
808 directories and that were not found.'''
810 directories and that were not found.'''
809
811
810 def badtype(mode):
812 def badtype(mode):
811 kind = _('unknown')
813 kind = _('unknown')
812 if stat.S_ISCHR(mode):
814 if stat.S_ISCHR(mode):
813 kind = _('character device')
815 kind = _('character device')
814 elif stat.S_ISBLK(mode):
816 elif stat.S_ISBLK(mode):
815 kind = _('block device')
817 kind = _('block device')
816 elif stat.S_ISFIFO(mode):
818 elif stat.S_ISFIFO(mode):
817 kind = _('fifo')
819 kind = _('fifo')
818 elif stat.S_ISSOCK(mode):
820 elif stat.S_ISSOCK(mode):
819 kind = _('socket')
821 kind = _('socket')
820 elif stat.S_ISDIR(mode):
822 elif stat.S_ISDIR(mode):
821 kind = _('directory')
823 kind = _('directory')
822 return _('unsupported file type (type is %s)') % kind
824 return _('unsupported file type (type is %s)') % kind
823
825
824 matchedir = match.explicitdir
826 matchedir = match.explicitdir
825 badfn = match.bad
827 badfn = match.bad
826 dmap = self._map
828 dmap = self._map
827 lstat = os.lstat
829 lstat = os.lstat
828 getkind = stat.S_IFMT
830 getkind = stat.S_IFMT
829 dirkind = stat.S_IFDIR
831 dirkind = stat.S_IFDIR
830 regkind = stat.S_IFREG
832 regkind = stat.S_IFREG
831 lnkkind = stat.S_IFLNK
833 lnkkind = stat.S_IFLNK
832 join = self._join
834 join = self._join
833 dirsfound = []
835 dirsfound = []
834 foundadd = dirsfound.append
836 foundadd = dirsfound.append
835 dirsnotfound = []
837 dirsnotfound = []
836 notfoundadd = dirsnotfound.append
838 notfoundadd = dirsnotfound.append
837
839
838 if not match.isexact() and self._checkcase:
840 if not match.isexact() and self._checkcase:
839 normalize = self._normalize
841 normalize = self._normalize
840 else:
842 else:
841 normalize = None
843 normalize = None
842
844
843 files = sorted(match.files())
845 files = sorted(match.files())
844 subrepos.sort()
846 subrepos.sort()
845 i, j = 0, 0
847 i, j = 0, 0
846 while i < len(files) and j < len(subrepos):
848 while i < len(files) and j < len(subrepos):
847 subpath = subrepos[j] + "/"
849 subpath = subrepos[j] + "/"
848 if files[i] < subpath:
850 if files[i] < subpath:
849 i += 1
851 i += 1
850 continue
852 continue
851 while i < len(files) and files[i].startswith(subpath):
853 while i < len(files) and files[i].startswith(subpath):
852 del files[i]
854 del files[i]
853 j += 1
855 j += 1
854
856
855 if not files or '.' in files:
857 if not files or '.' in files:
856 files = ['.']
858 files = ['.']
857 results = dict.fromkeys(subrepos)
859 results = dict.fromkeys(subrepos)
858 results['.hg'] = None
860 results['.hg'] = None
859
861
860 alldirs = None
862 alldirs = None
861 for ff in files:
863 for ff in files:
862 # constructing the foldmap is expensive, so don't do it for the
864 # constructing the foldmap is expensive, so don't do it for the
863 # common case where files is ['.']
865 # common case where files is ['.']
864 if normalize and ff != '.':
866 if normalize and ff != '.':
865 nf = normalize(ff, False, True)
867 nf = normalize(ff, False, True)
866 else:
868 else:
867 nf = ff
869 nf = ff
868 if nf in results:
870 if nf in results:
869 continue
871 continue
870
872
871 try:
873 try:
872 st = lstat(join(nf))
874 st = lstat(join(nf))
873 kind = getkind(st.st_mode)
875 kind = getkind(st.st_mode)
874 if kind == dirkind:
876 if kind == dirkind:
875 if nf in dmap:
877 if nf in dmap:
876 # file replaced by dir on disk but still in dirstate
878 # file replaced by dir on disk but still in dirstate
877 results[nf] = None
879 results[nf] = None
878 if matchedir:
880 if matchedir:
879 matchedir(nf)
881 matchedir(nf)
880 foundadd((nf, ff))
882 foundadd((nf, ff))
881 elif kind == regkind or kind == lnkkind:
883 elif kind == regkind or kind == lnkkind:
882 results[nf] = st
884 results[nf] = st
883 else:
885 else:
884 badfn(ff, badtype(kind))
886 badfn(ff, badtype(kind))
885 if nf in dmap:
887 if nf in dmap:
886 results[nf] = None
888 results[nf] = None
887 except OSError as inst: # nf not found on disk - it is dirstate only
889 except OSError as inst: # nf not found on disk - it is dirstate only
888 if nf in dmap: # does it exactly match a missing file?
890 if nf in dmap: # does it exactly match a missing file?
889 results[nf] = None
891 results[nf] = None
890 else: # does it match a missing directory?
892 else: # does it match a missing directory?
891 if alldirs is None:
893 if alldirs is None:
892 alldirs = util.dirs(dmap)
894 alldirs = util.dirs(dmap)
893 if nf in alldirs:
895 if nf in alldirs:
894 if matchedir:
896 if matchedir:
895 matchedir(nf)
897 matchedir(nf)
896 notfoundadd(nf)
898 notfoundadd(nf)
897 else:
899 else:
898 badfn(ff, inst.strerror)
900 badfn(ff, inst.strerror)
899
901
900 # Case insensitive filesystems cannot rely on lstat() failing to detect
902 # Case insensitive filesystems cannot rely on lstat() failing to detect
901 # a case-only rename. Prune the stat object for any file that does not
903 # a case-only rename. Prune the stat object for any file that does not
902 # match the case in the filesystem, if there are multiple files that
904 # match the case in the filesystem, if there are multiple files that
903 # normalize to the same path.
905 # normalize to the same path.
904 if match.isexact() and self._checkcase:
906 if match.isexact() and self._checkcase:
905 normed = {}
907 normed = {}
906
908
907 for f, st in results.iteritems():
909 for f, st in results.iteritems():
908 if st is None:
910 if st is None:
909 continue
911 continue
910
912
911 nc = util.normcase(f)
913 nc = util.normcase(f)
912 paths = normed.get(nc)
914 paths = normed.get(nc)
913
915
914 if paths is None:
916 if paths is None:
915 paths = set()
917 paths = set()
916 normed[nc] = paths
918 normed[nc] = paths
917
919
918 paths.add(f)
920 paths.add(f)
919
921
920 for norm, paths in normed.iteritems():
922 for norm, paths in normed.iteritems():
921 if len(paths) > 1:
923 if len(paths) > 1:
922 for path in paths:
924 for path in paths:
923 folded = self._discoverpath(path, norm, True, None,
925 folded = self._discoverpath(path, norm, True, None,
924 self._dirfoldmap)
926 self._dirfoldmap)
925 if path != folded:
927 if path != folded:
926 results[path] = None
928 results[path] = None
927
929
928 return results, dirsfound, dirsnotfound
930 return results, dirsfound, dirsnotfound
929
931
930 def walk(self, match, subrepos, unknown, ignored, full=True):
932 def walk(self, match, subrepos, unknown, ignored, full=True):
931 '''
933 '''
932 Walk recursively through the directory tree, finding all files
934 Walk recursively through the directory tree, finding all files
933 matched by match.
935 matched by match.
934
936
935 If full is False, maybe skip some known-clean files.
937 If full is False, maybe skip some known-clean files.
936
938
937 Return a dict mapping filename to stat-like object (either
939 Return a dict mapping filename to stat-like object (either
938 mercurial.osutil.stat instance or return value of os.stat()).
940 mercurial.osutil.stat instance or return value of os.stat()).
939
941
940 '''
942 '''
941 # full is a flag that extensions that hook into walk can use -- this
943 # full is a flag that extensions that hook into walk can use -- this
942 # implementation doesn't use it at all. This satisfies the contract
944 # implementation doesn't use it at all. This satisfies the contract
943 # because we only guarantee a "maybe".
945 # because we only guarantee a "maybe".
944
946
945 if ignored:
947 if ignored:
946 ignore = util.never
948 ignore = util.never
947 dirignore = util.never
949 dirignore = util.never
948 elif unknown:
950 elif unknown:
949 ignore = self._ignore
951 ignore = self._ignore
950 dirignore = self._dirignore
952 dirignore = self._dirignore
951 else:
953 else:
952 # if not unknown and not ignored, drop dir recursion and step 2
954 # if not unknown and not ignored, drop dir recursion and step 2
953 ignore = util.always
955 ignore = util.always
954 dirignore = util.always
956 dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = osutil.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd), inst.strerror)
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that traverse() doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn, b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = dmap.keys()
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was
                # stat'ed and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory,
                        # which we report as missing in this case
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
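                # Editor's note -- clarifying comment, not in the original
                # source: util.statfiles() returns one stat result per input
                # path, in the same order as 'visit', so binding the
                # iterator's next method below pairs each result with its
                # filename, much like zip(visit, util.statfiles(...)) but
                # without building the intermediate list of pairs.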
                nf = iter(visit).next
                for st in util.statfiles([join(i) for i in visit]):
                    results[nf()] = st
        return results

    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''
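        # Editor's note -- illustrative sketch, not part of the original
        # source.  Assuming 'ds' is a dirstate and 'm' a matcher, a caller
        # might unpack the result like this:
        #
        #   unsure, s = ds.status(m, [], ignored=False, clean=False,
        #                         unknown=True)
        #   # s.modified, s.added, s.removed, s.deleted, s.unknown,
        #   # s.ignored and s.clean are lists of filenames; entries in
        #   # 'unsure' still need a content comparison to be classified.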
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        ladd = lookup.append # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

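            # Editor's note -- worked example, not in the original source.
            # Elsewhere in this module the dirstate stores size and mtime
            # truncated to 31 bits (_rangemask == 0x7fffffff), which is why
            # each comparison below also accepts the masked value.  For
            # instance, a 3 GiB file with st_size == 3221225472 is recorded
            # as 3221225472 & 0x7fffffff == 1073741824 and must not be
            # reported as modified merely because of that truncation.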
            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))

    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]
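
    # Editor's note -- illustrative sketch, not part of the original source.
    # matches() filters by name only and ignores file state, so removed
    # files are still returned while they remain in the dirstate map.
    # Assuming 'repo' is a localrepository:
    #
    #   m = matchmod.match(repo.root, '', [r're:.*\.py$'])
    #   tracked_py = repo.dirstate.matches(m)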

    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def savebackup(self, tr, suffix):
        '''Save current dirstate into backup file with suffix'''
        filename = self._actualfilename(tr)

        # use '_writedirstate' instead of 'write' to make sure changes are
        # always written out, because the latter skips the write while a
        # transaction is running. The output file is then used to create the
        # backup of the dirstate at this point.
        self._writedirstate(self._opener(filename, "w", atomictemp=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        self._opener.write(filename + suffix, self._opener.tryread(filename))

    def restorebackup(self, tr, suffix):
        '''Restore dirstate from backup file with suffix'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        self._opener.rename(filename + suffix, filename)

    def clearbackup(self, tr, suffix):
        '''Clear backup file with suffix'''
        filename = self._actualfilename(tr)
        self._opener.unlink(filename + suffix)
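
    # Editor's note -- illustrative sketch, not part of the original source.
    # The three helpers above are meant to bracket a risky working-directory
    # operation; the '.backup' suffix is made up for the example:
    #
    #   repo.dirstate.savebackup(tr, '.backup')
    #   try:
    #       do_risky_update()                  # hypothetical operation
    #       repo.dirstate.clearbackup(tr, '.backup')
    #   except Exception:
    #       repo.dirstate.restorebackup(tr, '.backup')
    #       raise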