dirstate: add prefix and suffix arguments to backup...
Mateusz Kwapich
r29189:930d4ee4 default
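The listing below documents a strict protocol around dirstate parent changes: setparents() must be bracketed by beginparentchange()/endparentchange(), and write() should be given the current transaction. A minimal usage sketch of that protocol follows; `repo` and `newnode` are stand-in names (a localrepo object and a 20-byte changeset id), not part of this changeset.

    # Sketch only: 'repo' and 'newnode' are assumed stand-ins, not defined here.
    ds = repo.dirstate
    ds.beginparentchange()
    try:
        ds.setparents(newnode)   # raises ValueError outside a parent change
    finally:
        ds.endparentchange()
    # per the deprecation warning in write(), pass the current transaction
    ds.write(repo.currenttransaction())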
@@ -1,1244 +1,1245 @@
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import errno
import os
import stat

from .i18n import _
from .node import nullid
from . import (
    encoding,
    error,
    match as matchmod,
    osutil,
    parsers,
    pathutil,
    scmutil,
    util,
)

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7fffffff

dirstatetuple = parsers.dirstatetuple

class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        return obj._opener.join(fname)

class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        return obj._join(fname)

def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd).st_mtime
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)

def nonnormalentries(dmap):
    '''Compute the nonnormal dirstate entries from the dmap'''
    try:
        return parsers.nonnormalentries(dmap)
    except AttributeError:
        return set(fname for fname, e in dmap.iteritems()
                   if e[0] != 'n' or e[3] == -1)

def _trypending(root, vfs, filename):
    '''Open file to be read according to the HG_PENDING environment variable

    This opens '.pending' of specified 'filename' only when HG_PENDING
    is equal to 'root'.

    This returns a '(fp, is_pending_opened)' tuple.
    '''
    if root == os.environ.get('HG_PENDING'):
        try:
            return (vfs('%s.pending' % filename), True)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
    return (vfs(filename), False)

_token = object()

class dirstate(object):

    def __init__(self, opener, ui, root, validate):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # internal config: ui.forcecwd
        forcecwd = ui.config('ui', 'forcecwd')
        if forcecwd:
            self._cwd = forcecwd
        self._dirty = False
        self._dirtypl = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    def beginparentchange(self):
        '''Marks the beginning of a set of changes that involve changing
        the dirstate parents. If there is an exception during this time,
        the dirstate will not be written when the wlock is released. This
        prevents writing an incoherent dirstate where the parent doesn't
        match the contents.
        '''
        self._parentwriters += 1

    def endparentchange(self):
        '''Marks the end of a set of changes that involve changing the
        dirstate parents. Once all parent changes have been marked done,
        the wlock will be free to write the dirstate on release.
        '''
        if self._parentwriters > 0:
            self._parentwriters -= 1

    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0

    @propertycache
    def _map(self):
        '''Return the dirstate contents as a map from filename to
        (state, mode, size, time).'''
        self._read()
        return self._map

    @propertycache
    def _copymap(self):
        self._read()
        return self._copymap

    @propertycache
    def _nonnormalset(self):
        return nonnormalentries(self._map)

    @propertycache
    def _filefoldmap(self):
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        f = {}
        normcase = util.normcase
        for name, s in self._map.iteritems():
            if s[0] != 'r':
                f[normcase(name)] = name
        f['.'] = '.' # prevents useless util.fspath() invocation
        return f

    @propertycache
    def _dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f

    @repocache('branch')
    def _branch(self):
        try:
            return self._opener.read("branch").strip() or "default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return "default"

    @propertycache
    def _pl(self):
        try:
            fp = self._opendirstatefile()
            st = fp.read(40)
            fp.close()
            l = len(st)
            if l == 40:
                return st[:20], st[20:40]
            elif l > 0 and l < 40:
                raise error.Abort(_('working directory state appears damaged!'))
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
        return [nullid, nullid]

    @propertycache
    def _dirs(self):
        return util.dirs(self._map, 'r')

    def dirs(self):
        return self._dirs

    @rootcache('.hgignore')
    def _ignore(self):
        files = self._ignorefiles()
        if not files:
            return util.never

        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool('ui', 'slash') and os.sep != '/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        return not util.checkcase(self._join('.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, buildfallback):
        if self._checklink and self._checkexec:
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            return fallback

    @propertycache
    def _cwd(self):
        return os.getcwd()

    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += os.sep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        '''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        for x in sorted(self._map):
            yield x

    def iteritems(self):
        return self._map.iteritems()

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        return encoding.tolocal(self._branch)

    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent without "
                             "calling dirstate.beginparentchange")

        self._dirty = self._dirtypl = True
        oldp2 = self._pl[1]
        self._pl = p1, p2
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            for f, s in self._map.iteritems():
                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    if f in self._copymap:
                        copies[f] = self._copymap[f]
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    if f in self._copymap:
                        copies[f] = self._copymap[f]
                    self.add(f)
        return copies

    def setbranch(self, branch):
        self._branch = encoding.fromlocal(branch)
        f = self._opener('branch', 'w', atomictemp=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise

    def _opendirstatefile(self):
        fp, mode = _trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

    def _read(self):
        self._map = {}
        self._copymap = {}
        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map = parsers.dict_new_presized(len(st) / 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self._copymap, st)
        if not self._dirtypl:
            self._pl = p

    def invalidate(self):
        for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
                  "_pl", "_dirs", "_ignore", "_nonnormalset"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._parentwriters = 0

    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._copymap[dest] = source
        elif dest in self._copymap:
            del self._copymap[dest]

    def copied(self, file):
        return self._copymap.get(file, None)

    def copies(self):
        return self._copymap

    def _droppath(self, f):
        if self[f] not in "?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)

        if "_filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            if normed in self._filefoldmap:
                del self._filefoldmap[normed]

    def _addpath(self, f, state, mode, size, mtime):
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            scmutil.checkfilename(f)
            if f in self._dirs:
                raise error.Abort(_('directory %r already in dirstate') % f)
            # shadows
            for d in util.finddirs(f):
                if d in self._dirs:
                    break
                if d in self._map and self[d] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') % (d, f))
        if oldstate in "?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        self._dirty = True
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self._nonnormalset.add(f)

    def normal(self, f):
        '''Mark a file normal and clean.'''
        s = os.lstat(self._join(f))
        mtime = s.st_mtime
        self._addpath(f, 'n', s.st_mode,
                      s.st_size & _rangemask, mtime & _rangemask)
        if f in self._copymap:
            del self._copymap[f]
        if f in self._nonnormalset:
            self._nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime

    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid and f in self._map:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map[f]
            if entry[0] == 'r' and entry[2] in (-1, -2):
                source = self._copymap.get(f)
                if entry[2] == -1:
                    self.merge(f)
                elif entry[2] == -2:
                    self.otherparent(f)
                if source:
                    self.copy(source, f)
                return
            if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                return
        self._addpath(f, 'n', 0, -1, -1)
        if f in self._copymap:
            del self._copymap[f]
        if f in self._nonnormalset:
            self._nonnormalset.remove(f)

    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, 'n', 0, -2, -1)

        if f in self._copymap:
            del self._copymap[f]

    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, 'a', 0, -1, -1)
        if f in self._copymap:
            del self._copymap[f]

    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        self._droppath(f)
        size = 0
        if self._pl[1] != nullid and f in self._map:
            # backup the previous state
            entry = self._map[f]
            if entry[0] == 'm': # merge
                size = -1
            elif entry[0] == 'n' and entry[2] == -2: # other parent
                size = -2
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self._nonnormalset.add(f)
        if size == 0 and f in self._copymap:
            del self._copymap[f]

    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == nullid:
            return self.normallookup(f)
        return self.otherparent(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        if f in self._map:
            self._dirty = True
            self._droppath(f)
            del self._map[f]
            if f in self._nonnormalset:
                self._nonnormalset.remove(f)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded

    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._filefoldmap)
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._filefoldmap.get(normed, None)
        if folded is None:
            folded = self._dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._dirfoldmap)
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing paths are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path

    def clear(self):
        self._map = {}
        self._nonnormalset = set()
        if "_dirs" in self.__dict__:
            delattr(self, "_dirs")
        self._copymap = {}
        self._pl = [nullid, nullid]
        self._lastnormaltime = 0
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime

        for f in changedfiles:
            mode = 0o666
            if f in allfiles and 'x' in allfiles.flags(f):
                mode = 0o777

            if f in allfiles:
                self._map[f] = dirstatetuple('n', mode, -1, 0)
            else:
                self._map.pop(f, None)
                if f in self._nonnormalset:
                    self._nonnormalset.remove(f)

        self._pl = (parent, nullid)
        self._dirty = True

    def write(self, tr=_token):
        if not self._dirty:
            return

        filename = self._filename
        if tr is _token: # not explicitly specified
            self._ui.deprecwarn('use dirstate.write with '
                                'repo.currenttransaction()',
                                '3.9')

            if self._opener.lexists(self._pendingfilename):
                # if pending file already exists, in-memory changes
                # should be written into it, because it has priority
                # to '.hg/dirstate' at reading under HG_PENDING mode
                filename = self._pendingfilename
        elif tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamps.
            # delayed writing re-raises the "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            dmap = self._map
            for f, e in dmap.iteritems():
                if e[0] == 'n' and e[3] == now:
                    dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
                    self._nonnormalset.add(f)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0

            # delay writing in-memory changes out
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')
            return

        st = self._opener(filename, "w", atomictemp=True)
        self._writedirstate(st)

    def _writedirstate(self, st):
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st).st_mtime & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # the timestamp of each entry in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in self._map.iteritems():
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    break

        st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
        self._nonnormalset = nonnormalentries(self._map)
        st.close()
        self._lastnormaltime = 0
        self._dirty = self._dirtypl = False

    def _dirignore(self, f):
        if f == '.':
            return False
        if self._ignore(f):
            return True
        for p in util.finddirs(f):
            if self._ignore(p):
                return True
        return False

    def _ignorefiles(self):
        files = []
        if os.path.exists(self._join('.hgignore')):
            files.append(self._join('.hgignore'))
        for name, path in self._ui.configitems("ui"):
            if name == 'ignore' or name.startswith('ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files

    def _ignorefileandline(self, f):
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(i, self._ui.warn,
                                                sourceinfo=True)
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, 'glob')
                if kind == "subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(self._root, '', [], [pattern],
                                   warn=self._ui.warn)
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, "")

    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '.' in files:
            files = ['.']
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        alldirs = None
        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if alldirs is None:
                        alldirs = util.dirs(dmap)
                    if nf in alldirs:
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, inst.strerror)

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound

932 def walk(self, match, subrepos, unknown, ignored, full=True):
932 def walk(self, match, subrepos, unknown, ignored, full=True):
933 '''
933 '''
934 Walk recursively through the directory tree, finding all files
934 Walk recursively through the directory tree, finding all files
935 matched by match.
935 matched by match.
936
936
937 If full is False, maybe skip some known-clean files.
937 If full is False, maybe skip some known-clean files.
938
938
939 Return a dict mapping filename to stat-like object (either
939 Return a dict mapping filename to stat-like object (either
940 mercurial.osutil.stat instance or return value of os.stat()).
940 mercurial.osutil.stat instance or return value of os.stat()).
941
941
942 '''
942 '''
943 # full is a flag that extensions that hook into walk can use -- this
943 # full is a flag that extensions that hook into walk can use -- this
944 # implementation doesn't use it at all. This satisfies the contract
944 # implementation doesn't use it at all. This satisfies the contract
945 # because we only guarantee a "maybe".
945 # because we only guarantee a "maybe".
946
946
947 if ignored:
947 if ignored:
948 ignore = util.never
948 ignore = util.never
949 dirignore = util.never
949 dirignore = util.never
950 elif unknown:
950 elif unknown:
951 ignore = self._ignore
951 ignore = self._ignore
952 dirignore = self._dirignore
952 dirignore = self._dirignore
953 else:
953 else:
954 # if not unknown and not ignored, drop dir recursion and step 2
954 # if not unknown and not ignored, drop dir recursion and step 2
955 ignore = util.always
955 ignore = util.always
956 dirignore = util.always
956 dirignore = util.always
957
957
958 matchfn = match.matchfn
958 matchfn = match.matchfn
959 matchalways = match.always()
959 matchalways = match.always()
960 matchtdir = match.traversedir
960 matchtdir = match.traversedir
961 dmap = self._map
961 dmap = self._map
962 listdir = osutil.listdir
962 listdir = osutil.listdir
963 lstat = os.lstat
963 lstat = os.lstat
964 dirkind = stat.S_IFDIR
964 dirkind = stat.S_IFDIR
965 regkind = stat.S_IFREG
965 regkind = stat.S_IFREG
966 lnkkind = stat.S_IFLNK
966 lnkkind = stat.S_IFLNK
967 join = self._join
967 join = self._join
968
968
969 exact = skipstep3 = False
969 exact = skipstep3 = False
970 if match.isexact(): # match.exact
970 if match.isexact(): # match.exact
971 exact = True
971 exact = True
972 dirignore = util.always # skip step 2
972 dirignore = util.always # skip step 2
973 elif match.prefix(): # match.match, no patterns
973 elif match.prefix(): # match.match, no patterns
974 skipstep3 = True
974 skipstep3 = True
975
975
976 if not exact and self._checkcase:
976 if not exact and self._checkcase:
977 normalize = self._normalize
977 normalize = self._normalize
978 normalizefile = self._normalizefile
978 normalizefile = self._normalizefile
979 skipstep3 = False
979 skipstep3 = False
980 else:
980 else:
981 normalize = self._normalize
981 normalize = self._normalize
982 normalizefile = None
982 normalizefile = None
983
983
984 # step 1: find all explicit files
984 # step 1: find all explicit files
985 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
985 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
986
986
987 skipstep3 = skipstep3 and not (work or dirsnotfound)
987 skipstep3 = skipstep3 and not (work or dirsnotfound)
988 work = [d for d in work if not dirignore(d[0])]
988 work = [d for d in work if not dirignore(d[0])]
989
989
990 # step 2: visit subdirectories
990 # step 2: visit subdirectories
991 def traverse(work, alreadynormed):
991 def traverse(work, alreadynormed):
992 wadd = work.append
992 wadd = work.append
993 while work:
993 while work:
994 nd = work.pop()
994 nd = work.pop()
995 skip = None
995 skip = None
996 if nd == '.':
996 if nd == '.':
997 nd = ''
997 nd = ''
998 else:
998 else:
999 skip = '.hg'
999 skip = '.hg'
1000 try:
1000 try:
1001 entries = listdir(join(nd), stat=True, skip=skip)
1001 entries = listdir(join(nd), stat=True, skip=skip)
1002 except OSError as inst:
1002 except OSError as inst:
1003 if inst.errno in (errno.EACCES, errno.ENOENT):
1003 if inst.errno in (errno.EACCES, errno.ENOENT):
1004 match.bad(self.pathto(nd), inst.strerror)
1004 match.bad(self.pathto(nd), inst.strerror)
1005 continue
1005 continue
1006 raise
1006 raise
1007 for f, kind, st in entries:
1007 for f, kind, st in entries:
1008 if normalizefile:
1008 if normalizefile:
1009 # even though f might be a directory, we're only
1009 # even though f might be a directory, we're only
1010 # interested in comparing it to files currently in the
1010 # interested in comparing it to files currently in the
1011 # dmap -- therefore normalizefile is enough
1011 # dmap -- therefore normalizefile is enough
1012 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1012 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1013 True)
1013 True)
1014 else:
1014 else:
1015 nf = nd and (nd + "/" + f) or f
1015 nf = nd and (nd + "/" + f) or f
1016 if nf not in results:
1016 if nf not in results:
1017 if kind == dirkind:
1017 if kind == dirkind:
1018 if not ignore(nf):
1018 if not ignore(nf):
1019 if matchtdir:
1019 if matchtdir:
1020 matchtdir(nf)
1020 matchtdir(nf)
1021 wadd(nf)
1021 wadd(nf)
1022 if nf in dmap and (matchalways or matchfn(nf)):
1022 if nf in dmap and (matchalways or matchfn(nf)):
1023 results[nf] = None
1023 results[nf] = None
1024 elif kind == regkind or kind == lnkkind:
1024 elif kind == regkind or kind == lnkkind:
1025 if nf in dmap:
1025 if nf in dmap:
1026 if matchalways or matchfn(nf):
1026 if matchalways or matchfn(nf):
1027 results[nf] = st
1027 results[nf] = st
1028 elif ((matchalways or matchfn(nf))
1028 elif ((matchalways or matchfn(nf))
1029 and not ignore(nf)):
1029 and not ignore(nf)):
1030 # unknown file -- normalize if necessary
1030 # unknown file -- normalize if necessary
1031 if not alreadynormed:
1031 if not alreadynormed:
1032 nf = normalize(nf, False, True)
1032 nf = normalize(nf, False, True)
1033 results[nf] = st
1033 results[nf] = st
1034 elif nf in dmap and (matchalways or matchfn(nf)):
1034 elif nf in dmap and (matchalways or matchfn(nf)):
1035 results[nf] = None
1035 results[nf] = None
1036
1036
1037 for nd, d in work:
1037 for nd, d in work:
1038 # alreadynormed means that traverse doesn't have to do any
1038 # alreadynormed means that traverse doesn't have to do any
1039 # expensive directory normalization
1039 # expensive directory normalization
1040 alreadynormed = not normalize or nd == d
1040 alreadynormed = not normalize or nd == d
1041 traverse([d], alreadynormed)
1041 traverse([d], alreadynormed)
1042
1042
1043 for s in subrepos:
1043 for s in subrepos:
1044 del results[s]
1044 del results[s]
1045 del results['.hg']
1045 del results['.hg']
1046
1046
1047 # step 3: visit remaining files from dmap
1047 # step 3: visit remaining files from dmap
1048 if not skipstep3 and not exact:
1048 if not skipstep3 and not exact:
1049 # If a dmap file is not in results yet, it was either
1049 # If a dmap file is not in results yet, it was either
1050 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1050 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1051 # symlink directory.
1051 # symlink directory.
1052 if not results and matchalways:
1052 if not results and matchalways:
1053 visit = dmap.keys()
1053 visit = dmap.keys()
1054 else:
1054 else:
1055 visit = [f for f in dmap if f not in results and matchfn(f)]
1055 visit = [f for f in dmap if f not in results and matchfn(f)]
1056 visit.sort()
1056 visit.sort()
1057
1057
1058 if unknown:
1058 if unknown:
1059 # unknown == True means we walked all dirs under the roots
1059 # unknown == True means we walked all dirs under the roots
1060 # that weren't ignored, and everything that matched was stat'ed
1060 # that weren't ignored, and everything that matched was stat'ed
1061 # and is already in results.
1061 # and is already in results.
1062 # The rest must thus be ignored or under a symlink.
1062 # The rest must thus be ignored or under a symlink.
1063 audit_path = pathutil.pathauditor(self._root)
1063 audit_path = pathutil.pathauditor(self._root)
1064
1064
1065 for nf in iter(visit):
1065 for nf in iter(visit):
1066 # If a stat for the same file was already added with a
1066 # If a stat for the same file was already added with a
1067 # different case, don't add one for this, since that would
1067 # different case, don't add one for this, since that would
1068 # make it appear as if the file exists under both names
1068 # make it appear as if the file exists under both names
1069 # on disk.
1069 # on disk.
1070 if (normalizefile and
1070 if (normalizefile and
1071 normalizefile(nf, True, True) in results):
1071 normalizefile(nf, True, True) in results):
1072 results[nf] = None
1072 results[nf] = None
1073 # Report ignored items in the dmap as long as they are not
1073 # Report ignored items in the dmap as long as they are not
1074 # under a symlink directory.
1074 # under a symlink directory.
1075 elif audit_path.check(nf):
1075 elif audit_path.check(nf):
1076 try:
1076 try:
1077 results[nf] = lstat(join(nf))
1077 results[nf] = lstat(join(nf))
1078 # file was just ignored, no links, and exists
1078 # file was just ignored, no links, and exists
1079 except OSError:
1079 except OSError:
1080 # file doesn't exist
1080 # file doesn't exist
1081 results[nf] = None
1081 results[nf] = None
1082 else:
1082 else:
1083 # It's either missing or under a symlink directory
1083 # It's either missing or under a symlink directory
1084 # which we report as missing in this case
1084 # which we report as missing in this case
1085 results[nf] = None
1085 results[nf] = None
1086 else:
1086 else:
1087 # We may not have walked the full directory tree above,
1087 # We may not have walked the full directory tree above,
1088 # so stat and check everything we missed.
1088 # so stat and check everything we missed.
1089 nf = iter(visit).next
1089 nf = iter(visit).next
1090 for st in util.statfiles([join(i) for i in visit]):
1090 for st in util.statfiles([join(i) for i in visit]):
1091 results[nf()] = st
1091 results[nf()] = st
1092 return results
1092 return results
1093
1093
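A minimal usage sketch of walk() (not part of the change under review): it assumes a Python 2 interpreter with this era's Mercurial importable and the current directory being a repository; matchmod.always(root, cwd) is the match-everything helper of this Mercurial version, and the unknown/ignored flag values are illustrative choices only.

from mercurial import hg, match as matchmod, ui as uimod

repo = hg.repository(uimod.ui(), '.')      # assumes cwd is a repository
m = matchmod.always(repo.root, repo.root)  # match every file
# unknown=True, ignored=False: report unknown files, skip ignored ones
for fn, st in repo.dirstate.walk(m, [], True, False).iteritems():
    # st is a stat-like object for files seen on disk, or None for
    # dirstate entries that were matched without being stat'ed
    print fn, st is not None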
1094 def status(self, match, subrepos, ignored, clean, unknown):
1094 def status(self, match, subrepos, ignored, clean, unknown):
1095 '''Determine the status of the working copy relative to the
1095 '''Determine the status of the working copy relative to the
1096 dirstate and return a pair of (unsure, status), where status is of type
1096 dirstate and return a pair of (unsure, status), where status is of type
1097 scmutil.status and:
1097 scmutil.status and:
1098
1098
1099 unsure:
1099 unsure:
1100 files that might have been modified since the dirstate was
1100 files that might have been modified since the dirstate was
1101 written, but need to be read to be sure (size is the same
1101 written, but need to be read to be sure (size is the same
1102 but mtime differs)
1102 but mtime differs)
1103 status.modified:
1103 status.modified:
1104 files that have definitely been modified since the dirstate
1104 files that have definitely been modified since the dirstate
1105 was written (different size or mode)
1105 was written (different size or mode)
1106 status.clean:
1106 status.clean:
1107 files that have definitely not been modified since the
1107 files that have definitely not been modified since the
1108 dirstate was written
1108 dirstate was written
1109 '''
1109 '''
1110 listignored, listclean, listunknown = ignored, clean, unknown
1110 listignored, listclean, listunknown = ignored, clean, unknown
1111 lookup, modified, added, unknown, ignored = [], [], [], [], []
1111 lookup, modified, added, unknown, ignored = [], [], [], [], []
1112 removed, deleted, clean = [], [], []
1112 removed, deleted, clean = [], [], []
1113
1113
1114 dmap = self._map
1114 dmap = self._map
1115 ladd = lookup.append # aka "unsure"
1115 ladd = lookup.append # aka "unsure"
1116 madd = modified.append
1116 madd = modified.append
1117 aadd = added.append
1117 aadd = added.append
1118 uadd = unknown.append
1118 uadd = unknown.append
1119 iadd = ignored.append
1119 iadd = ignored.append
1120 radd = removed.append
1120 radd = removed.append
1121 dadd = deleted.append
1121 dadd = deleted.append
1122 cadd = clean.append
1122 cadd = clean.append
1123 mexact = match.exact
1123 mexact = match.exact
1124 dirignore = self._dirignore
1124 dirignore = self._dirignore
1125 checkexec = self._checkexec
1125 checkexec = self._checkexec
1126 copymap = self._copymap
1126 copymap = self._copymap
1127 lastnormaltime = self._lastnormaltime
1127 lastnormaltime = self._lastnormaltime
1128
1128
1129 # We need to do full walks when either
1129 # We need to do full walks when either
1130 # - we're listing all clean files, or
1130 # - we're listing all clean files, or
1131 # - match.traversedir does something, because match.traversedir should
1131 # - match.traversedir does something, because match.traversedir should
1132 # be called for every dir in the working dir
1132 # be called for every dir in the working dir
1133 full = listclean or match.traversedir is not None
1133 full = listclean or match.traversedir is not None
1134 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1134 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1135 full=full).iteritems():
1135 full=full).iteritems():
1136 if fn not in dmap:
1136 if fn not in dmap:
1137 if (listignored or mexact(fn)) and dirignore(fn):
1137 if (listignored or mexact(fn)) and dirignore(fn):
1138 if listignored:
1138 if listignored:
1139 iadd(fn)
1139 iadd(fn)
1140 else:
1140 else:
1141 uadd(fn)
1141 uadd(fn)
1142 continue
1142 continue
1143
1143
1144 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1144 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1145 # written like that for performance reasons. dmap[fn] is not a
1145 # written like that for performance reasons. dmap[fn] is not a
1146 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1146 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1147 # opcode has fast paths when the value to be unpacked is a tuple or
1147 # opcode has fast paths when the value to be unpacked is a tuple or
1148 # a list, but falls back to creating a full-fledged iterator in
1148 # a list, but falls back to creating a full-fledged iterator in
1149 # general. That is much slower than simply accessing and storing the
1149 # general. That is much slower than simply accessing and storing the
1150 # tuple members one by one.
1150 # tuple members one by one.
1151 t = dmap[fn]
1151 t = dmap[fn]
1152 state = t[0]
1152 state = t[0]
1153 mode = t[1]
1153 mode = t[1]
1154 size = t[2]
1154 size = t[2]
1155 time = t[3]
1155 time = t[3]
1156
1156
1157 if not st and state in "nma":
1157 if not st and state in "nma":
1158 dadd(fn)
1158 dadd(fn)
1159 elif state == 'n':
1159 elif state == 'n':
1160 if (size >= 0 and
1160 if (size >= 0 and
1161 ((size != st.st_size and size != st.st_size & _rangemask)
1161 ((size != st.st_size and size != st.st_size & _rangemask)
1162 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1162 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1163 or size == -2 # other parent
1163 or size == -2 # other parent
1164 or fn in copymap):
1164 or fn in copymap):
1165 madd(fn)
1165 madd(fn)
1166 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1166 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1167 ladd(fn)
1167 ladd(fn)
1168 elif st.st_mtime == lastnormaltime:
1168 elif st.st_mtime == lastnormaltime:
1169 # fn may have just been marked as normal and it may have
1169 # fn may have just been marked as normal and it may have
1170 # changed in the same second without changing its size.
1170 # changed in the same second without changing its size.
1171 # This can happen if we quickly do multiple commits.
1171 # This can happen if we quickly do multiple commits.
1172 # Force lookup, so we don't miss such a racy file change.
1172 # Force lookup, so we don't miss such a racy file change.
1173 ladd(fn)
1173 ladd(fn)
1174 elif listclean:
1174 elif listclean:
1175 cadd(fn)
1175 cadd(fn)
1176 elif state == 'm':
1176 elif state == 'm':
1177 madd(fn)
1177 madd(fn)
1178 elif state == 'a':
1178 elif state == 'a':
1179 aadd(fn)
1179 aadd(fn)
1180 elif state == 'r':
1180 elif state == 'r':
1181 radd(fn)
1181 radd(fn)
1182
1182
1183 return (lookup, scmutil.status(modified, added, removed, deleted,
1183 return (lookup, scmutil.status(modified, added, removed, deleted,
1184 unknown, ignored, clean))
1184 unknown, ignored, clean))
1185
1185
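A hedged sketch of how the (unsure, status) pair might be consumed (same assumptions as the walk() sketch above; real callers normally go through repo.status() rather than calling dirstate.status() directly):

from mercurial import hg, match as matchmod, ui as uimod

repo = hg.repository(uimod.ui(), '.')
m = matchmod.always(repo.root, repo.root)
unsure, st = repo.dirstate.status(m, [], ignored=False, clean=False,
                                  unknown=True)
# 'unsure' names need a content comparison against the parent to decide;
# the scmutil.status fields are plain lists of file names
print 'lookup  :', unsure
print 'modified:', st.modified
print 'added   :', st.added
print 'removed :', st.removed
print 'deleted :', st.deleted
print 'unknown :', st.unknown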
1186 def matches(self, match):
1186 def matches(self, match):
1187 '''
1187 '''
1188 return files in the dirstate (in whatever state) filtered by match
1188 return files in the dirstate (in whatever state) filtered by match
1189 '''
1189 '''
1190 dmap = self._map
1190 dmap = self._map
1191 if match.always():
1191 if match.always():
1192 return dmap.keys()
1192 return dmap.keys()
1193 files = match.files()
1193 files = match.files()
1194 if match.isexact():
1194 if match.isexact():
1195 # fast path -- filter the other way around, since typically files is
1195 # fast path -- filter the other way around, since typically files is
1196 # much smaller than dmap
1196 # much smaller than dmap
1197 return [f for f in files if f in dmap]
1197 return [f for f in files if f in dmap]
1198 if match.prefix() and all(fn in dmap for fn in files):
1198 if match.prefix() and all(fn in dmap for fn in files):
1199 # fast path -- all the values are known to be files, so just return
1199 # fast path -- all the values are known to be files, so just return
1200 # that
1200 # that
1201 return list(files)
1201 return list(files)
1202 return [f for f in dmap if match(f)]
1202 return [f for f in dmap if match(f)]
1203
1203
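A small sketch of the exact-matcher fast path (the two file names are hypothetical; same environment assumptions as above):

from mercurial import hg, match as matchmod, ui as uimod

repo = hg.repository(uimod.ui(), '.')
# with an exact matcher, matches() filters match.files() against the
# dirstate map instead of scanning every tracked file
m = matchmod.exact(repo.root, repo.root, ['README', 'setup.py'])
print repo.dirstate.matches(m)   # only the names that are actually tracked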
1204 def _actualfilename(self, tr):
1204 def _actualfilename(self, tr):
1205 if tr:
1205 if tr:
1206 return self._pendingfilename
1206 return self._pendingfilename
1207 else:
1207 else:
1208 return self._filename
1208 return self._filename
1209
1209
1210 def savebackup(self, tr, suffix):
1210 def savebackup(self, tr, suffix='', prefix=''):
1211 '''Save current dirstate into backup file with prefix and suffix'''
1211 '''Save current dirstate into backup file with prefix and suffix'''
1212 filename = self._actualfilename(tr)
1212 filename = self._actualfilename(tr)
1213
1213
1214 # use '_writedirstate' instead of 'write' to write changes certainly,
1214 # use '_writedirstate' instead of 'write' to write changes certainly,
1215 # because the latter omits writing out if transaction is running.
1215 # because the latter omits writing out if transaction is running.
1216 # output file will be used to create backup of dirstate at this point.
1216 # output file will be used to create backup of dirstate at this point.
1217 self._writedirstate(self._opener(filename, "w", atomictemp=True))
1217 self._writedirstate(self._opener(filename, "w", atomictemp=True))
1218
1218
1219 if tr:
1219 if tr:
1220 # ensure that subsequent tr.writepending returns True for
1220 # ensure that subsequent tr.writepending returns True for
1221 # changes written out above, even if dirstate is never
1221 # changes written out above, even if dirstate is never
1222 # changed after this
1222 # changed after this
1223 tr.addfilegenerator('dirstate', (self._filename,),
1223 tr.addfilegenerator('dirstate', (self._filename,),
1224 self._writedirstate, location='plain')
1224 self._writedirstate, location='plain')
1225
1225
1226 # ensure that pending file written above is unlinked at
1226 # ensure that pending file written above is unlinked at
1227 # failure, even if tr.writepending isn't invoked until the
1227 # failure, even if tr.writepending isn't invoked until the
1228 # end of this transaction
1228 # end of this transaction
1229 tr.registertmp(filename, location='plain')
1229 tr.registertmp(filename, location='plain')
1230
1230
1231 self._opener.write(filename + suffix, self._opener.tryread(filename))
1231 self._opener.write(prefix + filename + suffix,
1232 self._opener.tryread(filename))
1232
1233
1233 def restorebackup(self, tr, suffix):
1234 def restorebackup(self, tr, suffix='', prefix=''):
1234 '''Restore dirstate from backup file with prefix and suffix'''
1235 '''Restore dirstate from backup file with prefix and suffix'''
1235 # this "invalidate()" prevents "wlock.release()" from writing
1236 # this "invalidate()" prevents "wlock.release()" from writing
1236 # changes of dirstate out after restoring from backup file
1237 # changes of dirstate out after restoring from backup file
1237 self.invalidate()
1238 self.invalidate()
1238 filename = self._actualfilename(tr)
1239 filename = self._actualfilename(tr)
1239 self._opener.rename(filename + suffix, filename)
1240 self._opener.rename(prefix + filename + suffix, filename)
1240
1241
1241 def clearbackup(self, tr, suffix):
1242 def clearbackup(self, tr, suffix='', prefix=''):
1241 '''Clear backup file with prefix and suffix'''
1242 '''Clear backup file with prefix and suffix'''
1243 filename = self._actualfilename(tr)
1244 filename = self._actualfilename(tr)
1244 self._opener.unlink(filename + suffix)
1245 self._opener.unlink(prefix + filename + suffix)
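A sketch of how a caller might use the new prefix/suffix arguments to create, restore and discard a backup named '<prefix>' + dirstate file name + '<suffix>' (the prefix/suffix values and the surrounding workflow are hypothetical; same environment assumptions as the sketches above):

from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), '.')
wlock = repo.wlock()
try:
    tr = repo.currenttransaction()   # may be None outside a transaction
    # write the dirstate out and copy it to 'backup.dirstate.example'
    # (or 'backup.dirstate.pending.example' while a transaction is pending)
    repo.dirstate.savebackup(tr, prefix='backup.', suffix='.example')
    try:
        pass  # ... mutate the working directory / dirstate here ...
    except Exception:
        # undo the mutation by renaming the backup file back in place
        repo.dirstate.restorebackup(tr, prefix='backup.', suffix='.example')
        raise
    else:
        # keep the new dirstate and remove the backup file
        repo.dirstate.clearbackup(tr, prefix='backup.', suffix='.example')
finally:
    wlock.release()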