##// END OF EJS Templates
deprecation: enforce thew 'tr' argument of 'dirstate.write' (API)...
Pierre-Yves David -
r29673:52ff07e1 default
parent child Browse files
Show More
@@ -1,1253 +1,1241
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid
16 from .node import nullid
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 match as matchmod,
20 match as matchmod,
21 osutil,
21 osutil,
22 parsers,
22 parsers,
23 pathutil,
23 pathutil,
24 scmutil,
24 scmutil,
25 util,
25 util,
26 )
26 )
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29 filecache = scmutil.filecache
29 filecache = scmutil.filecache
30 _rangemask = 0x7fffffff
30 _rangemask = 0x7fffffff
31
31
32 dirstatetuple = parsers.dirstatetuple
32 dirstatetuple = parsers.dirstatetuple
33
33
34 class repocache(filecache):
34 class repocache(filecache):
35 """filecache for files in .hg/"""
35 """filecache for files in .hg/"""
36 def join(self, obj, fname):
36 def join(self, obj, fname):
37 return obj._opener.join(fname)
37 return obj._opener.join(fname)
38
38
39 class rootcache(filecache):
39 class rootcache(filecache):
40 """filecache for files in the repository root"""
40 """filecache for files in the repository root"""
41 def join(self, obj, fname):
41 def join(self, obj, fname):
42 return obj._join(fname)
42 return obj._join(fname)
43
43
44 def _getfsnow(vfs):
44 def _getfsnow(vfs):
45 '''Get "now" timestamp on filesystem'''
45 '''Get "now" timestamp on filesystem'''
46 tmpfd, tmpname = vfs.mkstemp()
46 tmpfd, tmpname = vfs.mkstemp()
47 try:
47 try:
48 return os.fstat(tmpfd).st_mtime
48 return os.fstat(tmpfd).st_mtime
49 finally:
49 finally:
50 os.close(tmpfd)
50 os.close(tmpfd)
51 vfs.unlink(tmpname)
51 vfs.unlink(tmpname)
52
52
53 def nonnormalentries(dmap):
53 def nonnormalentries(dmap):
54 '''Compute the nonnormal dirstate entries from the dmap'''
54 '''Compute the nonnormal dirstate entries from the dmap'''
55 try:
55 try:
56 return parsers.nonnormalentries(dmap)
56 return parsers.nonnormalentries(dmap)
57 except AttributeError:
57 except AttributeError:
58 return set(fname for fname, e in dmap.iteritems()
58 return set(fname for fname, e in dmap.iteritems()
59 if e[0] != 'n' or e[3] == -1)
59 if e[0] != 'n' or e[3] == -1)
60
60
61 def _trypending(root, vfs, filename):
61 def _trypending(root, vfs, filename):
62 '''Open file to be read according to HG_PENDING environment variable
62 '''Open file to be read according to HG_PENDING environment variable
63
63
64 This opens '.pending' of specified 'filename' only when HG_PENDING
64 This opens '.pending' of specified 'filename' only when HG_PENDING
65 is equal to 'root'.
65 is equal to 'root'.
66
66
67 This returns '(fp, is_pending_opened)' tuple.
67 This returns '(fp, is_pending_opened)' tuple.
68 '''
68 '''
69 if root == os.environ.get('HG_PENDING'):
69 if root == os.environ.get('HG_PENDING'):
70 try:
70 try:
71 return (vfs('%s.pending' % filename), True)
71 return (vfs('%s.pending' % filename), True)
72 except IOError as inst:
72 except IOError as inst:
73 if inst.errno != errno.ENOENT:
73 if inst.errno != errno.ENOENT:
74 raise
74 raise
75 return (vfs(filename), False)
75 return (vfs(filename), False)
76
76
77 _token = object()
78
79 class dirstate(object):
77 class dirstate(object):
80
78
81 def __init__(self, opener, ui, root, validate):
79 def __init__(self, opener, ui, root, validate):
82 '''Create a new dirstate object.
80 '''Create a new dirstate object.
83
81
84 opener is an open()-like callable that can be used to open the
82 opener is an open()-like callable that can be used to open the
85 dirstate file; root is the root of the directory tracked by
83 dirstate file; root is the root of the directory tracked by
86 the dirstate.
84 the dirstate.
87 '''
85 '''
88 self._opener = opener
86 self._opener = opener
89 self._validate = validate
87 self._validate = validate
90 self._root = root
88 self._root = root
91 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
89 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
92 # UNC path pointing to root share (issue4557)
90 # UNC path pointing to root share (issue4557)
93 self._rootdir = pathutil.normasprefix(root)
91 self._rootdir = pathutil.normasprefix(root)
94 # internal config: ui.forcecwd
92 # internal config: ui.forcecwd
95 forcecwd = ui.config('ui', 'forcecwd')
93 forcecwd = ui.config('ui', 'forcecwd')
96 if forcecwd:
94 if forcecwd:
97 self._cwd = forcecwd
95 self._cwd = forcecwd
98 self._dirty = False
96 self._dirty = False
99 self._dirtypl = False
97 self._dirtypl = False
100 self._lastnormaltime = 0
98 self._lastnormaltime = 0
101 self._ui = ui
99 self._ui = ui
102 self._filecache = {}
100 self._filecache = {}
103 self._parentwriters = 0
101 self._parentwriters = 0
104 self._filename = 'dirstate'
102 self._filename = 'dirstate'
105 self._pendingfilename = '%s.pending' % self._filename
103 self._pendingfilename = '%s.pending' % self._filename
106
104
107 # for consistent view between _pl() and _read() invocations
105 # for consistent view between _pl() and _read() invocations
108 self._pendingmode = None
106 self._pendingmode = None
109
107
110 def beginparentchange(self):
108 def beginparentchange(self):
111 '''Marks the beginning of a set of changes that involve changing
109 '''Marks the beginning of a set of changes that involve changing
112 the dirstate parents. If there is an exception during this time,
110 the dirstate parents. If there is an exception during this time,
113 the dirstate will not be written when the wlock is released. This
111 the dirstate will not be written when the wlock is released. This
114 prevents writing an incoherent dirstate where the parent doesn't
112 prevents writing an incoherent dirstate where the parent doesn't
115 match the contents.
113 match the contents.
116 '''
114 '''
117 self._parentwriters += 1
115 self._parentwriters += 1
118
116
119 def endparentchange(self):
117 def endparentchange(self):
120 '''Marks the end of a set of changes that involve changing the
118 '''Marks the end of a set of changes that involve changing the
121 dirstate parents. Once all parent changes have been marked done,
119 dirstate parents. Once all parent changes have been marked done,
122 the wlock will be free to write the dirstate on release.
120 the wlock will be free to write the dirstate on release.
123 '''
121 '''
124 if self._parentwriters > 0:
122 if self._parentwriters > 0:
125 self._parentwriters -= 1
123 self._parentwriters -= 1
126
124
127 def pendingparentchange(self):
125 def pendingparentchange(self):
128 '''Returns true if the dirstate is in the middle of a set of changes
126 '''Returns true if the dirstate is in the middle of a set of changes
129 that modify the dirstate parent.
127 that modify the dirstate parent.
130 '''
128 '''
131 return self._parentwriters > 0
129 return self._parentwriters > 0
132
130
133 @propertycache
131 @propertycache
134 def _map(self):
132 def _map(self):
135 '''Return the dirstate contents as a map from filename to
133 '''Return the dirstate contents as a map from filename to
136 (state, mode, size, time).'''
134 (state, mode, size, time).'''
137 self._read()
135 self._read()
138 return self._map
136 return self._map
139
137
140 @propertycache
138 @propertycache
141 def _copymap(self):
139 def _copymap(self):
142 self._read()
140 self._read()
143 return self._copymap
141 return self._copymap
144
142
145 @propertycache
143 @propertycache
146 def _nonnormalset(self):
144 def _nonnormalset(self):
147 return nonnormalentries(self._map)
145 return nonnormalentries(self._map)
148
146
149 @propertycache
147 @propertycache
150 def _filefoldmap(self):
148 def _filefoldmap(self):
151 try:
149 try:
152 makefilefoldmap = parsers.make_file_foldmap
150 makefilefoldmap = parsers.make_file_foldmap
153 except AttributeError:
151 except AttributeError:
154 pass
152 pass
155 else:
153 else:
156 return makefilefoldmap(self._map, util.normcasespec,
154 return makefilefoldmap(self._map, util.normcasespec,
157 util.normcasefallback)
155 util.normcasefallback)
158
156
159 f = {}
157 f = {}
160 normcase = util.normcase
158 normcase = util.normcase
161 for name, s in self._map.iteritems():
159 for name, s in self._map.iteritems():
162 if s[0] != 'r':
160 if s[0] != 'r':
163 f[normcase(name)] = name
161 f[normcase(name)] = name
164 f['.'] = '.' # prevents useless util.fspath() invocation
162 f['.'] = '.' # prevents useless util.fspath() invocation
165 return f
163 return f
166
164
167 @propertycache
165 @propertycache
168 def _dirfoldmap(self):
166 def _dirfoldmap(self):
169 f = {}
167 f = {}
170 normcase = util.normcase
168 normcase = util.normcase
171 for name in self._dirs:
169 for name in self._dirs:
172 f[normcase(name)] = name
170 f[normcase(name)] = name
173 return f
171 return f
174
172
175 @repocache('branch')
173 @repocache('branch')
176 def _branch(self):
174 def _branch(self):
177 try:
175 try:
178 return self._opener.read("branch").strip() or "default"
176 return self._opener.read("branch").strip() or "default"
179 except IOError as inst:
177 except IOError as inst:
180 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
181 raise
179 raise
182 return "default"
180 return "default"
183
181
184 @propertycache
182 @propertycache
185 def _pl(self):
183 def _pl(self):
186 try:
184 try:
187 fp = self._opendirstatefile()
185 fp = self._opendirstatefile()
188 st = fp.read(40)
186 st = fp.read(40)
189 fp.close()
187 fp.close()
190 l = len(st)
188 l = len(st)
191 if l == 40:
189 if l == 40:
192 return st[:20], st[20:40]
190 return st[:20], st[20:40]
193 elif l > 0 and l < 40:
191 elif l > 0 and l < 40:
194 raise error.Abort(_('working directory state appears damaged!'))
192 raise error.Abort(_('working directory state appears damaged!'))
195 except IOError as err:
193 except IOError as err:
196 if err.errno != errno.ENOENT:
194 if err.errno != errno.ENOENT:
197 raise
195 raise
198 return [nullid, nullid]
196 return [nullid, nullid]
199
197
200 @propertycache
198 @propertycache
201 def _dirs(self):
199 def _dirs(self):
202 return util.dirs(self._map, 'r')
200 return util.dirs(self._map, 'r')
203
201
204 def dirs(self):
202 def dirs(self):
205 return self._dirs
203 return self._dirs
206
204
207 @rootcache('.hgignore')
205 @rootcache('.hgignore')
208 def _ignore(self):
206 def _ignore(self):
209 files = self._ignorefiles()
207 files = self._ignorefiles()
210 if not files:
208 if not files:
211 return util.never
209 return util.never
212
210
213 pats = ['include:%s' % f for f in files]
211 pats = ['include:%s' % f for f in files]
214 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
212 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
215
213
216 @propertycache
214 @propertycache
217 def _slash(self):
215 def _slash(self):
218 return self._ui.configbool('ui', 'slash') and os.sep != '/'
216 return self._ui.configbool('ui', 'slash') and os.sep != '/'
219
217
220 @propertycache
218 @propertycache
221 def _checklink(self):
219 def _checklink(self):
222 return util.checklink(self._root)
220 return util.checklink(self._root)
223
221
224 @propertycache
222 @propertycache
225 def _checkexec(self):
223 def _checkexec(self):
226 return util.checkexec(self._root)
224 return util.checkexec(self._root)
227
225
228 @propertycache
226 @propertycache
229 def _checkcase(self):
227 def _checkcase(self):
230 return not util.checkcase(self._join('.hg'))
228 return not util.checkcase(self._join('.hg'))
231
229
232 def _join(self, f):
230 def _join(self, f):
233 # much faster than os.path.join()
231 # much faster than os.path.join()
234 # it's safe because f is always a relative path
232 # it's safe because f is always a relative path
235 return self._rootdir + f
233 return self._rootdir + f
236
234
237 def flagfunc(self, buildfallback):
235 def flagfunc(self, buildfallback):
238 if self._checklink and self._checkexec:
236 if self._checklink and self._checkexec:
239 def f(x):
237 def f(x):
240 try:
238 try:
241 st = os.lstat(self._join(x))
239 st = os.lstat(self._join(x))
242 if util.statislink(st):
240 if util.statislink(st):
243 return 'l'
241 return 'l'
244 if util.statisexec(st):
242 if util.statisexec(st):
245 return 'x'
243 return 'x'
246 except OSError:
244 except OSError:
247 pass
245 pass
248 return ''
246 return ''
249 return f
247 return f
250
248
251 fallback = buildfallback()
249 fallback = buildfallback()
252 if self._checklink:
250 if self._checklink:
253 def f(x):
251 def f(x):
254 if os.path.islink(self._join(x)):
252 if os.path.islink(self._join(x)):
255 return 'l'
253 return 'l'
256 if 'x' in fallback(x):
254 if 'x' in fallback(x):
257 return 'x'
255 return 'x'
258 return ''
256 return ''
259 return f
257 return f
260 if self._checkexec:
258 if self._checkexec:
261 def f(x):
259 def f(x):
262 if 'l' in fallback(x):
260 if 'l' in fallback(x):
263 return 'l'
261 return 'l'
264 if util.isexec(self._join(x)):
262 if util.isexec(self._join(x)):
265 return 'x'
263 return 'x'
266 return ''
264 return ''
267 return f
265 return f
268 else:
266 else:
269 return fallback
267 return fallback
270
268
271 @propertycache
269 @propertycache
272 def _cwd(self):
270 def _cwd(self):
273 return os.getcwd()
271 return os.getcwd()
274
272
275 def getcwd(self):
273 def getcwd(self):
276 '''Return the path from which a canonical path is calculated.
274 '''Return the path from which a canonical path is calculated.
277
275
278 This path should be used to resolve file patterns or to convert
276 This path should be used to resolve file patterns or to convert
279 canonical paths back to file paths for display. It shouldn't be
277 canonical paths back to file paths for display. It shouldn't be
280 used to get real file paths. Use vfs functions instead.
278 used to get real file paths. Use vfs functions instead.
281 '''
279 '''
282 cwd = self._cwd
280 cwd = self._cwd
283 if cwd == self._root:
281 if cwd == self._root:
284 return ''
282 return ''
285 # self._root ends with a path separator if self._root is '/' or 'C:\'
283 # self._root ends with a path separator if self._root is '/' or 'C:\'
286 rootsep = self._root
284 rootsep = self._root
287 if not util.endswithsep(rootsep):
285 if not util.endswithsep(rootsep):
288 rootsep += os.sep
286 rootsep += os.sep
289 if cwd.startswith(rootsep):
287 if cwd.startswith(rootsep):
290 return cwd[len(rootsep):]
288 return cwd[len(rootsep):]
291 else:
289 else:
292 # we're outside the repo. return an absolute path.
290 # we're outside the repo. return an absolute path.
293 return cwd
291 return cwd
294
292
295 def pathto(self, f, cwd=None):
293 def pathto(self, f, cwd=None):
296 if cwd is None:
294 if cwd is None:
297 cwd = self.getcwd()
295 cwd = self.getcwd()
298 path = util.pathto(self._root, cwd, f)
296 path = util.pathto(self._root, cwd, f)
299 if self._slash:
297 if self._slash:
300 return util.pconvert(path)
298 return util.pconvert(path)
301 return path
299 return path
302
300
303 def __getitem__(self, key):
301 def __getitem__(self, key):
304 '''Return the current state of key (a filename) in the dirstate.
302 '''Return the current state of key (a filename) in the dirstate.
305
303
306 States are:
304 States are:
307 n normal
305 n normal
308 m needs merging
306 m needs merging
309 r marked for removal
307 r marked for removal
310 a marked for addition
308 a marked for addition
311 ? not tracked
309 ? not tracked
312 '''
310 '''
313 return self._map.get(key, ("?",))[0]
311 return self._map.get(key, ("?",))[0]
314
312
315 def __contains__(self, key):
313 def __contains__(self, key):
316 return key in self._map
314 return key in self._map
317
315
318 def __iter__(self):
316 def __iter__(self):
319 for x in sorted(self._map):
317 for x in sorted(self._map):
320 yield x
318 yield x
321
319
322 def iteritems(self):
320 def iteritems(self):
323 return self._map.iteritems()
321 return self._map.iteritems()
324
322
325 def parents(self):
323 def parents(self):
326 return [self._validate(p) for p in self._pl]
324 return [self._validate(p) for p in self._pl]
327
325
328 def p1(self):
326 def p1(self):
329 return self._validate(self._pl[0])
327 return self._validate(self._pl[0])
330
328
331 def p2(self):
329 def p2(self):
332 return self._validate(self._pl[1])
330 return self._validate(self._pl[1])
333
331
334 def branch(self):
332 def branch(self):
335 return encoding.tolocal(self._branch)
333 return encoding.tolocal(self._branch)
336
334
337 def setparents(self, p1, p2=nullid):
335 def setparents(self, p1, p2=nullid):
338 """Set dirstate parents to p1 and p2.
336 """Set dirstate parents to p1 and p2.
339
337
340 When moving from two parents to one, 'm' merged entries a
338 When moving from two parents to one, 'm' merged entries a
341 adjusted to normal and previous copy records discarded and
339 adjusted to normal and previous copy records discarded and
342 returned by the call.
340 returned by the call.
343
341
344 See localrepo.setparents()
342 See localrepo.setparents()
345 """
343 """
346 if self._parentwriters == 0:
344 if self._parentwriters == 0:
347 raise ValueError("cannot set dirstate parent without "
345 raise ValueError("cannot set dirstate parent without "
348 "calling dirstate.beginparentchange")
346 "calling dirstate.beginparentchange")
349
347
350 self._dirty = self._dirtypl = True
348 self._dirty = self._dirtypl = True
351 oldp2 = self._pl[1]
349 oldp2 = self._pl[1]
352 self._pl = p1, p2
350 self._pl = p1, p2
353 copies = {}
351 copies = {}
354 if oldp2 != nullid and p2 == nullid:
352 if oldp2 != nullid and p2 == nullid:
355 for f, s in self._map.iteritems():
353 for f, s in self._map.iteritems():
356 # Discard 'm' markers when moving away from a merge state
354 # Discard 'm' markers when moving away from a merge state
357 if s[0] == 'm':
355 if s[0] == 'm':
358 if f in self._copymap:
356 if f in self._copymap:
359 copies[f] = self._copymap[f]
357 copies[f] = self._copymap[f]
360 self.normallookup(f)
358 self.normallookup(f)
361 # Also fix up otherparent markers
359 # Also fix up otherparent markers
362 elif s[0] == 'n' and s[2] == -2:
360 elif s[0] == 'n' and s[2] == -2:
363 if f in self._copymap:
361 if f in self._copymap:
364 copies[f] = self._copymap[f]
362 copies[f] = self._copymap[f]
365 self.add(f)
363 self.add(f)
366 return copies
364 return copies
367
365
368 def setbranch(self, branch):
366 def setbranch(self, branch):
369 self._branch = encoding.fromlocal(branch)
367 self._branch = encoding.fromlocal(branch)
370 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
368 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
371 try:
369 try:
372 f.write(self._branch + '\n')
370 f.write(self._branch + '\n')
373 f.close()
371 f.close()
374
372
375 # make sure filecache has the correct stat info for _branch after
373 # make sure filecache has the correct stat info for _branch after
376 # replacing the underlying file
374 # replacing the underlying file
377 ce = self._filecache['_branch']
375 ce = self._filecache['_branch']
378 if ce:
376 if ce:
379 ce.refresh()
377 ce.refresh()
380 except: # re-raises
378 except: # re-raises
381 f.discard()
379 f.discard()
382 raise
380 raise
383
381
384 def _opendirstatefile(self):
382 def _opendirstatefile(self):
385 fp, mode = _trypending(self._root, self._opener, self._filename)
383 fp, mode = _trypending(self._root, self._opener, self._filename)
386 if self._pendingmode is not None and self._pendingmode != mode:
384 if self._pendingmode is not None and self._pendingmode != mode:
387 fp.close()
385 fp.close()
388 raise error.Abort(_('working directory state may be '
386 raise error.Abort(_('working directory state may be '
389 'changed parallelly'))
387 'changed parallelly'))
390 self._pendingmode = mode
388 self._pendingmode = mode
391 return fp
389 return fp
392
390
393 def _read(self):
391 def _read(self):
394 self._map = {}
392 self._map = {}
395 self._copymap = {}
393 self._copymap = {}
396 try:
394 try:
397 fp = self._opendirstatefile()
395 fp = self._opendirstatefile()
398 try:
396 try:
399 st = fp.read()
397 st = fp.read()
400 finally:
398 finally:
401 fp.close()
399 fp.close()
402 except IOError as err:
400 except IOError as err:
403 if err.errno != errno.ENOENT:
401 if err.errno != errno.ENOENT:
404 raise
402 raise
405 return
403 return
406 if not st:
404 if not st:
407 return
405 return
408
406
409 if util.safehasattr(parsers, 'dict_new_presized'):
407 if util.safehasattr(parsers, 'dict_new_presized'):
410 # Make an estimate of the number of files in the dirstate based on
408 # Make an estimate of the number of files in the dirstate based on
411 # its size. From a linear regression on a set of real-world repos,
409 # its size. From a linear regression on a set of real-world repos,
412 # all over 10,000 files, the size of a dirstate entry is 85
410 # all over 10,000 files, the size of a dirstate entry is 85
413 # bytes. The cost of resizing is significantly higher than the cost
411 # bytes. The cost of resizing is significantly higher than the cost
414 # of filling in a larger presized dict, so subtract 20% from the
412 # of filling in a larger presized dict, so subtract 20% from the
415 # size.
413 # size.
416 #
414 #
417 # This heuristic is imperfect in many ways, so in a future dirstate
415 # This heuristic is imperfect in many ways, so in a future dirstate
418 # format update it makes sense to just record the number of entries
416 # format update it makes sense to just record the number of entries
419 # on write.
417 # on write.
420 self._map = parsers.dict_new_presized(len(st) / 71)
418 self._map = parsers.dict_new_presized(len(st) / 71)
421
419
422 # Python's garbage collector triggers a GC each time a certain number
420 # Python's garbage collector triggers a GC each time a certain number
423 # of container objects (the number being defined by
421 # of container objects (the number being defined by
424 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
425 # for each file in the dirstate. The C version then immediately marks
423 # for each file in the dirstate. The C version then immediately marks
426 # them as not to be tracked by the collector. However, this has no
424 # them as not to be tracked by the collector. However, this has no
427 # effect on when GCs are triggered, only on what objects the GC looks
425 # effect on when GCs are triggered, only on what objects the GC looks
428 # into. This means that O(number of files) GCs are unavoidable.
426 # into. This means that O(number of files) GCs are unavoidable.
429 # Depending on when in the process's lifetime the dirstate is parsed,
427 # Depending on when in the process's lifetime the dirstate is parsed,
430 # this can get very expensive. As a workaround, disable GC while
428 # this can get very expensive. As a workaround, disable GC while
431 # parsing the dirstate.
429 # parsing the dirstate.
432 #
430 #
433 # (we cannot decorate the function directly since it is in a C module)
431 # (we cannot decorate the function directly since it is in a C module)
434 parse_dirstate = util.nogc(parsers.parse_dirstate)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
435 p = parse_dirstate(self._map, self._copymap, st)
433 p = parse_dirstate(self._map, self._copymap, st)
436 if not self._dirtypl:
434 if not self._dirtypl:
437 self._pl = p
435 self._pl = p
438
436
439 def invalidate(self):
437 def invalidate(self):
440 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
438 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
441 "_pl", "_dirs", "_ignore", "_nonnormalset"):
439 "_pl", "_dirs", "_ignore", "_nonnormalset"):
442 if a in self.__dict__:
440 if a in self.__dict__:
443 delattr(self, a)
441 delattr(self, a)
444 self._lastnormaltime = 0
442 self._lastnormaltime = 0
445 self._dirty = False
443 self._dirty = False
446 self._parentwriters = 0
444 self._parentwriters = 0
447
445
448 def copy(self, source, dest):
446 def copy(self, source, dest):
449 """Mark dest as a copy of source. Unmark dest if source is None."""
447 """Mark dest as a copy of source. Unmark dest if source is None."""
450 if source == dest:
448 if source == dest:
451 return
449 return
452 self._dirty = True
450 self._dirty = True
453 if source is not None:
451 if source is not None:
454 self._copymap[dest] = source
452 self._copymap[dest] = source
455 elif dest in self._copymap:
453 elif dest in self._copymap:
456 del self._copymap[dest]
454 del self._copymap[dest]
457
455
458 def copied(self, file):
456 def copied(self, file):
459 return self._copymap.get(file, None)
457 return self._copymap.get(file, None)
460
458
461 def copies(self):
459 def copies(self):
462 return self._copymap
460 return self._copymap
463
461
464 def _droppath(self, f):
462 def _droppath(self, f):
465 if self[f] not in "?r" and "_dirs" in self.__dict__:
463 if self[f] not in "?r" and "_dirs" in self.__dict__:
466 self._dirs.delpath(f)
464 self._dirs.delpath(f)
467
465
468 if "_filefoldmap" in self.__dict__:
466 if "_filefoldmap" in self.__dict__:
469 normed = util.normcase(f)
467 normed = util.normcase(f)
470 if normed in self._filefoldmap:
468 if normed in self._filefoldmap:
471 del self._filefoldmap[normed]
469 del self._filefoldmap[normed]
472
470
473 def _addpath(self, f, state, mode, size, mtime):
471 def _addpath(self, f, state, mode, size, mtime):
474 oldstate = self[f]
472 oldstate = self[f]
475 if state == 'a' or oldstate == 'r':
473 if state == 'a' or oldstate == 'r':
476 scmutil.checkfilename(f)
474 scmutil.checkfilename(f)
477 if f in self._dirs:
475 if f in self._dirs:
478 raise error.Abort(_('directory %r already in dirstate') % f)
476 raise error.Abort(_('directory %r already in dirstate') % f)
479 # shadows
477 # shadows
480 for d in util.finddirs(f):
478 for d in util.finddirs(f):
481 if d in self._dirs:
479 if d in self._dirs:
482 break
480 break
483 if d in self._map and self[d] != 'r':
481 if d in self._map and self[d] != 'r':
484 raise error.Abort(
482 raise error.Abort(
485 _('file %r in dirstate clashes with %r') % (d, f))
483 _('file %r in dirstate clashes with %r') % (d, f))
486 if oldstate in "?r" and "_dirs" in self.__dict__:
484 if oldstate in "?r" and "_dirs" in self.__dict__:
487 self._dirs.addpath(f)
485 self._dirs.addpath(f)
488 self._dirty = True
486 self._dirty = True
489 self._map[f] = dirstatetuple(state, mode, size, mtime)
487 self._map[f] = dirstatetuple(state, mode, size, mtime)
490 if state != 'n' or mtime == -1:
488 if state != 'n' or mtime == -1:
491 self._nonnormalset.add(f)
489 self._nonnormalset.add(f)
492
490
493 def normal(self, f):
491 def normal(self, f):
494 '''Mark a file normal and clean.'''
492 '''Mark a file normal and clean.'''
495 s = os.lstat(self._join(f))
493 s = os.lstat(self._join(f))
496 mtime = s.st_mtime
494 mtime = s.st_mtime
497 self._addpath(f, 'n', s.st_mode,
495 self._addpath(f, 'n', s.st_mode,
498 s.st_size & _rangemask, mtime & _rangemask)
496 s.st_size & _rangemask, mtime & _rangemask)
499 if f in self._copymap:
497 if f in self._copymap:
500 del self._copymap[f]
498 del self._copymap[f]
501 if f in self._nonnormalset:
499 if f in self._nonnormalset:
502 self._nonnormalset.remove(f)
500 self._nonnormalset.remove(f)
503 if mtime > self._lastnormaltime:
501 if mtime > self._lastnormaltime:
504 # Remember the most recent modification timeslot for status(),
502 # Remember the most recent modification timeslot for status(),
505 # to make sure we won't miss future size-preserving file content
503 # to make sure we won't miss future size-preserving file content
506 # modifications that happen within the same timeslot.
504 # modifications that happen within the same timeslot.
507 self._lastnormaltime = mtime
505 self._lastnormaltime = mtime
508
506
509 def normallookup(self, f):
507 def normallookup(self, f):
510 '''Mark a file normal, but possibly dirty.'''
508 '''Mark a file normal, but possibly dirty.'''
511 if self._pl[1] != nullid and f in self._map:
509 if self._pl[1] != nullid and f in self._map:
512 # if there is a merge going on and the file was either
510 # if there is a merge going on and the file was either
513 # in state 'm' (-1) or coming from other parent (-2) before
511 # in state 'm' (-1) or coming from other parent (-2) before
514 # being removed, restore that state.
512 # being removed, restore that state.
515 entry = self._map[f]
513 entry = self._map[f]
516 if entry[0] == 'r' and entry[2] in (-1, -2):
514 if entry[0] == 'r' and entry[2] in (-1, -2):
517 source = self._copymap.get(f)
515 source = self._copymap.get(f)
518 if entry[2] == -1:
516 if entry[2] == -1:
519 self.merge(f)
517 self.merge(f)
520 elif entry[2] == -2:
518 elif entry[2] == -2:
521 self.otherparent(f)
519 self.otherparent(f)
522 if source:
520 if source:
523 self.copy(source, f)
521 self.copy(source, f)
524 return
522 return
525 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
523 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
526 return
524 return
527 self._addpath(f, 'n', 0, -1, -1)
525 self._addpath(f, 'n', 0, -1, -1)
528 if f in self._copymap:
526 if f in self._copymap:
529 del self._copymap[f]
527 del self._copymap[f]
530 if f in self._nonnormalset:
528 if f in self._nonnormalset:
531 self._nonnormalset.remove(f)
529 self._nonnormalset.remove(f)
532
530
533 def otherparent(self, f):
531 def otherparent(self, f):
534 '''Mark as coming from the other parent, always dirty.'''
532 '''Mark as coming from the other parent, always dirty.'''
535 if self._pl[1] == nullid:
533 if self._pl[1] == nullid:
536 raise error.Abort(_("setting %r to other parent "
534 raise error.Abort(_("setting %r to other parent "
537 "only allowed in merges") % f)
535 "only allowed in merges") % f)
538 if f in self and self[f] == 'n':
536 if f in self and self[f] == 'n':
539 # merge-like
537 # merge-like
540 self._addpath(f, 'm', 0, -2, -1)
538 self._addpath(f, 'm', 0, -2, -1)
541 else:
539 else:
542 # add-like
540 # add-like
543 self._addpath(f, 'n', 0, -2, -1)
541 self._addpath(f, 'n', 0, -2, -1)
544
542
545 if f in self._copymap:
543 if f in self._copymap:
546 del self._copymap[f]
544 del self._copymap[f]
547
545
548 def add(self, f):
546 def add(self, f):
549 '''Mark a file added.'''
547 '''Mark a file added.'''
550 self._addpath(f, 'a', 0, -1, -1)
548 self._addpath(f, 'a', 0, -1, -1)
551 if f in self._copymap:
549 if f in self._copymap:
552 del self._copymap[f]
550 del self._copymap[f]
553
551
554 def remove(self, f):
552 def remove(self, f):
555 '''Mark a file removed.'''
553 '''Mark a file removed.'''
556 self._dirty = True
554 self._dirty = True
557 self._droppath(f)
555 self._droppath(f)
558 size = 0
556 size = 0
559 if self._pl[1] != nullid and f in self._map:
557 if self._pl[1] != nullid and f in self._map:
560 # backup the previous state
558 # backup the previous state
561 entry = self._map[f]
559 entry = self._map[f]
562 if entry[0] == 'm': # merge
560 if entry[0] == 'm': # merge
563 size = -1
561 size = -1
564 elif entry[0] == 'n' and entry[2] == -2: # other parent
562 elif entry[0] == 'n' and entry[2] == -2: # other parent
565 size = -2
563 size = -2
566 self._map[f] = dirstatetuple('r', 0, size, 0)
564 self._map[f] = dirstatetuple('r', 0, size, 0)
567 self._nonnormalset.add(f)
565 self._nonnormalset.add(f)
568 if size == 0 and f in self._copymap:
566 if size == 0 and f in self._copymap:
569 del self._copymap[f]
567 del self._copymap[f]
570
568
571 def merge(self, f):
569 def merge(self, f):
572 '''Mark a file merged.'''
570 '''Mark a file merged.'''
573 if self._pl[1] == nullid:
571 if self._pl[1] == nullid:
574 return self.normallookup(f)
572 return self.normallookup(f)
575 return self.otherparent(f)
573 return self.otherparent(f)
576
574
577 def drop(self, f):
575 def drop(self, f):
578 '''Drop a file from the dirstate'''
576 '''Drop a file from the dirstate'''
579 if f in self._map:
577 if f in self._map:
580 self._dirty = True
578 self._dirty = True
581 self._droppath(f)
579 self._droppath(f)
582 del self._map[f]
580 del self._map[f]
583 if f in self._nonnormalset:
581 if f in self._nonnormalset:
584 self._nonnormalset.remove(f)
582 self._nonnormalset.remove(f)
585 if f in self._copymap:
583 if f in self._copymap:
586 del self._copymap[f]
584 del self._copymap[f]
587
585
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        # Find the canonical case-folded spelling of 'path' (consulting the
        # filesystem and the dirstate) and cache it in 'storemap' keyed by
        # the case-normalized form 'normed'. Returns the folded path.
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only cache results for paths that actually exist on disk
            storemap[normed] = folded

        return folded
614 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
612 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
615 normed = util.normcase(path)
613 normed = util.normcase(path)
616 folded = self._filefoldmap.get(normed, None)
614 folded = self._filefoldmap.get(normed, None)
617 if folded is None:
615 if folded is None:
618 if isknown:
616 if isknown:
619 folded = path
617 folded = path
620 else:
618 else:
621 folded = self._discoverpath(path, normed, ignoremissing, exists,
619 folded = self._discoverpath(path, normed, ignoremissing, exists,
622 self._filefoldmap)
620 self._filefoldmap)
623 return folded
621 return folded
624
622
625 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
623 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
626 normed = util.normcase(path)
624 normed = util.normcase(path)
627 folded = self._filefoldmap.get(normed, None)
625 folded = self._filefoldmap.get(normed, None)
628 if folded is None:
626 if folded is None:
629 folded = self._dirfoldmap.get(normed, None)
627 folded = self._dirfoldmap.get(normed, None)
630 if folded is None:
628 if folded is None:
631 if isknown:
629 if isknown:
632 folded = path
630 folded = path
633 else:
631 else:
634 # store discovered result in dirfoldmap so that future
632 # store discovered result in dirfoldmap so that future
635 # normalizefile calls don't start matching directories
633 # normalizefile calls don't start matching directories
636 folded = self._discoverpath(path, normed, ignoremissing, exists,
634 folded = self._discoverpath(path, normed, ignoremissing, exists,
637 self._dirfoldmap)
635 self._dirfoldmap)
638 return folded
636 return folded
639
637
640 def normalize(self, path, isknown=False, ignoremissing=False):
638 def normalize(self, path, isknown=False, ignoremissing=False):
641 '''
639 '''
642 normalize the case of a pathname when on a casefolding filesystem
640 normalize the case of a pathname when on a casefolding filesystem
643
641
644 isknown specifies whether the filename came from walking the
642 isknown specifies whether the filename came from walking the
645 disk, to avoid extra filesystem access.
643 disk, to avoid extra filesystem access.
646
644
647 If ignoremissing is True, missing path are returned
645 If ignoremissing is True, missing path are returned
648 unchanged. Otherwise, we try harder to normalize possibly
646 unchanged. Otherwise, we try harder to normalize possibly
649 existing path components.
647 existing path components.
650
648
651 The normalized case is determined based on the following precedence:
649 The normalized case is determined based on the following precedence:
652
650
653 - version of name already stored in the dirstate
651 - version of name already stored in the dirstate
654 - version of name stored on disk
652 - version of name stored on disk
655 - version provided via command arguments
653 - version provided via command arguments
656 '''
654 '''
657
655
658 if self._checkcase:
656 if self._checkcase:
659 return self._normalize(path, isknown, ignoremissing)
657 return self._normalize(path, isknown, ignoremissing)
660 return path
658 return path
661
659
662 def clear(self):
660 def clear(self):
663 self._map = {}
661 self._map = {}
664 self._nonnormalset = set()
662 self._nonnormalset = set()
665 if "_dirs" in self.__dict__:
663 if "_dirs" in self.__dict__:
666 delattr(self, "_dirs")
664 delattr(self, "_dirs")
667 self._copymap = {}
665 self._copymap = {}
668 self._pl = [nullid, nullid]
666 self._pl = [nullid, nullid]
669 self._lastnormaltime = 0
667 self._lastnormaltime = 0
670 self._dirty = True
668 self._dirty = True
671
669
672 def rebuild(self, parent, allfiles, changedfiles=None):
670 def rebuild(self, parent, allfiles, changedfiles=None):
673 if changedfiles is None:
671 if changedfiles is None:
674 # Rebuild entire dirstate
672 # Rebuild entire dirstate
675 changedfiles = allfiles
673 changedfiles = allfiles
676 lastnormaltime = self._lastnormaltime
674 lastnormaltime = self._lastnormaltime
677 self.clear()
675 self.clear()
678 self._lastnormaltime = lastnormaltime
676 self._lastnormaltime = lastnormaltime
679
677
680 for f in changedfiles:
678 for f in changedfiles:
681 mode = 0o666
679 mode = 0o666
682 if f in allfiles and 'x' in allfiles.flags(f):
680 if f in allfiles and 'x' in allfiles.flags(f):
683 mode = 0o777
681 mode = 0o777
684
682
685 if f in allfiles:
683 if f in allfiles:
686 self._map[f] = dirstatetuple('n', mode, -1, 0)
684 self._map[f] = dirstatetuple('n', mode, -1, 0)
687 else:
685 else:
688 self._map.pop(f, None)
686 self._map.pop(f, None)
689 if f in self._nonnormalset:
687 if f in self._nonnormalset:
690 self._nonnormalset.remove(f)
688 self._nonnormalset.remove(f)
691
689
692 self._pl = (parent, nullid)
690 self._pl = (parent, nullid)
693 self._dirty = True
691 self._dirty = True
694
692
695 def write(self, tr=_token):
693 def write(self, tr):
696 if not self._dirty:
694 if not self._dirty:
697 return
695 return
698
696
699 filename = self._filename
697 filename = self._filename
700 if tr is _token: # not explicitly specified
698 if tr:
701 self._ui.deprecwarn('use dirstate.write with '
702 'repo.currenttransaction()',
703 '3.9')
704
705 if self._opener.lexists(self._pendingfilename):
706 # if pending file already exists, in-memory changes
707 # should be written into it, because it has priority
708 # to '.hg/dirstate' at reading under HG_PENDING mode
709 filename = self._pendingfilename
710 elif tr:
711 # 'dirstate.write()' is not only for writing in-memory
699 # 'dirstate.write()' is not only for writing in-memory
712 # changes out, but also for dropping ambiguous timestamp.
700 # changes out, but also for dropping ambiguous timestamp.
713 # delayed writing re-raise "ambiguous timestamp issue".
701 # delayed writing re-raise "ambiguous timestamp issue".
714 # See also the wiki page below for detail:
702 # See also the wiki page below for detail:
715 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
703 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
716
704
717 # emulate dropping timestamp in 'parsers.pack_dirstate'
705 # emulate dropping timestamp in 'parsers.pack_dirstate'
718 now = _getfsnow(self._opener)
706 now = _getfsnow(self._opener)
719 dmap = self._map
707 dmap = self._map
720 for f, e in dmap.iteritems():
708 for f, e in dmap.iteritems():
721 if e[0] == 'n' and e[3] == now:
709 if e[0] == 'n' and e[3] == now:
722 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
710 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
723 self._nonnormalset.add(f)
711 self._nonnormalset.add(f)
724
712
725 # emulate that all 'dirstate.normal' results are written out
713 # emulate that all 'dirstate.normal' results are written out
726 self._lastnormaltime = 0
714 self._lastnormaltime = 0
727
715
728 # delay writing in-memory changes out
716 # delay writing in-memory changes out
729 tr.addfilegenerator('dirstate', (self._filename,),
717 tr.addfilegenerator('dirstate', (self._filename,),
730 self._writedirstate, location='plain')
718 self._writedirstate, location='plain')
731 return
719 return
732
720
733 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
721 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
734 self._writedirstate(st)
722 self._writedirstate(st)
735
723
    def _writedirstate(self, st):
        # Serialize the dirstate (map, copies, parents) into the file-like
        # object 'st' and reset the dirty flags.
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st).st_mtime & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in self._map.iteritems():
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    break

        st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
        # recompute the non-normal cache from the (possibly mutated) map
        self._nonnormalset = nonnormalentries(self._map)
        st.close()
        self._lastnormaltime = 0
        self._dirty = self._dirtypl = False
763 def _dirignore(self, f):
751 def _dirignore(self, f):
764 if f == '.':
752 if f == '.':
765 return False
753 return False
766 if self._ignore(f):
754 if self._ignore(f):
767 return True
755 return True
768 for p in util.finddirs(f):
756 for p in util.finddirs(f):
769 if self._ignore(p):
757 if self._ignore(p):
770 return True
758 return True
771 return False
759 return False
772
760
773 def _ignorefiles(self):
761 def _ignorefiles(self):
774 files = []
762 files = []
775 if os.path.exists(self._join('.hgignore')):
763 if os.path.exists(self._join('.hgignore')):
776 files.append(self._join('.hgignore'))
764 files.append(self._join('.hgignore'))
777 for name, path in self._ui.configitems("ui"):
765 for name, path in self._ui.configitems("ui"):
778 if name == 'ignore' or name.startswith('ignore.'):
766 if name == 'ignore' or name.startswith('ignore.'):
779 # we need to use os.path.join here rather than self._join
767 # we need to use os.path.join here rather than self._join
780 # because path is arbitrary and user-specified
768 # because path is arbitrary and user-specified
781 files.append(os.path.join(self._rootdir, util.expandpath(path)))
769 files.append(os.path.join(self._rootdir, util.expandpath(path)))
782 return files
770 return files
783
771
784 def _ignorefileandline(self, f):
772 def _ignorefileandline(self, f):
785 files = collections.deque(self._ignorefiles())
773 files = collections.deque(self._ignorefiles())
786 visited = set()
774 visited = set()
787 while files:
775 while files:
788 i = files.popleft()
776 i = files.popleft()
789 patterns = matchmod.readpatternfile(i, self._ui.warn,
777 patterns = matchmod.readpatternfile(i, self._ui.warn,
790 sourceinfo=True)
778 sourceinfo=True)
791 for pattern, lineno, line in patterns:
779 for pattern, lineno, line in patterns:
792 kind, p = matchmod._patsplit(pattern, 'glob')
780 kind, p = matchmod._patsplit(pattern, 'glob')
793 if kind == "subinclude":
781 if kind == "subinclude":
794 if p not in visited:
782 if p not in visited:
795 files.append(p)
783 files.append(p)
796 continue
784 continue
797 m = matchmod.match(self._root, '', [], [pattern],
785 m = matchmod.match(self._root, '', [], [pattern],
798 warn=self._ui.warn)
786 warn=self._ui.warn)
799 if m(f):
787 if m(f):
800 return (i, lineno, line)
788 return (i, lineno, line)
801 visited.add(i)
789 visited.add(i)
802 return (None, -1, "")
790 return (None, -1, "")
803
791
    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            # human-readable complaint for non-file, non-symlink entries
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        # hoist frequently-used attributes to locals for the loop below
        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a subrepo; both lists are
        # sorted, so this is a linear merge-style scan
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '.' in files:
            files = ['.']
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        alldirs = None
        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if alldirs is None:
                        alldirs = util.dirs(dmap)
                    if nf in alldirs:
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, inst.strerror)

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # select the ignore predicates according to what the caller wants
        # to see (ignored and/or unknown files)
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # hoist attribute lookups out of the traversal loops
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = osutil.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    # unreadable/vanished directories are reported via
                    # match.bad rather than aborting the whole walk
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd), inst.strerror)
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = dmap.keys()
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                nf = iter(visit).next
                for st in util.statfiles([join(i) for i in visit]):
                    results[nf()] = st
        return results
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''
        listignored, listclean, listunknown = ignored, clean, unknown
        # result buckets; note that from here on 'ignored'/'clean'/'unknown'
        # name the output lists, not the boolean arguments
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        # bind hot attributes to locals once -- this loop runs over every
        # file in the working directory, so attribute lookups add up
        dmap = self._map
        ladd = lookup.append            # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                # file exists on disk but is not tracked: it is either
                # ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            # tracked file missing on disk (st is falsy) -> deleted
            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                # state 'n' ("normal"): compare the recorded size/mode/mtime
                # against the on-disk stat to classify the file
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    # size matches but mtime differs: must read content -> unsure
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))
1187
1175
1188 def matches(self, match):
1176 def matches(self, match):
1189 '''
1177 '''
1190 return files in the dirstate (in whatever state) filtered by match
1178 return files in the dirstate (in whatever state) filtered by match
1191 '''
1179 '''
1192 dmap = self._map
1180 dmap = self._map
1193 if match.always():
1181 if match.always():
1194 return dmap.keys()
1182 return dmap.keys()
1195 files = match.files()
1183 files = match.files()
1196 if match.isexact():
1184 if match.isexact():
1197 # fast path -- filter the other way around, since typically files is
1185 # fast path -- filter the other way around, since typically files is
1198 # much smaller than dmap
1186 # much smaller than dmap
1199 return [f for f in files if f in dmap]
1187 return [f for f in files if f in dmap]
1200 if match.prefix() and all(fn in dmap for fn in files):
1188 if match.prefix() and all(fn in dmap for fn in files):
1201 # fast path -- all the values are known to be files, so just return
1189 # fast path -- all the values are known to be files, so just return
1202 # that
1190 # that
1203 return list(files)
1191 return list(files)
1204 return [f for f in dmap if match(f)]
1192 return [f for f in dmap if match(f)]
1205
1193
1206 def _actualfilename(self, tr):
1194 def _actualfilename(self, tr):
1207 if tr:
1195 if tr:
1208 return self._pendingfilename
1196 return self._pendingfilename
1209 else:
1197 else:
1210 return self._filename
1198 return self._filename
1211
1199
    def savebackup(self, tr, suffix='', prefix=''):
        '''Save current dirstate into backup file with suffix'''
        # require some decoration so the backup name can never collide with
        # the live dirstate file itself
        assert len(suffix) > 0 or len(prefix) > 0
        # write to the pending file while a transaction runs, else the real one
        filename = self._actualfilename(tr)

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                         checkambig=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        # copy the freshly written state into the backup; the backup name is
        # built from self._filename so "pending" never appears in it
        self._opener.write(prefix + self._filename + suffix,
                           self._opener.tryread(filename))
1237
1225
1238 def restorebackup(self, tr, suffix='', prefix=''):
1226 def restorebackup(self, tr, suffix='', prefix=''):
1239 '''Restore dirstate by backup file with suffix'''
1227 '''Restore dirstate by backup file with suffix'''
1240 assert len(suffix) > 0 or len(prefix) > 0
1228 assert len(suffix) > 0 or len(prefix) > 0
1241 # this "invalidate()" prevents "wlock.release()" from writing
1229 # this "invalidate()" prevents "wlock.release()" from writing
1242 # changes of dirstate out after restoring from backup file
1230 # changes of dirstate out after restoring from backup file
1243 self.invalidate()
1231 self.invalidate()
1244 filename = self._actualfilename(tr)
1232 filename = self._actualfilename(tr)
1245 # using self._filename to avoid having "pending" in the backup filename
1233 # using self._filename to avoid having "pending" in the backup filename
1246 self._opener.rename(prefix + self._filename + suffix, filename,
1234 self._opener.rename(prefix + self._filename + suffix, filename,
1247 checkambig=True)
1235 checkambig=True)
1248
1236
1249 def clearbackup(self, tr, suffix='', prefix=''):
1237 def clearbackup(self, tr, suffix='', prefix=''):
1250 '''Clear backup file with suffix'''
1238 '''Clear backup file with suffix'''
1251 assert len(suffix) > 0 or len(prefix) > 0
1239 assert len(suffix) > 0 or len(prefix) > 0
1252 # using self._filename to avoid having "pending" in the backup filename
1240 # using self._filename to avoid having "pending" in the backup filename
1253 self._opener.unlink(prefix + self._filename + suffix)
1241 self._opener.unlink(prefix + self._filename + suffix)
General Comments 0
You need to be logged in to leave comments. Login now