##// END OF EJS Templates
rust-dirstate: call parse/pack bindings from Python...
Raphaël Gomès -
r42490:9c6c0f73 default
parent child Browse files
Show More
@@ -1,1508 +1,1524
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
# Optional Rust extensions: fall back to None when the compiled module is
# not available (pure-Python or C-only builds).
try:
    from . import rustext
    rustext.__name__ # force actual import (see hgdemandimport)
except ImportError:
    rustext = None

# Implementation of the low-level parsers selected by the module policy
# (pure Python vs C extension).
parsers = policy.importmod(r'parsers')

# Local aliases for frequently used helpers.
propertycache = util.propertycache
filecache = scmutil.filecache
# Mask truncating sizes/mtimes to 31 bits, matching the signed 32-bit
# fields of the on-disk dirstate format.
_rangemask = 0x7fffffff

dirstatetuple = parsers.dirstatetuple
37
43
class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        # resolve fname through the repository's .hg opener
        return obj._opener.join(fname)
42
48
class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
47
53
48 def _getfsnow(vfs):
54 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
55 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
56 tmpfd, tmpname = vfs.mkstemp()
51 try:
57 try:
52 return os.fstat(tmpfd)[stat.ST_MTIME]
58 return os.fstat(tmpfd)[stat.ST_MTIME]
53 finally:
59 finally:
54 os.close(tmpfd)
60 os.close(tmpfd)
55 vfs.unlink(tmpname)
61 vfs.unlink(tmpname)
56
62
57 class dirstate(object):
63 class dirstate(object):
58
64
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.

        validate is a callable applied to parent nodes before they are
        returned; sparsematchfn returns the matcher for the sparse
        checkout.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True when in-memory state diverges from what is on disk
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # number of currently open parentchange() contexts
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        self._plchangecallbacks = {}
        # parents before the current round of changes, or None
        self._origpl = None
        # files whose entries were touched since the last write
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
88
94
    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.

        setparents() may only be called while one of these contexts
        is active (see pendingparentchange()).
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
105
111
106 def pendingparentchange(self):
112 def pendingparentchange(self):
107 '''Returns true if the dirstate is in the middle of a set of changes
113 '''Returns true if the dirstate is in the middle of a set of changes
108 that modify the dirstate parent.
114 that modify the dirstate parent.
109 '''
115 '''
110 return self._parentwriters > 0
116 return self._parentwriters > 0
111
117
112 @propertycache
118 @propertycache
113 def _map(self):
119 def _map(self):
114 """Return the dirstate contents (see documentation for dirstatemap)."""
120 """Return the dirstate contents (see documentation for dirstatemap)."""
115 self._map = self._mapcls(self._ui, self._opener, self._root)
121 self._map = self._mapcls(self._ui, self._opener, self._root)
116 return self._map
122 return self._map
117
123
118 @property
124 @property
119 def _sparsematcher(self):
125 def _sparsematcher(self):
120 """The matcher for the sparse checkout.
126 """The matcher for the sparse checkout.
121
127
122 The working directory may not include every file from a manifest. The
128 The working directory may not include every file from a manifest. The
123 matcher obtained by this property will match a path if it is to be
129 matcher obtained by this property will match a path if it is to be
124 included in the working directory.
130 included in the working directory.
125 """
131 """
126 # TODO there is potential to cache this property. For now, the matcher
132 # TODO there is potential to cache this property. For now, the matcher
127 # is resolved on every access. (But the called function does use a
133 # is resolved on every access. (But the called function does use a
128 # cache to keep the lookup fast.)
134 # cache to keep the lookup fast.)
129 return self._sparsematchfn()
135 return self._sparsematchfn()
130
136
131 @repocache('branch')
137 @repocache('branch')
132 def _branch(self):
138 def _branch(self):
133 try:
139 try:
134 return self._opener.read("branch").strip() or "default"
140 return self._opener.read("branch").strip() or "default"
135 except IOError as inst:
141 except IOError as inst:
136 if inst.errno != errno.ENOENT:
142 if inst.errno != errno.ENOENT:
137 raise
143 raise
138 return "default"
144 return "default"
139
145
    @property
    def _pl(self):
        # current dirstate parents, straight from the map
        return self._map.parents()
143
149
    def hasdir(self, d):
        """Return whether directory *d* contains tracked files."""
        return self._map.hastrackeddir(d)
146
152
147 @rootcache('.hgignore')
153 @rootcache('.hgignore')
148 def _ignore(self):
154 def _ignore(self):
149 files = self._ignorefiles()
155 files = self._ignorefiles()
150 if not files:
156 if not files:
151 return matchmod.never()
157 return matchmod.never()
152
158
153 pats = ['include:%s' % f for f in files]
159 pats = ['include:%s' % f for f in files]
154 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
160 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
155
161
156 @propertycache
162 @propertycache
157 def _slash(self):
163 def _slash(self):
158 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
164 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
159
165
    @propertycache
    def _checklink(self):
        # whether the filesystem at the repo root supports symlinks
        return util.checklink(self._root)
163
169
    @propertycache
    def _checkexec(self):
        # whether the filesystem at the repo root supports the exec bit
        return util.checkexec(self._root)
167
173
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed against '.hg')
        return not util.fscasesensitive(self._join('.hg'))
171
177
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        # (_rootdir is guaranteed to end with a separator)
        return self._rootdir + f
176
182
    def flagfunc(self, buildfallback):
        # Return a callable mapping a tracked path to its flags:
        # 'l' for a symlink, 'x' for an executable, '' otherwise.
        # When the filesystem cannot represent links and/or exec bits,
        # the matcher built by buildfallback() supplies the missing
        # capability (typically from manifest data).
        if self._checklink and self._checkexec:
            # filesystem is fully capable: answer from lstat alone
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            # links are real, exec bit comes from the fallback
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            # exec bit is real, links come from the fallback
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            # neither capability: defer entirely to the fallback
            return fallback
210
216
211 @propertycache
217 @propertycache
212 def _cwd(self):
218 def _cwd(self):
213 # internal config: ui.forcecwd
219 # internal config: ui.forcecwd
214 forcecwd = self._ui.config('ui', 'forcecwd')
220 forcecwd = self._ui.config('ui', 'forcecwd')
215 if forcecwd:
221 if forcecwd:
216 return forcecwd
222 return forcecwd
217 return encoding.getcwd()
223 return encoding.getcwd()
218
224
219 def getcwd(self):
225 def getcwd(self):
220 '''Return the path from which a canonical path is calculated.
226 '''Return the path from which a canonical path is calculated.
221
227
222 This path should be used to resolve file patterns or to convert
228 This path should be used to resolve file patterns or to convert
223 canonical paths back to file paths for display. It shouldn't be
229 canonical paths back to file paths for display. It shouldn't be
224 used to get real file paths. Use vfs functions instead.
230 used to get real file paths. Use vfs functions instead.
225 '''
231 '''
226 cwd = self._cwd
232 cwd = self._cwd
227 if cwd == self._root:
233 if cwd == self._root:
228 return ''
234 return ''
229 # self._root ends with a path separator if self._root is '/' or 'C:\'
235 # self._root ends with a path separator if self._root is '/' or 'C:\'
230 rootsep = self._root
236 rootsep = self._root
231 if not util.endswithsep(rootsep):
237 if not util.endswithsep(rootsep):
232 rootsep += pycompat.ossep
238 rootsep += pycompat.ossep
233 if cwd.startswith(rootsep):
239 if cwd.startswith(rootsep):
234 return cwd[len(rootsep):]
240 return cwd[len(rootsep):]
235 else:
241 else:
236 # we're outside the repo. return an absolute path.
242 # we're outside the repo. return an absolute path.
237 return cwd
243 return cwd
238
244
239 def pathto(self, f, cwd=None):
245 def pathto(self, f, cwd=None):
240 if cwd is None:
246 if cwd is None:
241 cwd = self.getcwd()
247 cwd = self.getcwd()
242 path = util.pathto(self._root, cwd, f)
248 path = util.pathto(self._root, cwd, f)
243 if self._slash:
249 if self._slash:
244 return util.pconvert(path)
250 return util.pconvert(path)
245 return path
251 return path
246
252
247 def __getitem__(self, key):
253 def __getitem__(self, key):
248 '''Return the current state of key (a filename) in the dirstate.
254 '''Return the current state of key (a filename) in the dirstate.
249
255
250 States are:
256 States are:
251 n normal
257 n normal
252 m needs merging
258 m needs merging
253 r marked for removal
259 r marked for removal
254 a marked for addition
260 a marked for addition
255 ? not tracked
261 ? not tracked
256 '''
262 '''
257 return self._map.get(key, ("?",))[0]
263 return self._map.get(key, ("?",))[0]
258
264
    def __contains__(self, key):
        # membership means "key has a dirstate entry"
        return key in self._map
261
267
    def __iter__(self):
        # iterate tracked filenames in sorted (deterministic) order
        return iter(sorted(self._map))
264
270
265 def items(self):
271 def items(self):
266 return self._map.iteritems()
272 return self._map.iteritems()
267
273
268 iteritems = items
274 iteritems = items
269
275
    def parents(self):
        # both parents, passed through the validation callback
        return [self._validate(p) for p in self._pl]
272
278
    def p1(self):
        # validated first parent
        return self._validate(self._pl[0])
275
281
    def p2(self):
        # validated second parent (nullid outside of merges)
        return self._validate(self._pl[1])
278
284
    def branch(self):
        # branch name converted to the local encoding for display
        return encoding.tolocal(self._branch)
281
287
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries a
        adjusted to normal and previous copy records discarded and
        returned by the call.

        Must be called inside a parentchange() context; raises
        ValueError otherwise.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent outside of "
                             "dirstate.parentchange context manager")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so invalidate()/write can
            # tell whether they moved
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # leaving a merge: every non-normal or other-parent entry may
            # carry merge state that must be downgraded
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
322
328
    def setbranch(self, branch):
        # Persist the branch name to .hg/branch atomically and keep the
        # filecache entry for _branch consistent with the new file.
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            # discard the partially-written temp file before re-raising
            f.discard()
            raise
338
344
    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        # drop the cached properties so they are rebuilt on next access
        for a in (r"_map", r"_branch", r"_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None
354
360
355 def copy(self, source, dest):
361 def copy(self, source, dest):
356 """Mark dest as a copy of source. Unmark dest if source is None."""
362 """Mark dest as a copy of source. Unmark dest if source is None."""
357 if source == dest:
363 if source == dest:
358 return
364 return
359 self._dirty = True
365 self._dirty = True
360 if source is not None:
366 if source is not None:
361 self._map.copymap[dest] = source
367 self._map.copymap[dest] = source
362 self._updatedfiles.add(source)
368 self._updatedfiles.add(source)
363 self._updatedfiles.add(dest)
369 self._updatedfiles.add(dest)
364 elif self._map.copymap.pop(dest, None):
370 elif self._map.copymap.pop(dest, None):
365 self._updatedfiles.add(dest)
371 self._updatedfiles.add(dest)
366
372
    def copied(self, file):
        # copy source of file, or None when file is not a recorded copy
        return self._map.copymap.get(file, None)
369
375
    def copies(self):
        # full copy map: destination -> source
        return self._map.copymap
372
378
    def _addpath(self, f, state, mode, size, mtime):
        # Shared helper behind normal()/add()/otherparent()/...: validate
        # the filename, guard against file/directory clashes, then record
        # the entry in the map and mark the dirstate dirty.
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            # only newly-tracked names need the (expensive) checks
            scmutil.checkfilename(f)
            if self._map.hastrackeddir(f):
                raise error.Abort(_('directory %r already in dirstate') %
                                  pycompat.bytestr(f))
            # shadows
            for d in util.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') %
                        (pycompat.bytestr(d), pycompat.bytestr(f)))
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)
392
398
393 def normal(self, f):
399 def normal(self, f):
394 '''Mark a file normal and clean.'''
400 '''Mark a file normal and clean.'''
395 s = os.lstat(self._join(f))
401 s = os.lstat(self._join(f))
396 mtime = s[stat.ST_MTIME]
402 mtime = s[stat.ST_MTIME]
397 self._addpath(f, 'n', s.st_mode,
403 self._addpath(f, 'n', s.st_mode,
398 s.st_size & _rangemask, mtime & _rangemask)
404 s.st_size & _rangemask, mtime & _rangemask)
399 self._map.copymap.pop(f, None)
405 self._map.copymap.pop(f, None)
400 if f in self._map.nonnormalset:
406 if f in self._map.nonnormalset:
401 self._map.nonnormalset.remove(f)
407 self._map.nonnormalset.remove(f)
402 if mtime > self._lastnormaltime:
408 if mtime > self._lastnormaltime:
403 # Remember the most recent modification timeslot for status(),
409 # Remember the most recent modification timeslot for status(),
404 # to make sure we won't miss future size-preserving file content
410 # to make sure we won't miss future size-preserving file content
405 # modifications that happen within the same timeslot.
411 # modifications that happen within the same timeslot.
406 self._lastnormaltime = mtime
412 self._lastnormaltime = mtime
407
413
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    # the size field encodes which merge state was dropped
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    # already carries merge state: nothing to do
                    return
        # -1 size/mtime forces a content comparison on the next status()
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
429
435
    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.

        Raises error.Abort when no merge is in progress (second parent
        is nullid).
        '''
        if self._pl[1] == nullid:
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)
442
448
    def add(self, f):
        '''Mark a file added.'''
        # 0/-1/-1 are placeholder mode/size/mtime for an added file
        self._addpath(f, 'a', 0, -1, -1)
        self._map.copymap.pop(f, None)
447
453
    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != nullid:
            # during a merge, remember how the file was tracked so a later
            # normallookup() can restore that state (see normallookup())
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            # no merge state to preserve: drop any copy record too
            self._map.copymap.pop(f, None)
466
472
    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == nullid:
            # no merge in progress: treat as a possibly-dirty normal file
            return self.normallookup(f)
        return self.otherparent(f)
472
478
473 def drop(self, f):
479 def drop(self, f):
474 '''Drop a file from the dirstate'''
480 '''Drop a file from the dirstate'''
475 oldstate = self[f]
481 oldstate = self[f]
476 if self._map.dropfile(f, oldstate):
482 if self._map.dropfile(f, oldstate):
477 self._dirty = True
483 self._dirty = True
478 self._updatedfiles.add(f)
484 self._updatedfiles.add(f)
479 self._map.copymap.pop(f, None)
485 self._map.copymap.pop(f, None)
480
486
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        # Determine the canonical-case spelling of *path* by consulting the
        # filesystem, caching the answer in *storemap* (a fold map keyed by
        # the normalized form).
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only cache results backed by an existing path
            storemap[normed] = folded

        return folded
506
512
507 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
513 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
508 normed = util.normcase(path)
514 normed = util.normcase(path)
509 folded = self._map.filefoldmap.get(normed, None)
515 folded = self._map.filefoldmap.get(normed, None)
510 if folded is None:
516 if folded is None:
511 if isknown:
517 if isknown:
512 folded = path
518 folded = path
513 else:
519 else:
514 folded = self._discoverpath(path, normed, ignoremissing, exists,
520 folded = self._discoverpath(path, normed, ignoremissing, exists,
515 self._map.filefoldmap)
521 self._map.filefoldmap)
516 return folded
522 return folded
517
523
518 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
524 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
519 normed = util.normcase(path)
525 normed = util.normcase(path)
520 folded = self._map.filefoldmap.get(normed, None)
526 folded = self._map.filefoldmap.get(normed, None)
521 if folded is None:
527 if folded is None:
522 folded = self._map.dirfoldmap.get(normed, None)
528 folded = self._map.dirfoldmap.get(normed, None)
523 if folded is None:
529 if folded is None:
524 if isknown:
530 if isknown:
525 folded = path
531 folded = path
526 else:
532 else:
527 # store discovered result in dirfoldmap so that future
533 # store discovered result in dirfoldmap so that future
528 # normalizefile calls don't start matching directories
534 # normalizefile calls don't start matching directories
529 folded = self._discoverpath(path, normed, ignoremissing, exists,
535 folded = self._discoverpath(path, normed, ignoremissing, exists,
530 self._map.dirfoldmap)
536 self._map.dirfoldmap)
531 return folded
537 return folded
532
538
533 def normalize(self, path, isknown=False, ignoremissing=False):
539 def normalize(self, path, isknown=False, ignoremissing=False):
534 '''
540 '''
535 normalize the case of a pathname when on a casefolding filesystem
541 normalize the case of a pathname when on a casefolding filesystem
536
542
537 isknown specifies whether the filename came from walking the
543 isknown specifies whether the filename came from walking the
538 disk, to avoid extra filesystem access.
544 disk, to avoid extra filesystem access.
539
545
540 If ignoremissing is True, missing path are returned
546 If ignoremissing is True, missing path are returned
541 unchanged. Otherwise, we try harder to normalize possibly
547 unchanged. Otherwise, we try harder to normalize possibly
542 existing path components.
548 existing path components.
543
549
544 The normalized case is determined based on the following precedence:
550 The normalized case is determined based on the following precedence:
545
551
546 - version of name already stored in the dirstate
552 - version of name already stored in the dirstate
547 - version of name stored on disk
553 - version of name stored on disk
548 - version provided via command arguments
554 - version provided via command arguments
549 '''
555 '''
550
556
551 if self._checkcase:
557 if self._checkcase:
552 return self._normalize(path, isknown, ignoremissing)
558 return self._normalize(path, isknown, ignoremissing)
553 return path
559 return path
554
560
555 def clear(self):
561 def clear(self):
556 self._map.clear()
562 self._map.clear()
557 self._lastnormaltime = 0
563 self._lastnormaltime = 0
558 self._updatedfiles.clear()
564 self._updatedfiles.clear()
559 self._dirty = True
565 self._dirty = True
560
566
561 def rebuild(self, parent, allfiles, changedfiles=None):
567 def rebuild(self, parent, allfiles, changedfiles=None):
562 if changedfiles is None:
568 if changedfiles is None:
563 # Rebuild entire dirstate
569 # Rebuild entire dirstate
564 changedfiles = allfiles
570 changedfiles = allfiles
565 lastnormaltime = self._lastnormaltime
571 lastnormaltime = self._lastnormaltime
566 self.clear()
572 self.clear()
567 self._lastnormaltime = lastnormaltime
573 self._lastnormaltime = lastnormaltime
568
574
569 if self._origpl is None:
575 if self._origpl is None:
570 self._origpl = self._pl
576 self._origpl = self._pl
571 self._map.setparents(parent, nullid)
577 self._map.setparents(parent, nullid)
572 for f in changedfiles:
578 for f in changedfiles:
573 if f in allfiles:
579 if f in allfiles:
574 self.normallookup(f)
580 self.normallookup(f)
575 else:
581 else:
576 self.drop(f)
582 self.drop(f)
577
583
578 self._dirty = True
584 self._dirty = True
579
585
580 def identity(self):
586 def identity(self):
581 '''Return identity of dirstate itself to detect changing in storage
587 '''Return identity of dirstate itself to detect changing in storage
582
588
583 If identity of previous dirstate is equal to this, writing
589 If identity of previous dirstate is equal to this, writing
584 changes based on the former dirstate out can keep consistency.
590 changes based on the former dirstate out can keep consistency.
585 '''
591 '''
586 return self._map.identity
592 return self._map.identity
587
593
    def write(self, tr):
        """Write in-memory dirstate changes out to disk.

        When a transaction *tr* is given, the actual write is deferred to
        the transaction's file generators; otherwise the dirstate file is
        written immediately and atomically.
        """
        if not self._dirty:
            # Nothing changed in memory; avoid a pointless rewrite.
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')
            return

        # No transaction: write right away via an atomic temp file, with
        # ambiguity checking so a same-mtime rewrite is still detected.
        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
615
621
616 def addparentchangecallback(self, category, callback):
622 def addparentchangecallback(self, category, callback):
617 """add a callback to be called when the wd parents are changed
623 """add a callback to be called when the wd parents are changed
618
624
619 Callback will be called with the following arguments:
625 Callback will be called with the following arguments:
620 dirstate, (oldp1, oldp2), (newp1, newp2)
626 dirstate, (oldp1, oldp2), (newp1, newp2)
621
627
622 Category is a unique identifier to allow overwriting an old callback
628 Category is a unique identifier to allow overwriting an old callback
623 with a newer callback.
629 with a newer callback.
624 """
630 """
625 self._plchangecallbacks[category] = callback
631 self._plchangecallbacks[category] = callback
626
632
    def _writedirstate(self, st):
        """Serialize the dirstate map into the open file object *st*.

        Also fires parent-change callbacks and, when configured, delays
        the write so that no entry shares its mtime with 'now'.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() gives callbacks a deterministic invocation order.
            for c, callback in sorted(self._plchangecallbacks.iteritems()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in self._map.iteritems():
                # e is (state, mode, size, mtime); only clean ('n') entries
                # whose mtime equals 'now' are ambiguous.
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
657
663
658 def _dirignore(self, f):
664 def _dirignore(self, f):
659 if f == '.':
665 if f == '.':
660 return False
666 return False
661 if self._ignore(f):
667 if self._ignore(f):
662 return True
668 return True
663 for p in util.finddirs(f):
669 for p in util.finddirs(f):
664 if self._ignore(p):
670 if self._ignore(p):
665 return True
671 return True
666 return False
672 return False
667
673
668 def _ignorefiles(self):
674 def _ignorefiles(self):
669 files = []
675 files = []
670 if os.path.exists(self._join('.hgignore')):
676 if os.path.exists(self._join('.hgignore')):
671 files.append(self._join('.hgignore'))
677 files.append(self._join('.hgignore'))
672 for name, path in self._ui.configitems("ui"):
678 for name, path in self._ui.configitems("ui"):
673 if name == 'ignore' or name.startswith('ignore.'):
679 if name == 'ignore' or name.startswith('ignore.'):
674 # we need to use os.path.join here rather than self._join
680 # we need to use os.path.join here rather than self._join
675 # because path is arbitrary and user-specified
681 # because path is arbitrary and user-specified
676 files.append(os.path.join(self._rootdir, util.expandpath(path)))
682 files.append(os.path.join(self._rootdir, util.expandpath(path)))
677 return files
683 return files
678
684
    def _ignorefileandline(self, f):
        """Return (ignorefile, lineno, line) for the rule that ignores *f*.

        Walks every ignore file (including subincluded ones, breadth-first)
        and returns the first matching pattern's origin; returns
        (None, -1, "") when no rule matches.
        """
        files = collections.deque(self._ignorefiles())
        # 'visited' guards against subinclude cycles re-queueing a file.
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(i, self._ui.warn,
                                                sourceinfo=True)
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, 'glob')
                if kind == "subinclude":
                    # Queue the referenced ignore file instead of matching.
                    if p not in visited:
                        files.append(p)
                    continue
                # Build a single-pattern matcher and test f against it.
                m = matchmod.match(self._root, '', [], [pattern],
                                   warn=self._ui.warn)
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, "")
698
704
    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            # Human-readable description for unsupported file kinds.
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        # Hoist frequently used attributes/functions into locals for speed.
        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Drop explicit files that live inside a subrepo: both lists are
        # sorted, so a single merge-style pass removes them.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '.' in files:
            files = ['.']
        # Seed results with sentinels for subrepos and .hg so later walks
        # never descend into them.
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if self._map.hasdir(nf):
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == '.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # Group every stat'ed path by its case-normalized form.
            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._map.dirfoldmap)
                        if path != folded:
                            # Wrong-case spelling: keep the entry but drop
                            # its stat data so it reads as non-existent.
                            results[path] = None

        return results, dirsfound, dirsnotfound
836
842
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Pick ignore predicates according to what the caller wants listed.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # Hoist hot attribute lookups into locals for the traversal loops.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == 'this' or visitentries == 'all':
                    visitentries = None
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Drop the sentinels seeded by _walkexplicit before returning.
        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1014
1020
1015 def status(self, match, subrepos, ignored, clean, unknown):
1021 def status(self, match, subrepos, ignored, clean, unknown):
1016 '''Determine the status of the working copy relative to the
1022 '''Determine the status of the working copy relative to the
1017 dirstate and return a pair of (unsure, status), where status is of type
1023 dirstate and return a pair of (unsure, status), where status is of type
1018 scmutil.status and:
1024 scmutil.status and:
1019
1025
1020 unsure:
1026 unsure:
1021 files that might have been modified since the dirstate was
1027 files that might have been modified since the dirstate was
1022 written, but need to be read to be sure (size is the same
1028 written, but need to be read to be sure (size is the same
1023 but mtime differs)
1029 but mtime differs)
1024 status.modified:
1030 status.modified:
1025 files that have definitely been modified since the dirstate
1031 files that have definitely been modified since the dirstate
1026 was written (different size or mode)
1032 was written (different size or mode)
1027 status.clean:
1033 status.clean:
1028 files that have definitely not been modified since the
1034 files that have definitely not been modified since the
1029 dirstate was written
1035 dirstate was written
1030 '''
1036 '''
1031 listignored, listclean, listunknown = ignored, clean, unknown
1037 listignored, listclean, listunknown = ignored, clean, unknown
1032 lookup, modified, added, unknown, ignored = [], [], [], [], []
1038 lookup, modified, added, unknown, ignored = [], [], [], [], []
1033 removed, deleted, clean = [], [], []
1039 removed, deleted, clean = [], [], []
1034
1040
1035 dmap = self._map
1041 dmap = self._map
1036 dmap.preload()
1042 dmap.preload()
1037 dcontains = dmap.__contains__
1043 dcontains = dmap.__contains__
1038 dget = dmap.__getitem__
1044 dget = dmap.__getitem__
1039 ladd = lookup.append # aka "unsure"
1045 ladd = lookup.append # aka "unsure"
1040 madd = modified.append
1046 madd = modified.append
1041 aadd = added.append
1047 aadd = added.append
1042 uadd = unknown.append
1048 uadd = unknown.append
1043 iadd = ignored.append
1049 iadd = ignored.append
1044 radd = removed.append
1050 radd = removed.append
1045 dadd = deleted.append
1051 dadd = deleted.append
1046 cadd = clean.append
1052 cadd = clean.append
1047 mexact = match.exact
1053 mexact = match.exact
1048 dirignore = self._dirignore
1054 dirignore = self._dirignore
1049 checkexec = self._checkexec
1055 checkexec = self._checkexec
1050 copymap = self._map.copymap
1056 copymap = self._map.copymap
1051 lastnormaltime = self._lastnormaltime
1057 lastnormaltime = self._lastnormaltime
1052
1058
1053 # We need to do full walks when either
1059 # We need to do full walks when either
1054 # - we're listing all clean files, or
1060 # - we're listing all clean files, or
1055 # - match.traversedir does something, because match.traversedir should
1061 # - match.traversedir does something, because match.traversedir should
1056 # be called for every dir in the working dir
1062 # be called for every dir in the working dir
1057 full = listclean or match.traversedir is not None
1063 full = listclean or match.traversedir is not None
1058 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1064 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1059 full=full).iteritems():
1065 full=full).iteritems():
1060 if not dcontains(fn):
1066 if not dcontains(fn):
1061 if (listignored or mexact(fn)) and dirignore(fn):
1067 if (listignored or mexact(fn)) and dirignore(fn):
1062 if listignored:
1068 if listignored:
1063 iadd(fn)
1069 iadd(fn)
1064 else:
1070 else:
1065 uadd(fn)
1071 uadd(fn)
1066 continue
1072 continue
1067
1073
1068 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1074 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1069 # written like that for performance reasons. dmap[fn] is not a
1075 # written like that for performance reasons. dmap[fn] is not a
1070 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1076 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1071 # opcode has fast paths when the value to be unpacked is a tuple or
1077 # opcode has fast paths when the value to be unpacked is a tuple or
1072 # a list, but falls back to creating a full-fledged iterator in
1078 # a list, but falls back to creating a full-fledged iterator in
1073 # general. That is much slower than simply accessing and storing the
1079 # general. That is much slower than simply accessing and storing the
1074 # tuple members one by one.
1080 # tuple members one by one.
1075 t = dget(fn)
1081 t = dget(fn)
1076 state = t[0]
1082 state = t[0]
1077 mode = t[1]
1083 mode = t[1]
1078 size = t[2]
1084 size = t[2]
1079 time = t[3]
1085 time = t[3]
1080
1086
1081 if not st and state in "nma":
1087 if not st and state in "nma":
1082 dadd(fn)
1088 dadd(fn)
1083 elif state == 'n':
1089 elif state == 'n':
1084 if (size >= 0 and
1090 if (size >= 0 and
1085 ((size != st.st_size and size != st.st_size & _rangemask)
1091 ((size != st.st_size and size != st.st_size & _rangemask)
1086 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1092 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1087 or size == -2 # other parent
1093 or size == -2 # other parent
1088 or fn in copymap):
1094 or fn in copymap):
1089 madd(fn)
1095 madd(fn)
1090 elif (time != st[stat.ST_MTIME]
1096 elif (time != st[stat.ST_MTIME]
1091 and time != st[stat.ST_MTIME] & _rangemask):
1097 and time != st[stat.ST_MTIME] & _rangemask):
1092 ladd(fn)
1098 ladd(fn)
1093 elif st[stat.ST_MTIME] == lastnormaltime:
1099 elif st[stat.ST_MTIME] == lastnormaltime:
1094 # fn may have just been marked as normal and it may have
1100 # fn may have just been marked as normal and it may have
1095 # changed in the same second without changing its size.
1101 # changed in the same second without changing its size.
1096 # This can happen if we quickly do multiple commits.
1102 # This can happen if we quickly do multiple commits.
1097 # Force lookup, so we don't miss such a racy file change.
1103 # Force lookup, so we don't miss such a racy file change.
1098 ladd(fn)
1104 ladd(fn)
1099 elif listclean:
1105 elif listclean:
1100 cadd(fn)
1106 cadd(fn)
1101 elif state == 'm':
1107 elif state == 'm':
1102 madd(fn)
1108 madd(fn)
1103 elif state == 'a':
1109 elif state == 'a':
1104 aadd(fn)
1110 aadd(fn)
1105 elif state == 'r':
1111 elif state == 'r':
1106 radd(fn)
1112 radd(fn)
1107
1113
1108 return (lookup, scmutil.status(modified, added, removed, deleted,
1114 return (lookup, scmutil.status(modified, added, removed, deleted,
1109 unknown, ignored, clean))
1115 unknown, ignored, clean))
1110
1116
1111 def matches(self, match):
1117 def matches(self, match):
1112 '''
1118 '''
1113 return files in the dirstate (in whatever state) filtered by match
1119 return files in the dirstate (in whatever state) filtered by match
1114 '''
1120 '''
1115 dmap = self._map
1121 dmap = self._map
1116 if match.always():
1122 if match.always():
1117 return dmap.keys()
1123 return dmap.keys()
1118 files = match.files()
1124 files = match.files()
1119 if match.isexact():
1125 if match.isexact():
1120 # fast path -- filter the other way around, since typically files is
1126 # fast path -- filter the other way around, since typically files is
1121 # much smaller than dmap
1127 # much smaller than dmap
1122 return [f for f in files if f in dmap]
1128 return [f for f in files if f in dmap]
1123 if match.prefix() and all(fn in dmap for fn in files):
1129 if match.prefix() and all(fn in dmap for fn in files):
1124 # fast path -- all the values are known to be files, so just return
1130 # fast path -- all the values are known to be files, so just return
1125 # that
1131 # that
1126 return list(files)
1132 return list(files)
1127 return [f for f in dmap if match(f)]
1133 return [f for f in dmap if match(f)]
1128
1134
1129 def _actualfilename(self, tr):
1135 def _actualfilename(self, tr):
1130 if tr:
1136 if tr:
1131 return self._pendingfilename
1137 return self._pendingfilename
1132 else:
1138 else:
1133 return self._filename
1139 return self._filename
1134
1140
1135 def savebackup(self, tr, backupname):
1141 def savebackup(self, tr, backupname):
1136 '''Save current dirstate into backup file'''
1142 '''Save current dirstate into backup file'''
1137 filename = self._actualfilename(tr)
1143 filename = self._actualfilename(tr)
1138 assert backupname != filename
1144 assert backupname != filename
1139
1145
1140 # use '_writedirstate' instead of 'write' to write changes certainly,
1146 # use '_writedirstate' instead of 'write' to write changes certainly,
1141 # because the latter omits writing out if transaction is running.
1147 # because the latter omits writing out if transaction is running.
1142 # output file will be used to create backup of dirstate at this point.
1148 # output file will be used to create backup of dirstate at this point.
1143 if self._dirty or not self._opener.exists(filename):
1149 if self._dirty or not self._opener.exists(filename):
1144 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1150 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1145 checkambig=True))
1151 checkambig=True))
1146
1152
1147 if tr:
1153 if tr:
1148 # ensure that subsequent tr.writepending returns True for
1154 # ensure that subsequent tr.writepending returns True for
1149 # changes written out above, even if dirstate is never
1155 # changes written out above, even if dirstate is never
1150 # changed after this
1156 # changed after this
1151 tr.addfilegenerator('dirstate', (self._filename,),
1157 tr.addfilegenerator('dirstate', (self._filename,),
1152 self._writedirstate, location='plain')
1158 self._writedirstate, location='plain')
1153
1159
1154 # ensure that pending file written above is unlinked at
1160 # ensure that pending file written above is unlinked at
1155 # failure, even if tr.writepending isn't invoked until the
1161 # failure, even if tr.writepending isn't invoked until the
1156 # end of this transaction
1162 # end of this transaction
1157 tr.registertmp(filename, location='plain')
1163 tr.registertmp(filename, location='plain')
1158
1164
1159 self._opener.tryunlink(backupname)
1165 self._opener.tryunlink(backupname)
1160 # hardlink backup is okay because _writedirstate is always called
1166 # hardlink backup is okay because _writedirstate is always called
1161 # with an "atomictemp=True" file.
1167 # with an "atomictemp=True" file.
1162 util.copyfile(self._opener.join(filename),
1168 util.copyfile(self._opener.join(filename),
1163 self._opener.join(backupname), hardlink=True)
1169 self._opener.join(backupname), hardlink=True)
1164
1170
1165 def restorebackup(self, tr, backupname):
1171 def restorebackup(self, tr, backupname):
1166 '''Restore dirstate by backup file'''
1172 '''Restore dirstate by backup file'''
1167 # this "invalidate()" prevents "wlock.release()" from writing
1173 # this "invalidate()" prevents "wlock.release()" from writing
1168 # changes of dirstate out after restoring from backup file
1174 # changes of dirstate out after restoring from backup file
1169 self.invalidate()
1175 self.invalidate()
1170 filename = self._actualfilename(tr)
1176 filename = self._actualfilename(tr)
1171 o = self._opener
1177 o = self._opener
1172 if util.samefile(o.join(backupname), o.join(filename)):
1178 if util.samefile(o.join(backupname), o.join(filename)):
1173 o.unlink(backupname)
1179 o.unlink(backupname)
1174 else:
1180 else:
1175 o.rename(backupname, filename, checkambig=True)
1181 o.rename(backupname, filename, checkambig=True)
1176
1182
1177 def clearbackup(self, tr, backupname):
1183 def clearbackup(self, tr, backupname):
1178 '''Clear backup file'''
1184 '''Clear backup file'''
1179 self._opener.unlink(backupname)
1185 self._opener.unlink(backupname)
1180
1186
1181 class dirstatemap(object):
1187 class dirstatemap(object):
1182 """Map encapsulating the dirstate's contents.
1188 """Map encapsulating the dirstate's contents.
1183
1189
1184 The dirstate contains the following state:
1190 The dirstate contains the following state:
1185
1191
1186 - `identity` is the identity of the dirstate file, which can be used to
1192 - `identity` is the identity of the dirstate file, which can be used to
1187 detect when changes have occurred to the dirstate file.
1193 detect when changes have occurred to the dirstate file.
1188
1194
1189 - `parents` is a pair containing the parents of the working copy. The
1195 - `parents` is a pair containing the parents of the working copy. The
1190 parents are updated by calling `setparents`.
1196 parents are updated by calling `setparents`.
1191
1197
1192 - the state map maps filenames to tuples of (state, mode, size, mtime),
1198 - the state map maps filenames to tuples of (state, mode, size, mtime),
1193 where state is a single character representing 'normal', 'added',
1199 where state is a single character representing 'normal', 'added',
1194 'removed', or 'merged'. It is read by treating the dirstate as a
1200 'removed', or 'merged'. It is read by treating the dirstate as a
1195 dict. File state is updated by calling the `addfile`, `removefile` and
1201 dict. File state is updated by calling the `addfile`, `removefile` and
1196 `dropfile` methods.
1202 `dropfile` methods.
1197
1203
1198 - `copymap` maps destination filenames to their source filename.
1204 - `copymap` maps destination filenames to their source filename.
1199
1205
1200 The dirstate also provides the following views onto the state:
1206 The dirstate also provides the following views onto the state:
1201
1207
1202 - `nonnormalset` is a set of the filenames that have state other
1208 - `nonnormalset` is a set of the filenames that have state other
1203 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1209 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1204
1210
1205 - `otherparentset` is a set of the filenames that are marked as coming
1211 - `otherparentset` is a set of the filenames that are marked as coming
1206 from the second parent when the dirstate is currently being merged.
1212 from the second parent when the dirstate is currently being merged.
1207
1213
1208 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1214 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1209 form that they appear as in the dirstate.
1215 form that they appear as in the dirstate.
1210
1216
1211 - `dirfoldmap` is a dict mapping normalized directory names to the
1217 - `dirfoldmap` is a dict mapping normalized directory names to the
1212 denormalized form that they appear as in the dirstate.
1218 denormalized form that they appear as in the dirstate.
1213 """
1219 """
1214
1220
1215 def __init__(self, ui, opener, root):
1221 def __init__(self, ui, opener, root):
1216 self._ui = ui
1222 self._ui = ui
1217 self._opener = opener
1223 self._opener = opener
1218 self._root = root
1224 self._root = root
1219 self._filename = 'dirstate'
1225 self._filename = 'dirstate'
1220
1226
1221 self._parents = None
1227 self._parents = None
1222 self._dirtyparents = False
1228 self._dirtyparents = False
1223
1229
1224 # for consistent view between _pl() and _read() invocations
1230 # for consistent view between _pl() and _read() invocations
1225 self._pendingmode = None
1231 self._pendingmode = None
1226
1232
1227 @propertycache
1233 @propertycache
1228 def _map(self):
1234 def _map(self):
1229 self._map = {}
1235 self._map = {}
1230 self.read()
1236 self.read()
1231 return self._map
1237 return self._map
1232
1238
1233 @propertycache
1239 @propertycache
1234 def copymap(self):
1240 def copymap(self):
1235 self.copymap = {}
1241 self.copymap = {}
1236 self._map
1242 self._map
1237 return self.copymap
1243 return self.copymap
1238
1244
1239 def clear(self):
1245 def clear(self):
1240 self._map.clear()
1246 self._map.clear()
1241 self.copymap.clear()
1247 self.copymap.clear()
1242 self.setparents(nullid, nullid)
1248 self.setparents(nullid, nullid)
1243 util.clearcachedproperty(self, "_dirs")
1249 util.clearcachedproperty(self, "_dirs")
1244 util.clearcachedproperty(self, "_alldirs")
1250 util.clearcachedproperty(self, "_alldirs")
1245 util.clearcachedproperty(self, "filefoldmap")
1251 util.clearcachedproperty(self, "filefoldmap")
1246 util.clearcachedproperty(self, "dirfoldmap")
1252 util.clearcachedproperty(self, "dirfoldmap")
1247 util.clearcachedproperty(self, "nonnormalset")
1253 util.clearcachedproperty(self, "nonnormalset")
1248 util.clearcachedproperty(self, "otherparentset")
1254 util.clearcachedproperty(self, "otherparentset")
1249
1255
1250 def items(self):
1256 def items(self):
1251 return self._map.iteritems()
1257 return self._map.iteritems()
1252
1258
1253 # forward for python2,3 compat
1259 # forward for python2,3 compat
1254 iteritems = items
1260 iteritems = items
1255
1261
1256 def __len__(self):
1262 def __len__(self):
1257 return len(self._map)
1263 return len(self._map)
1258
1264
1259 def __iter__(self):
1265 def __iter__(self):
1260 return iter(self._map)
1266 return iter(self._map)
1261
1267
1262 def get(self, key, default=None):
1268 def get(self, key, default=None):
1263 return self._map.get(key, default)
1269 return self._map.get(key, default)
1264
1270
1265 def __contains__(self, key):
1271 def __contains__(self, key):
1266 return key in self._map
1272 return key in self._map
1267
1273
1268 def __getitem__(self, key):
1274 def __getitem__(self, key):
1269 return self._map[key]
1275 return self._map[key]
1270
1276
1271 def keys(self):
1277 def keys(self):
1272 return self._map.keys()
1278 return self._map.keys()
1273
1279
1274 def preload(self):
1280 def preload(self):
1275 """Loads the underlying data, if it's not already loaded"""
1281 """Loads the underlying data, if it's not already loaded"""
1276 self._map
1282 self._map
1277
1283
1278 def addfile(self, f, oldstate, state, mode, size, mtime):
1284 def addfile(self, f, oldstate, state, mode, size, mtime):
1279 """Add a tracked file to the dirstate."""
1285 """Add a tracked file to the dirstate."""
1280 if oldstate in "?r" and r"_dirs" in self.__dict__:
1286 if oldstate in "?r" and r"_dirs" in self.__dict__:
1281 self._dirs.addpath(f)
1287 self._dirs.addpath(f)
1282 if oldstate == "?" and r"_alldirs" in self.__dict__:
1288 if oldstate == "?" and r"_alldirs" in self.__dict__:
1283 self._alldirs.addpath(f)
1289 self._alldirs.addpath(f)
1284 self._map[f] = dirstatetuple(state, mode, size, mtime)
1290 self._map[f] = dirstatetuple(state, mode, size, mtime)
1285 if state != 'n' or mtime == -1:
1291 if state != 'n' or mtime == -1:
1286 self.nonnormalset.add(f)
1292 self.nonnormalset.add(f)
1287 if size == -2:
1293 if size == -2:
1288 self.otherparentset.add(f)
1294 self.otherparentset.add(f)
1289
1295
1290 def removefile(self, f, oldstate, size):
1296 def removefile(self, f, oldstate, size):
1291 """
1297 """
1292 Mark a file as removed in the dirstate.
1298 Mark a file as removed in the dirstate.
1293
1299
1294 The `size` parameter is used to store sentinel values that indicate
1300 The `size` parameter is used to store sentinel values that indicate
1295 the file's previous state. In the future, we should refactor this
1301 the file's previous state. In the future, we should refactor this
1296 to be more explicit about what that state is.
1302 to be more explicit about what that state is.
1297 """
1303 """
1298 if oldstate not in "?r" and r"_dirs" in self.__dict__:
1304 if oldstate not in "?r" and r"_dirs" in self.__dict__:
1299 self._dirs.delpath(f)
1305 self._dirs.delpath(f)
1300 if oldstate == "?" and r"_alldirs" in self.__dict__:
1306 if oldstate == "?" and r"_alldirs" in self.__dict__:
1301 self._alldirs.addpath(f)
1307 self._alldirs.addpath(f)
1302 if r"filefoldmap" in self.__dict__:
1308 if r"filefoldmap" in self.__dict__:
1303 normed = util.normcase(f)
1309 normed = util.normcase(f)
1304 self.filefoldmap.pop(normed, None)
1310 self.filefoldmap.pop(normed, None)
1305 self._map[f] = dirstatetuple('r', 0, size, 0)
1311 self._map[f] = dirstatetuple('r', 0, size, 0)
1306 self.nonnormalset.add(f)
1312 self.nonnormalset.add(f)
1307
1313
1308 def dropfile(self, f, oldstate):
1314 def dropfile(self, f, oldstate):
1309 """
1315 """
1310 Remove a file from the dirstate. Returns True if the file was
1316 Remove a file from the dirstate. Returns True if the file was
1311 previously recorded.
1317 previously recorded.
1312 """
1318 """
1313 exists = self._map.pop(f, None) is not None
1319 exists = self._map.pop(f, None) is not None
1314 if exists:
1320 if exists:
1315 if oldstate != "r" and r"_dirs" in self.__dict__:
1321 if oldstate != "r" and r"_dirs" in self.__dict__:
1316 self._dirs.delpath(f)
1322 self._dirs.delpath(f)
1317 if r"_alldirs" in self.__dict__:
1323 if r"_alldirs" in self.__dict__:
1318 self._alldirs.delpath(f)
1324 self._alldirs.delpath(f)
1319 if r"filefoldmap" in self.__dict__:
1325 if r"filefoldmap" in self.__dict__:
1320 normed = util.normcase(f)
1326 normed = util.normcase(f)
1321 self.filefoldmap.pop(normed, None)
1327 self.filefoldmap.pop(normed, None)
1322 self.nonnormalset.discard(f)
1328 self.nonnormalset.discard(f)
1323 return exists
1329 return exists
1324
1330
1325 def clearambiguoustimes(self, files, now):
1331 def clearambiguoustimes(self, files, now):
1326 for f in files:
1332 for f in files:
1327 e = self.get(f)
1333 e = self.get(f)
1328 if e is not None and e[0] == 'n' and e[3] == now:
1334 if e is not None and e[0] == 'n' and e[3] == now:
1329 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1335 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1330 self.nonnormalset.add(f)
1336 self.nonnormalset.add(f)
1331
1337
1332 def nonnormalentries(self):
1338 def nonnormalentries(self):
1333 '''Compute the nonnormal dirstate entries from the dmap'''
1339 '''Compute the nonnormal dirstate entries from the dmap'''
1334 try:
1340 try:
1335 return parsers.nonnormalotherparententries(self._map)
1341 return parsers.nonnormalotherparententries(self._map)
1336 except AttributeError:
1342 except AttributeError:
1337 nonnorm = set()
1343 nonnorm = set()
1338 otherparent = set()
1344 otherparent = set()
1339 for fname, e in self._map.iteritems():
1345 for fname, e in self._map.iteritems():
1340 if e[0] != 'n' or e[3] == -1:
1346 if e[0] != 'n' or e[3] == -1:
1341 nonnorm.add(fname)
1347 nonnorm.add(fname)
1342 if e[0] == 'n' and e[2] == -2:
1348 if e[0] == 'n' and e[2] == -2:
1343 otherparent.add(fname)
1349 otherparent.add(fname)
1344 return nonnorm, otherparent
1350 return nonnorm, otherparent
1345
1351
1346 @propertycache
1352 @propertycache
1347 def filefoldmap(self):
1353 def filefoldmap(self):
1348 """Returns a dictionary mapping normalized case paths to their
1354 """Returns a dictionary mapping normalized case paths to their
1349 non-normalized versions.
1355 non-normalized versions.
1350 """
1356 """
1351 try:
1357 try:
1352 makefilefoldmap = parsers.make_file_foldmap
1358 makefilefoldmap = parsers.make_file_foldmap
1353 except AttributeError:
1359 except AttributeError:
1354 pass
1360 pass
1355 else:
1361 else:
1356 return makefilefoldmap(self._map, util.normcasespec,
1362 return makefilefoldmap(self._map, util.normcasespec,
1357 util.normcasefallback)
1363 util.normcasefallback)
1358
1364
1359 f = {}
1365 f = {}
1360 normcase = util.normcase
1366 normcase = util.normcase
1361 for name, s in self._map.iteritems():
1367 for name, s in self._map.iteritems():
1362 if s[0] != 'r':
1368 if s[0] != 'r':
1363 f[normcase(name)] = name
1369 f[normcase(name)] = name
1364 f['.'] = '.' # prevents useless util.fspath() invocation
1370 f['.'] = '.' # prevents useless util.fspath() invocation
1365 return f
1371 return f
1366
1372
1367 def hastrackeddir(self, d):
1373 def hastrackeddir(self, d):
1368 """
1374 """
1369 Returns True if the dirstate contains a tracked (not removed) file
1375 Returns True if the dirstate contains a tracked (not removed) file
1370 in this directory.
1376 in this directory.
1371 """
1377 """
1372 return d in self._dirs
1378 return d in self._dirs
1373
1379
1374 def hasdir(self, d):
1380 def hasdir(self, d):
1375 """
1381 """
1376 Returns True if the dirstate contains a file (tracked or removed)
1382 Returns True if the dirstate contains a file (tracked or removed)
1377 in this directory.
1383 in this directory.
1378 """
1384 """
1379 return d in self._alldirs
1385 return d in self._alldirs
1380
1386
1381 @propertycache
1387 @propertycache
1382 def _dirs(self):
1388 def _dirs(self):
1383 return util.dirs(self._map, 'r')
1389 return util.dirs(self._map, 'r')
1384
1390
1385 @propertycache
1391 @propertycache
1386 def _alldirs(self):
1392 def _alldirs(self):
1387 return util.dirs(self._map)
1393 return util.dirs(self._map)
1388
1394
1389 def _opendirstatefile(self):
1395 def _opendirstatefile(self):
1390 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1396 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1391 if self._pendingmode is not None and self._pendingmode != mode:
1397 if self._pendingmode is not None and self._pendingmode != mode:
1392 fp.close()
1398 fp.close()
1393 raise error.Abort(_('working directory state may be '
1399 raise error.Abort(_('working directory state may be '
1394 'changed parallelly'))
1400 'changed parallelly'))
1395 self._pendingmode = mode
1401 self._pendingmode = mode
1396 return fp
1402 return fp
1397
1403
1398 def parents(self):
1404 def parents(self):
1399 if not self._parents:
1405 if not self._parents:
1400 try:
1406 try:
1401 fp = self._opendirstatefile()
1407 fp = self._opendirstatefile()
1402 st = fp.read(40)
1408 st = fp.read(40)
1403 fp.close()
1409 fp.close()
1404 except IOError as err:
1410 except IOError as err:
1405 if err.errno != errno.ENOENT:
1411 if err.errno != errno.ENOENT:
1406 raise
1412 raise
1407 # File doesn't exist, so the current state is empty
1413 # File doesn't exist, so the current state is empty
1408 st = ''
1414 st = ''
1409
1415
1410 l = len(st)
1416 l = len(st)
1411 if l == 40:
1417 if l == 40:
1412 self._parents = (st[:20], st[20:40])
1418 self._parents = (st[:20], st[20:40])
1413 elif l == 0:
1419 elif l == 0:
1414 self._parents = (nullid, nullid)
1420 self._parents = (nullid, nullid)
1415 else:
1421 else:
1416 raise error.Abort(_('working directory state appears '
1422 raise error.Abort(_('working directory state appears '
1417 'damaged!'))
1423 'damaged!'))
1418
1424
1419 return self._parents
1425 return self._parents
1420
1426
1421 def setparents(self, p1, p2):
1427 def setparents(self, p1, p2):
1422 self._parents = (p1, p2)
1428 self._parents = (p1, p2)
1423 self._dirtyparents = True
1429 self._dirtyparents = True
1424
1430
1425 def read(self):
1431 def read(self):
1426 # ignore HG_PENDING because identity is used only for writing
1432 # ignore HG_PENDING because identity is used only for writing
1427 self.identity = util.filestat.frompath(
1433 self.identity = util.filestat.frompath(
1428 self._opener.join(self._filename))
1434 self._opener.join(self._filename))
1429
1435
1430 try:
1436 try:
1431 fp = self._opendirstatefile()
1437 fp = self._opendirstatefile()
1432 try:
1438 try:
1433 st = fp.read()
1439 st = fp.read()
1434 finally:
1440 finally:
1435 fp.close()
1441 fp.close()
1436 except IOError as err:
1442 except IOError as err:
1437 if err.errno != errno.ENOENT:
1443 if err.errno != errno.ENOENT:
1438 raise
1444 raise
1439 return
1445 return
1440 if not st:
1446 if not st:
1441 return
1447 return
1442
1448
1443 if util.safehasattr(parsers, 'dict_new_presized'):
1449 if util.safehasattr(parsers, 'dict_new_presized'):
1444 # Make an estimate of the number of files in the dirstate based on
1450 # Make an estimate of the number of files in the dirstate based on
1445 # its size. From a linear regression on a set of real-world repos,
1451 # its size. From a linear regression on a set of real-world repos,
1446 # all over 10,000 files, the size of a dirstate entry is 85
1452 # all over 10,000 files, the size of a dirstate entry is 85
1447 # bytes. The cost of resizing is significantly higher than the cost
1453 # bytes. The cost of resizing is significantly higher than the cost
1448 # of filling in a larger presized dict, so subtract 20% from the
1454 # of filling in a larger presized dict, so subtract 20% from the
1449 # size.
1455 # size.
1450 #
1456 #
1451 # This heuristic is imperfect in many ways, so in a future dirstate
1457 # This heuristic is imperfect in many ways, so in a future dirstate
1452 # format update it makes sense to just record the number of entries
1458 # format update it makes sense to just record the number of entries
1453 # on write.
1459 # on write.
1454 self._map = parsers.dict_new_presized(len(st) // 71)
1460 self._map = parsers.dict_new_presized(len(st) // 71)
1455
1461
1456 # Python's garbage collector triggers a GC each time a certain number
1462 # Python's garbage collector triggers a GC each time a certain number
1457 # of container objects (the number being defined by
1463 # of container objects (the number being defined by
1458 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1464 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1459 # for each file in the dirstate. The C version then immediately marks
1465 # for each file in the dirstate. The C version then immediately marks
1460 # them as not to be tracked by the collector. However, this has no
1466 # them as not to be tracked by the collector. However, this has no
1461 # effect on when GCs are triggered, only on what objects the GC looks
1467 # effect on when GCs are triggered, only on what objects the GC looks
1462 # into. This means that O(number of files) GCs are unavoidable.
1468 # into. This means that O(number of files) GCs are unavoidable.
1463 # Depending on when in the process's lifetime the dirstate is parsed,
1469 # Depending on when in the process's lifetime the dirstate is parsed,
1464 # this can get very expensive. As a workaround, disable GC while
1470 # this can get very expensive. As a workaround, disable GC while
1465 # parsing the dirstate.
1471 # parsing the dirstate.
1466 #
1472 #
1467 # (we cannot decorate the function directly since it is in a C module)
1473 # (we cannot decorate the function directly since it is in a C module)
1468 parse_dirstate = util.nogc(parsers.parse_dirstate)
1474 if rustext is not None:
1475 parse_dirstate = rustext.dirstate.parse_dirstate
1476 else:
1477 parse_dirstate = parsers.parse_dirstate
1478
1479 parse_dirstate = util.nogc(parse_dirstate)
1469 p = parse_dirstate(self._map, self.copymap, st)
1480 p = parse_dirstate(self._map, self.copymap, st)
1470 if not self._dirtyparents:
1481 if not self._dirtyparents:
1471 self.setparents(*p)
1482 self.setparents(*p)
1472
1483
1473 # Avoid excess attribute lookups by fast pathing certain checks
1484 # Avoid excess attribute lookups by fast pathing certain checks
1474 self.__contains__ = self._map.__contains__
1485 self.__contains__ = self._map.__contains__
1475 self.__getitem__ = self._map.__getitem__
1486 self.__getitem__ = self._map.__getitem__
1476 self.get = self._map.get
1487 self.get = self._map.get
1477
1488
1478 def write(self, st, now):
1489 def write(self, st, now):
1479 st.write(parsers.pack_dirstate(self._map, self.copymap,
1490 if rustext is not None:
1491 pack_dirstate = rustext.dirstate.pack_dirstate
1492 else:
1493 pack_dirstate = parsers.pack_dirstate
1494
1495 st.write(pack_dirstate(self._map, self.copymap,
1480 self.parents(), now))
1496 self.parents(), now))
1481 st.close()
1497 st.close()
1482 self._dirtyparents = False
1498 self._dirtyparents = False
1483 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1499 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1484
1500
1485 @propertycache
1501 @propertycache
1486 def nonnormalset(self):
1502 def nonnormalset(self):
1487 nonnorm, otherparents = self.nonnormalentries()
1503 nonnorm, otherparents = self.nonnormalentries()
1488 self.otherparentset = otherparents
1504 self.otherparentset = otherparents
1489 return nonnorm
1505 return nonnorm
1490
1506
1491 @propertycache
1507 @propertycache
1492 def otherparentset(self):
1508 def otherparentset(self):
1493 nonnorm, otherparents = self.nonnormalentries()
1509 nonnorm, otherparents = self.nonnormalentries()
1494 self.nonnormalset = nonnorm
1510 self.nonnormalset = nonnorm
1495 return otherparents
1511 return otherparents
1496
1512
1497 @propertycache
1513 @propertycache
1498 def identity(self):
1514 def identity(self):
1499 self._map
1515 self._map
1500 return self.identity
1516 return self.identity
1501
1517
1502 @propertycache
1518 @propertycache
1503 def dirfoldmap(self):
1519 def dirfoldmap(self):
1504 f = {}
1520 f = {}
1505 normcase = util.normcase
1521 normcase = util.normcase
1506 for name in self._dirs:
1522 for name in self._dirs:
1507 f[normcase(name)] = name
1523 f[normcase(name)] = name
1508 return f
1524 return f
@@ -1,78 +1,90
1 # extension to emulate invoking 'dirstate.write()' at the time
1 # extension to emulate invoking 'dirstate.write()' at the time
2 # specified by '[fakedirstatewritetime] fakenow', only when
2 # specified by '[fakedirstatewritetime] fakenow', only when
3 # 'dirstate.write()' is invoked via functions below:
3 # 'dirstate.write()' is invoked via functions below:
4 #
4 #
5 # - 'workingctx._poststatusfixup()' (= 'repo.status()')
5 # - 'workingctx._poststatusfixup()' (= 'repo.status()')
6 # - 'committablectx.markcommitted()'
6 # - 'committablectx.markcommitted()'
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from mercurial import (
10 from mercurial import (
11 context,
11 context,
12 dirstate,
12 dirstate,
13 extensions,
13 extensions,
14 policy,
14 policy,
15 registrar,
15 registrar,
16 )
16 )
17 from mercurial.utils import dateutil
17 from mercurial.utils import dateutil
18
18
19 try:
20 from mercurial import rustext
21 rustext.__name__ # force actual import (see hgdemandimport)
22 except ImportError:
23 rustext = None
24
19 configtable = {}
25 configtable = {}
20 configitem = registrar.configitem(configtable)
26 configitem = registrar.configitem(configtable)
21
27
22 configitem(b'fakedirstatewritetime', b'fakenow',
28 configitem(b'fakedirstatewritetime', b'fakenow',
23 default=None,
29 default=None,
24 )
30 )
25
31
26 parsers = policy.importmod(r'parsers')
32 parsers = policy.importmod(r'parsers')
27
33
28 def pack_dirstate(fakenow, orig, dmap, copymap, pl, now):
34 def pack_dirstate(fakenow, orig, dmap, copymap, pl, now):
29 # execute what original parsers.pack_dirstate should do actually
35 # execute what original parsers.pack_dirstate should do actually
30 # for consistency
36 # for consistency
31 actualnow = int(now)
37 actualnow = int(now)
32 for f, e in dmap.items():
38 for f, e in dmap.items():
33 if e[0] == 'n' and e[3] == actualnow:
39 if e[0] == 'n' and e[3] == actualnow:
34 e = parsers.dirstatetuple(e[0], e[1], e[2], -1)
40 e = parsers.dirstatetuple(e[0], e[1], e[2], -1)
35 dmap[f] = e
41 dmap[f] = e
36
42
37 return orig(dmap, copymap, pl, fakenow)
43 return orig(dmap, copymap, pl, fakenow)
38
44
39 def fakewrite(ui, func):
45 def fakewrite(ui, func):
40 # fake "now" of 'pack_dirstate' only if it is invoked while 'func'
46 # fake "now" of 'pack_dirstate' only if it is invoked while 'func'
41
47
42 fakenow = ui.config(b'fakedirstatewritetime', b'fakenow')
48 fakenow = ui.config(b'fakedirstatewritetime', b'fakenow')
43 if not fakenow:
49 if not fakenow:
44 # Execute original one, if fakenow isn't configured. This is
50 # Execute original one, if fakenow isn't configured. This is
45 # useful to prevent subrepos from executing replaced one,
51 # useful to prevent subrepos from executing replaced one,
46 # because replacing 'parsers.pack_dirstate' is also effective
52 # because replacing 'parsers.pack_dirstate' is also effective
47 # in subrepos.
53 # in subrepos.
48 return func()
54 return func()
49
55
50 # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between
56 # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between
51 # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
57 # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
52 fakenow = dateutil.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
58 fakenow = dateutil.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
53
59
60 if rustext is not None:
61 orig_module = rustext.dirstate
62 orig_pack_dirstate = rustext.dirstate.pack_dirstate
63 else:
64 orig_module = parsers
54 orig_pack_dirstate = parsers.pack_dirstate
65 orig_pack_dirstate = parsers.pack_dirstate
66
55 orig_dirstate_getfsnow = dirstate._getfsnow
67 orig_dirstate_getfsnow = dirstate._getfsnow
56 wrapper = lambda *args: pack_dirstate(fakenow, orig_pack_dirstate, *args)
68 wrapper = lambda *args: pack_dirstate(fakenow, orig_pack_dirstate, *args)
57
69
58 parsers.pack_dirstate = wrapper
70 orig_module.pack_dirstate = wrapper
59 dirstate._getfsnow = lambda *args: fakenow
71 dirstate._getfsnow = lambda *args: fakenow
60 try:
72 try:
61 return func()
73 return func()
62 finally:
74 finally:
63 parsers.pack_dirstate = orig_pack_dirstate
75 orig_module.pack_dirstate = orig_pack_dirstate
64 dirstate._getfsnow = orig_dirstate_getfsnow
76 dirstate._getfsnow = orig_dirstate_getfsnow
65
77
66 def _poststatusfixup(orig, workingctx, status, fixup):
78 def _poststatusfixup(orig, workingctx, status, fixup):
67 ui = workingctx.repo().ui
79 ui = workingctx.repo().ui
68 return fakewrite(ui, lambda : orig(workingctx, status, fixup))
80 return fakewrite(ui, lambda : orig(workingctx, status, fixup))
69
81
70 def markcommitted(orig, committablectx, node):
82 def markcommitted(orig, committablectx, node):
71 ui = committablectx.repo().ui
83 ui = committablectx.repo().ui
72 return fakewrite(ui, lambda : orig(committablectx, node))
84 return fakewrite(ui, lambda : orig(committablectx, node))
73
85
74 def extsetup(ui):
86 def extsetup(ui):
75 extensions.wrapfunction(context.workingctx, '_poststatusfixup',
87 extensions.wrapfunction(context.workingctx, '_poststatusfixup',
76 _poststatusfixup)
88 _poststatusfixup)
77 extensions.wrapfunction(context.workingctx, 'markcommitted',
89 extensions.wrapfunction(context.workingctx, 'markcommitted',
78 markcommitted)
90 markcommitted)
General Comments 0
You need to be logged in to leave comments. Login now