##// END OF EJS Templates
dirstate: remove the dedicated backup logic...
marmoute -
r50981:76d44983 default
parent child Browse files
Show More
@@ -1,407 +1,395 b''
1 import contextlib
1 import contextlib
2 import os
2 import os
3
3
4 from mercurial.node import sha1nodeconstants
4 from mercurial.node import sha1nodeconstants
5 from mercurial import (
5 from mercurial import (
6 dirstatemap,
6 dirstatemap,
7 error,
7 error,
8 extensions,
8 extensions,
9 match as matchmod,
9 match as matchmod,
10 pycompat,
10 pycompat,
11 scmutil,
11 scmutil,
12 util,
12 util,
13 )
13 )
14 from mercurial.dirstateutils import (
14 from mercurial.dirstateutils import (
15 timestamp,
15 timestamp,
16 )
16 )
17 from mercurial.interfaces import (
17 from mercurial.interfaces import (
18 dirstate as intdirstate,
18 dirstate as intdirstate,
19 util as interfaceutil,
19 util as interfaceutil,
20 )
20 )
21
21
22 from . import gitutil
22 from . import gitutil
23
23
24
24
25 DirstateItem = dirstatemap.DirstateItem
25 DirstateItem = dirstatemap.DirstateItem
26 propertycache = util.propertycache
26 propertycache = util.propertycache
27 pygit2 = gitutil.get_pygit2()
27 pygit2 = gitutil.get_pygit2()
28
28
29
29
30 def readpatternfile(orig, filepath, warn, sourceinfo=False):
30 def readpatternfile(orig, filepath, warn, sourceinfo=False):
31 if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')):
31 if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')):
32 return orig(filepath, warn, sourceinfo=False)
32 return orig(filepath, warn, sourceinfo=False)
33 result = []
33 result = []
34 warnings = []
34 warnings = []
35 with open(filepath, 'rb') as fp:
35 with open(filepath, 'rb') as fp:
36 for l in fp:
36 for l in fp:
37 l = l.strip()
37 l = l.strip()
38 if not l or l.startswith(b'#'):
38 if not l or l.startswith(b'#'):
39 continue
39 continue
40 if l.startswith(b'!'):
40 if l.startswith(b'!'):
41 warnings.append(b'unsupported ignore pattern %s' % l)
41 warnings.append(b'unsupported ignore pattern %s' % l)
42 continue
42 continue
43 if l.startswith(b'/'):
43 if l.startswith(b'/'):
44 result.append(b'rootglob:' + l[1:])
44 result.append(b'rootglob:' + l[1:])
45 else:
45 else:
46 result.append(b'relglob:' + l)
46 result.append(b'relglob:' + l)
47 return result, warnings
47 return result, warnings
48
48
49
49
50 extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile)
50 extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile)
51
51
52
52
53 _STATUS_MAP = {}
53 _STATUS_MAP = {}
54 if pygit2:
54 if pygit2:
55 _STATUS_MAP = {
55 _STATUS_MAP = {
56 pygit2.GIT_STATUS_CONFLICTED: b'm',
56 pygit2.GIT_STATUS_CONFLICTED: b'm',
57 pygit2.GIT_STATUS_CURRENT: b'n',
57 pygit2.GIT_STATUS_CURRENT: b'n',
58 pygit2.GIT_STATUS_IGNORED: b'?',
58 pygit2.GIT_STATUS_IGNORED: b'?',
59 pygit2.GIT_STATUS_INDEX_DELETED: b'r',
59 pygit2.GIT_STATUS_INDEX_DELETED: b'r',
60 pygit2.GIT_STATUS_INDEX_MODIFIED: b'n',
60 pygit2.GIT_STATUS_INDEX_MODIFIED: b'n',
61 pygit2.GIT_STATUS_INDEX_NEW: b'a',
61 pygit2.GIT_STATUS_INDEX_NEW: b'a',
62 pygit2.GIT_STATUS_INDEX_RENAMED: b'a',
62 pygit2.GIT_STATUS_INDEX_RENAMED: b'a',
63 pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n',
63 pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n',
64 pygit2.GIT_STATUS_WT_DELETED: b'r',
64 pygit2.GIT_STATUS_WT_DELETED: b'r',
65 pygit2.GIT_STATUS_WT_MODIFIED: b'n',
65 pygit2.GIT_STATUS_WT_MODIFIED: b'n',
66 pygit2.GIT_STATUS_WT_NEW: b'?',
66 pygit2.GIT_STATUS_WT_NEW: b'?',
67 pygit2.GIT_STATUS_WT_RENAMED: b'a',
67 pygit2.GIT_STATUS_WT_RENAMED: b'a',
68 pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
68 pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
69 pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
69 pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
70 pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
70 pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
71 }
71 }
72
72
73
73
74 @interfaceutil.implementer(intdirstate.idirstate)
74 @interfaceutil.implementer(intdirstate.idirstate)
75 class gitdirstate:
75 class gitdirstate:
76 def __init__(self, ui, vfs, gitrepo, use_dirstate_v2):
76 def __init__(self, ui, vfs, gitrepo, use_dirstate_v2):
77 self._ui = ui
77 self._ui = ui
78 self._root = os.path.dirname(vfs.base)
78 self._root = os.path.dirname(vfs.base)
79 self._opener = vfs
79 self._opener = vfs
80 self.git = gitrepo
80 self.git = gitrepo
81 self._plchangecallbacks = {}
81 self._plchangecallbacks = {}
82 # TODO: context.poststatusfixup is bad and uses this attribute
82 # TODO: context.poststatusfixup is bad and uses this attribute
83 self._dirty = False
83 self._dirty = False
84 self._mapcls = dirstatemap.dirstatemap
84 self._mapcls = dirstatemap.dirstatemap
85 self._use_dirstate_v2 = use_dirstate_v2
85 self._use_dirstate_v2 = use_dirstate_v2
86
86
87 @propertycache
87 @propertycache
88 def _map(self):
88 def _map(self):
89 """Return the dirstate contents (see documentation for dirstatemap)."""
89 """Return the dirstate contents (see documentation for dirstatemap)."""
90 self._map = self._mapcls(
90 self._map = self._mapcls(
91 self._ui,
91 self._ui,
92 self._opener,
92 self._opener,
93 self._root,
93 self._root,
94 sha1nodeconstants,
94 sha1nodeconstants,
95 self._use_dirstate_v2,
95 self._use_dirstate_v2,
96 )
96 )
97 return self._map
97 return self._map
98
98
99 def p1(self):
99 def p1(self):
100 try:
100 try:
101 return self.git.head.peel().id.raw
101 return self.git.head.peel().id.raw
102 except pygit2.GitError:
102 except pygit2.GitError:
103 # Typically happens when peeling HEAD fails, as in an
103 # Typically happens when peeling HEAD fails, as in an
104 # empty repository.
104 # empty repository.
105 return sha1nodeconstants.nullid
105 return sha1nodeconstants.nullid
106
106
107 def p2(self):
107 def p2(self):
108 # TODO: MERGE_HEAD? something like that, right?
108 # TODO: MERGE_HEAD? something like that, right?
109 return sha1nodeconstants.nullid
109 return sha1nodeconstants.nullid
110
110
111 def setparents(self, p1, p2=None):
111 def setparents(self, p1, p2=None):
112 if p2 is None:
112 if p2 is None:
113 p2 = sha1nodeconstants.nullid
113 p2 = sha1nodeconstants.nullid
114 assert p2 == sha1nodeconstants.nullid, b'TODO merging support'
114 assert p2 == sha1nodeconstants.nullid, b'TODO merging support'
115 self.git.head.set_target(gitutil.togitnode(p1))
115 self.git.head.set_target(gitutil.togitnode(p1))
116
116
117 @util.propertycache
117 @util.propertycache
118 def identity(self):
118 def identity(self):
119 return util.filestat.frompath(
119 return util.filestat.frompath(
120 os.path.join(self._root, b'.git', b'index')
120 os.path.join(self._root, b'.git', b'index')
121 )
121 )
122
122
123 def branch(self):
123 def branch(self):
124 return b'default'
124 return b'default'
125
125
126 def parents(self):
126 def parents(self):
127 # TODO how on earth do we find p2 if a merge is in flight?
127 # TODO how on earth do we find p2 if a merge is in flight?
128 return self.p1(), sha1nodeconstants.nullid
128 return self.p1(), sha1nodeconstants.nullid
129
129
130 def __iter__(self):
130 def __iter__(self):
131 return (pycompat.fsencode(f.path) for f in self.git.index)
131 return (pycompat.fsencode(f.path) for f in self.git.index)
132
132
133 def items(self):
133 def items(self):
134 for ie in self.git.index:
134 for ie in self.git.index:
135 yield ie.path, None # value should be a DirstateItem
135 yield ie.path, None # value should be a DirstateItem
136
136
137 # py2,3 compat forward
137 # py2,3 compat forward
138 iteritems = items
138 iteritems = items
139
139
140 def __getitem__(self, filename):
140 def __getitem__(self, filename):
141 try:
141 try:
142 gs = self.git.status_file(filename)
142 gs = self.git.status_file(filename)
143 except KeyError:
143 except KeyError:
144 return b'?'
144 return b'?'
145 return _STATUS_MAP[gs]
145 return _STATUS_MAP[gs]
146
146
147 def __contains__(self, filename):
147 def __contains__(self, filename):
148 try:
148 try:
149 gs = self.git.status_file(filename)
149 gs = self.git.status_file(filename)
150 return _STATUS_MAP[gs] != b'?'
150 return _STATUS_MAP[gs] != b'?'
151 except KeyError:
151 except KeyError:
152 return False
152 return False
153
153
154 def status(self, match, subrepos, ignored, clean, unknown):
154 def status(self, match, subrepos, ignored, clean, unknown):
155 listclean = clean
155 listclean = clean
156 # TODO handling of clean files - can we get that from git.status()?
156 # TODO handling of clean files - can we get that from git.status()?
157 modified, added, removed, deleted, unknown, ignored, clean = (
157 modified, added, removed, deleted, unknown, ignored, clean = (
158 [],
158 [],
159 [],
159 [],
160 [],
160 [],
161 [],
161 [],
162 [],
162 [],
163 [],
163 [],
164 [],
164 [],
165 )
165 )
166
166
167 try:
167 try:
168 mtime_boundary = timestamp.get_fs_now(self._opener)
168 mtime_boundary = timestamp.get_fs_now(self._opener)
169 except OSError:
169 except OSError:
170 # In largefiles or readonly context
170 # In largefiles or readonly context
171 mtime_boundary = None
171 mtime_boundary = None
172
172
173 gstatus = self.git.status()
173 gstatus = self.git.status()
174 for path, status in gstatus.items():
174 for path, status in gstatus.items():
175 path = pycompat.fsencode(path)
175 path = pycompat.fsencode(path)
176 if not match(path):
176 if not match(path):
177 continue
177 continue
178 if status == pygit2.GIT_STATUS_IGNORED:
178 if status == pygit2.GIT_STATUS_IGNORED:
179 if path.endswith(b'/'):
179 if path.endswith(b'/'):
180 continue
180 continue
181 ignored.append(path)
181 ignored.append(path)
182 elif status in (
182 elif status in (
183 pygit2.GIT_STATUS_WT_MODIFIED,
183 pygit2.GIT_STATUS_WT_MODIFIED,
184 pygit2.GIT_STATUS_INDEX_MODIFIED,
184 pygit2.GIT_STATUS_INDEX_MODIFIED,
185 pygit2.GIT_STATUS_WT_MODIFIED
185 pygit2.GIT_STATUS_WT_MODIFIED
186 | pygit2.GIT_STATUS_INDEX_MODIFIED,
186 | pygit2.GIT_STATUS_INDEX_MODIFIED,
187 ):
187 ):
188 modified.append(path)
188 modified.append(path)
189 elif status == pygit2.GIT_STATUS_INDEX_NEW:
189 elif status == pygit2.GIT_STATUS_INDEX_NEW:
190 added.append(path)
190 added.append(path)
191 elif status == pygit2.GIT_STATUS_WT_NEW:
191 elif status == pygit2.GIT_STATUS_WT_NEW:
192 unknown.append(path)
192 unknown.append(path)
193 elif status == pygit2.GIT_STATUS_WT_DELETED:
193 elif status == pygit2.GIT_STATUS_WT_DELETED:
194 deleted.append(path)
194 deleted.append(path)
195 elif status == pygit2.GIT_STATUS_INDEX_DELETED:
195 elif status == pygit2.GIT_STATUS_INDEX_DELETED:
196 removed.append(path)
196 removed.append(path)
197 else:
197 else:
198 raise error.Abort(
198 raise error.Abort(
199 b'unhandled case: status for %r is %r' % (path, status)
199 b'unhandled case: status for %r is %r' % (path, status)
200 )
200 )
201
201
202 if listclean:
202 if listclean:
203 observed = set(
203 observed = set(
204 modified + added + removed + deleted + unknown + ignored
204 modified + added + removed + deleted + unknown + ignored
205 )
205 )
206 index = self.git.index
206 index = self.git.index
207 index.read()
207 index.read()
208 for entry in index:
208 for entry in index:
209 path = pycompat.fsencode(entry.path)
209 path = pycompat.fsencode(entry.path)
210 if not match(path):
210 if not match(path):
211 continue
211 continue
212 if path in observed:
212 if path in observed:
213 continue # already in some other set
213 continue # already in some other set
214 if path[-1] == b'/':
214 if path[-1] == b'/':
215 continue # directory
215 continue # directory
216 clean.append(path)
216 clean.append(path)
217
217
218 # TODO are we really always sure of status here?
218 # TODO are we really always sure of status here?
219 return (
219 return (
220 False,
220 False,
221 scmutil.status(
221 scmutil.status(
222 modified, added, removed, deleted, unknown, ignored, clean
222 modified, added, removed, deleted, unknown, ignored, clean
223 ),
223 ),
224 mtime_boundary,
224 mtime_boundary,
225 )
225 )
226
226
227 def flagfunc(self, buildfallback):
227 def flagfunc(self, buildfallback):
228 # TODO we can do better
228 # TODO we can do better
229 return buildfallback()
229 return buildfallback()
230
230
231 def getcwd(self):
231 def getcwd(self):
232 # TODO is this a good way to do this?
232 # TODO is this a good way to do this?
233 return os.path.dirname(
233 return os.path.dirname(
234 os.path.dirname(pycompat.fsencode(self.git.path))
234 os.path.dirname(pycompat.fsencode(self.git.path))
235 )
235 )
236
236
237 def get_entry(self, path):
237 def get_entry(self, path):
238 """return a DirstateItem for the associated path"""
238 """return a DirstateItem for the associated path"""
239 entry = self._map.get(path)
239 entry = self._map.get(path)
240 if entry is None:
240 if entry is None:
241 return DirstateItem()
241 return DirstateItem()
242 return entry
242 return entry
243
243
244 def normalize(self, path):
244 def normalize(self, path):
245 normed = util.normcase(path)
245 normed = util.normcase(path)
246 assert normed == path, b"TODO handling of case folding: %s != %s" % (
246 assert normed == path, b"TODO handling of case folding: %s != %s" % (
247 normed,
247 normed,
248 path,
248 path,
249 )
249 )
250 return path
250 return path
251
251
252 @property
252 @property
253 def _checklink(self):
253 def _checklink(self):
254 return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path)))
254 return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path)))
255
255
256 def copies(self):
256 def copies(self):
257 # TODO support copies?
257 # TODO support copies?
258 return {}
258 return {}
259
259
260 # # TODO what the heck is this
260 # # TODO what the heck is this
261 _filecache = set()
261 _filecache = set()
262
262
263 def is_changing_parents(self):
263 def is_changing_parents(self):
264 # TODO: we need to implement the context manager bits and
264 # TODO: we need to implement the context manager bits and
265 # correctly stage/revert index edits.
265 # correctly stage/revert index edits.
266 return False
266 return False
267
267
268 def is_changing_any(self):
268 def is_changing_any(self):
269 # TODO: we need to implement the context manager bits and
269 # TODO: we need to implement the context manager bits and
270 # correctly stage/revert index edits.
270 # correctly stage/revert index edits.
271 return False
271 return False
272
272
273 def write(self, tr):
273 def write(self, tr):
274 # TODO: call parent change callbacks
274 # TODO: call parent change callbacks
275
275
276 if tr:
276 if tr:
277
277
278 def writeinner(category):
278 def writeinner(category):
279 self.git.index.write()
279 self.git.index.write()
280
280
281 tr.addpending(b'gitdirstate', writeinner)
281 tr.addpending(b'gitdirstate', writeinner)
282 else:
282 else:
283 self.git.index.write()
283 self.git.index.write()
284
284
285 def pathto(self, f, cwd=None):
285 def pathto(self, f, cwd=None):
286 if cwd is None:
286 if cwd is None:
287 cwd = self.getcwd()
287 cwd = self.getcwd()
288 # TODO core dirstate does something about slashes here
288 # TODO core dirstate does something about slashes here
289 assert isinstance(f, bytes)
289 assert isinstance(f, bytes)
290 r = util.pathto(self._root, cwd, f)
290 r = util.pathto(self._root, cwd, f)
291 return r
291 return r
292
292
293 def matches(self, match):
293 def matches(self, match):
294 for x in self.git.index:
294 for x in self.git.index:
295 p = pycompat.fsencode(x.path)
295 p = pycompat.fsencode(x.path)
296 if match(p):
296 if match(p):
297 yield p
297 yield p
298
298
299 def set_clean(self, f, parentfiledata):
299 def set_clean(self, f, parentfiledata):
300 """Mark a file normal and clean."""
300 """Mark a file normal and clean."""
301 # TODO: for now we just let libgit2 re-stat the file. We can
301 # TODO: for now we just let libgit2 re-stat the file. We can
302 # clearly do better.
302 # clearly do better.
303
303
304 def set_possibly_dirty(self, f):
304 def set_possibly_dirty(self, f):
305 """Mark a file normal, but possibly dirty."""
305 """Mark a file normal, but possibly dirty."""
306 # TODO: for now we just let libgit2 re-stat the file. We can
306 # TODO: for now we just let libgit2 re-stat the file. We can
307 # clearly do better.
307 # clearly do better.
308
308
309 def walk(self, match, subrepos, unknown, ignored, full=True):
309 def walk(self, match, subrepos, unknown, ignored, full=True):
310 # TODO: we need to use .status() and not iterate the index,
310 # TODO: we need to use .status() and not iterate the index,
311 # because the index doesn't force a re-walk and so `hg add` of
311 # because the index doesn't force a re-walk and so `hg add` of
312 # a new file without an intervening call to status will
312 # a new file without an intervening call to status will
313 # silently do nothing.
313 # silently do nothing.
314 r = {}
314 r = {}
315 cwd = self.getcwd()
315 cwd = self.getcwd()
316 for path, status in self.git.status().items():
316 for path, status in self.git.status().items():
317 if path.startswith('.hg/'):
317 if path.startswith('.hg/'):
318 continue
318 continue
319 path = pycompat.fsencode(path)
319 path = pycompat.fsencode(path)
320 if not match(path):
320 if not match(path):
321 continue
321 continue
322 # TODO construct the stat info from the status object?
322 # TODO construct the stat info from the status object?
323 try:
323 try:
324 s = os.stat(os.path.join(cwd, path))
324 s = os.stat(os.path.join(cwd, path))
325 except FileNotFoundError:
325 except FileNotFoundError:
326 continue
326 continue
327 r[path] = s
327 r[path] = s
328 return r
328 return r
329
329
330 def savebackup(self, tr, backupname):
331 # TODO: figure out a strategy for saving index backups.
332 pass
333
334 def restorebackup(self, tr, backupname):
335 # TODO: figure out a strategy for saving index backups.
336 pass
337
338 def set_tracked(self, f, reset_copy=False):
330 def set_tracked(self, f, reset_copy=False):
339 # TODO: support copies and reset_copy=True
331 # TODO: support copies and reset_copy=True
340 uf = pycompat.fsdecode(f)
332 uf = pycompat.fsdecode(f)
341 if uf in self.git.index:
333 if uf in self.git.index:
342 return False
334 return False
343 index = self.git.index
335 index = self.git.index
344 index.read()
336 index.read()
345 index.add(uf)
337 index.add(uf)
346 index.write()
338 index.write()
347 return True
339 return True
348
340
349 def add(self, f):
341 def add(self, f):
350 index = self.git.index
342 index = self.git.index
351 index.read()
343 index.read()
352 index.add(pycompat.fsdecode(f))
344 index.add(pycompat.fsdecode(f))
353 index.write()
345 index.write()
354
346
355 def drop(self, f):
347 def drop(self, f):
356 index = self.git.index
348 index = self.git.index
357 index.read()
349 index.read()
358 fs = pycompat.fsdecode(f)
350 fs = pycompat.fsdecode(f)
359 if fs in index:
351 if fs in index:
360 index.remove(fs)
352 index.remove(fs)
361 index.write()
353 index.write()
362
354
363 def set_untracked(self, f):
355 def set_untracked(self, f):
364 index = self.git.index
356 index = self.git.index
365 index.read()
357 index.read()
366 fs = pycompat.fsdecode(f)
358 fs = pycompat.fsdecode(f)
367 if fs in index:
359 if fs in index:
368 index.remove(fs)
360 index.remove(fs)
369 index.write()
361 index.write()
370 return True
362 return True
371 return False
363 return False
372
364
373 def remove(self, f):
365 def remove(self, f):
374 index = self.git.index
366 index = self.git.index
375 index.read()
367 index.read()
376 index.remove(pycompat.fsdecode(f))
368 index.remove(pycompat.fsdecode(f))
377 index.write()
369 index.write()
378
370
379 def copied(self, path):
371 def copied(self, path):
380 # TODO: track copies?
372 # TODO: track copies?
381 return None
373 return None
382
374
383 def prefetch_parents(self):
375 def prefetch_parents(self):
384 # TODO
376 # TODO
385 pass
377 pass
386
378
387 def update_file(self, *args, **kwargs):
379 def update_file(self, *args, **kwargs):
388 # TODO
380 # TODO
389 pass
381 pass
390
382
391 @contextlib.contextmanager
383 @contextlib.contextmanager
392 def changing_parents(self, repo):
384 def changing_parents(self, repo):
393 # TODO: track this maybe?
385 # TODO: track this maybe?
394 yield
386 yield
395
387
396 def addparentchangecallback(self, category, callback):
388 def addparentchangecallback(self, category, callback):
397 # TODO: should this be added to the dirstate interface?
389 # TODO: should this be added to the dirstate interface?
398 self._plchangecallbacks[category] = callback
390 self._plchangecallbacks[category] = callback
399
391
400 def clearbackup(self, tr, backupname):
401 # TODO
402 pass
403
404 def setbranch(self, branch):
392 def setbranch(self, branch):
405 raise error.Abort(
393 raise error.Abort(
406 b'git repos do not support branches. try using bookmarks'
394 b'git repos do not support branches. try using bookmarks'
407 )
395 )
@@ -1,1778 +1,1660 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 docket as docketmod,
35 timestamp,
34 timestamp,
36 )
35 )
37
36
38 from .interfaces import (
37 from .interfaces import (
39 dirstate as intdirstate,
38 dirstate as intdirstate,
40 util as interfaceutil,
39 util as interfaceutil,
41 )
40 )
42
41
43 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
45
44
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
47
46
48 propertycache = util.propertycache
47 propertycache = util.propertycache
49 filecache = scmutil.filecache
48 filecache = scmutil.filecache
50 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
51
50
52 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
53
52
54
53
55 class repocache(filecache):
54 class repocache(filecache):
56 """filecache for files in .hg/"""
55 """filecache for files in .hg/"""
57
56
58 def join(self, obj, fname):
57 def join(self, obj, fname):
59 return obj._opener.join(fname)
58 return obj._opener.join(fname)
60
59
61
60
62 class rootcache(filecache):
61 class rootcache(filecache):
63 """filecache for files in the repository root"""
62 """filecache for files in the repository root"""
64
63
65 def join(self, obj, fname):
64 def join(self, obj, fname):
66 return obj._join(fname)
65 return obj._join(fname)
67
66
68
67
69 def requires_changing_parents(func):
68 def requires_changing_parents(func):
70 def wrap(self, *args, **kwargs):
69 def wrap(self, *args, **kwargs):
71 if not self.is_changing_parents:
70 if not self.is_changing_parents:
72 msg = 'calling `%s` outside of a changing_parents context'
71 msg = 'calling `%s` outside of a changing_parents context'
73 msg %= func.__name__
72 msg %= func.__name__
74 raise error.ProgrammingError(msg)
73 raise error.ProgrammingError(msg)
75 if self._invalidated_context:
74 if self._invalidated_context:
76 msg = 'calling `%s` after the dirstate was invalidated'
75 msg = 'calling `%s` after the dirstate was invalidated'
77 raise error.ProgrammingError(msg)
76 raise error.ProgrammingError(msg)
78 return func(self, *args, **kwargs)
77 return func(self, *args, **kwargs)
79
78
80 return wrap
79 return wrap
81
80
82
81
83 def requires_changing_files(func):
82 def requires_changing_files(func):
84 def wrap(self, *args, **kwargs):
83 def wrap(self, *args, **kwargs):
85 if not self.is_changing_files:
84 if not self.is_changing_files:
86 msg = 'calling `%s` outside of a `changing_files`'
85 msg = 'calling `%s` outside of a `changing_files`'
87 msg %= func.__name__
86 msg %= func.__name__
88 raise error.ProgrammingError(msg)
87 raise error.ProgrammingError(msg)
89 return func(self, *args, **kwargs)
88 return func(self, *args, **kwargs)
90
89
91 return wrap
90 return wrap
92
91
93
92
94 def requires_not_changing_parents(func):
93 def requires_not_changing_parents(func):
95 def wrap(self, *args, **kwargs):
94 def wrap(self, *args, **kwargs):
96 if self.is_changing_parents:
95 if self.is_changing_parents:
97 msg = 'calling `%s` inside of a changing_parents context'
96 msg = 'calling `%s` inside of a changing_parents context'
98 msg %= func.__name__
97 msg %= func.__name__
99 raise error.ProgrammingError(msg)
98 raise error.ProgrammingError(msg)
100 return func(self, *args, **kwargs)
99 return func(self, *args, **kwargs)
101
100
102 return wrap
101 return wrap
103
102
104
103
105 CHANGE_TYPE_PARENTS = "parents"
104 CHANGE_TYPE_PARENTS = "parents"
106 CHANGE_TYPE_FILES = "files"
105 CHANGE_TYPE_FILES = "files"
107
106
108
107
109 @interfaceutil.implementer(intdirstate.idirstate)
108 @interfaceutil.implementer(intdirstate.idirstate)
110 class dirstate:
109 class dirstate:
111 def __init__(
110 def __init__(
112 self,
111 self,
113 opener,
112 opener,
114 ui,
113 ui,
115 root,
114 root,
116 validate,
115 validate,
117 sparsematchfn,
116 sparsematchfn,
118 nodeconstants,
117 nodeconstants,
119 use_dirstate_v2,
118 use_dirstate_v2,
120 use_tracked_hint=False,
119 use_tracked_hint=False,
121 ):
120 ):
122 """Create a new dirstate object.
121 """Create a new dirstate object.
123
122
124 opener is an open()-like callable that can be used to open the
123 opener is an open()-like callable that can be used to open the
125 dirstate file; root is the root of the directory tracked by
124 dirstate file; root is the root of the directory tracked by
126 the dirstate.
125 the dirstate.
127 """
126 """
128 self._use_dirstate_v2 = use_dirstate_v2
127 self._use_dirstate_v2 = use_dirstate_v2
129 self._use_tracked_hint = use_tracked_hint
128 self._use_tracked_hint = use_tracked_hint
130 self._nodeconstants = nodeconstants
129 self._nodeconstants = nodeconstants
131 self._opener = opener
130 self._opener = opener
132 self._validate = validate
131 self._validate = validate
133 self._root = root
132 self._root = root
134 # Either build a sparse-matcher or None if sparse is disabled
133 # Either build a sparse-matcher or None if sparse is disabled
135 self._sparsematchfn = sparsematchfn
134 self._sparsematchfn = sparsematchfn
136 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
135 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
137 # UNC path pointing to root share (issue4557)
136 # UNC path pointing to root share (issue4557)
138 self._rootdir = pathutil.normasprefix(root)
137 self._rootdir = pathutil.normasprefix(root)
139 # True is any internal state may be different
138 # True is any internal state may be different
140 self._dirty = False
139 self._dirty = False
141 # True if the set of tracked file may be different
140 # True if the set of tracked file may be different
142 self._dirty_tracked_set = False
141 self._dirty_tracked_set = False
143 self._ui = ui
142 self._ui = ui
144 self._filecache = {}
143 self._filecache = {}
145 # nesting level of `changing_parents` context
144 # nesting level of `changing_parents` context
146 self._changing_level = 0
145 self._changing_level = 0
147 # the change currently underway
146 # the change currently underway
148 self._change_type = None
147 self._change_type = None
149 # True if the current dirstate changing operations have been
148 # True if the current dirstate changing operations have been
150 # invalidated (used to make sure all nested contexts have been exited)
149 # invalidated (used to make sure all nested contexts have been exited)
151 self._invalidated_context = False
150 self._invalidated_context = False
152 self._filename = b'dirstate'
151 self._filename = b'dirstate'
153 self._filename_th = b'dirstate-tracked-hint'
152 self._filename_th = b'dirstate-tracked-hint'
154 self._pendingfilename = b'%s.pending' % self._filename
153 self._pendingfilename = b'%s.pending' % self._filename
155 self._plchangecallbacks = {}
154 self._plchangecallbacks = {}
156 self._origpl = None
155 self._origpl = None
157 self._mapcls = dirstatemap.dirstatemap
156 self._mapcls = dirstatemap.dirstatemap
158 # Access and cache cwd early, so we don't access it for the first time
157 # Access and cache cwd early, so we don't access it for the first time
159 # after a working-copy update caused it to not exist (accessing it then
158 # after a working-copy update caused it to not exist (accessing it then
160 # raises an exception).
159 # raises an exception).
161 self._cwd
160 self._cwd
162
161
163 def prefetch_parents(self):
162 def prefetch_parents(self):
164 """make sure the parents are loaded
163 """make sure the parents are loaded
165
164
166 Used to avoid a race condition.
165 Used to avoid a race condition.
167 """
166 """
168 self._pl
167 self._pl
169
168
170 @contextlib.contextmanager
169 @contextlib.contextmanager
171 def _changing(self, repo, change_type):
170 def _changing(self, repo, change_type):
172 if repo.currentwlock() is None:
171 if repo.currentwlock() is None:
173 msg = b"trying to change the dirstate without holding the wlock"
172 msg = b"trying to change the dirstate without holding the wlock"
174 raise error.ProgrammingError(msg)
173 raise error.ProgrammingError(msg)
175 if self._invalidated_context:
174 if self._invalidated_context:
176 msg = "trying to use an invalidated dirstate before it has reset"
175 msg = "trying to use an invalidated dirstate before it has reset"
177 raise error.ProgrammingError(msg)
176 raise error.ProgrammingError(msg)
178
177
179 has_tr = repo.currenttransaction() is not None
178 has_tr = repo.currenttransaction() is not None
180
179
181 # different type of change are mutually exclusive
180 # different type of change are mutually exclusive
182 if self._change_type is None:
181 if self._change_type is None:
183 assert self._changing_level == 0
182 assert self._changing_level == 0
184 self._change_type = change_type
183 self._change_type = change_type
185 elif self._change_type != change_type:
184 elif self._change_type != change_type:
186 msg = (
185 msg = (
187 'trying to open "%s" dirstate-changing context while a "%s" is'
186 'trying to open "%s" dirstate-changing context while a "%s" is'
188 ' already open'
187 ' already open'
189 )
188 )
190 msg %= (change_type, self._change_type)
189 msg %= (change_type, self._change_type)
191 raise error.ProgrammingError(msg)
190 raise error.ProgrammingError(msg)
192 self._changing_level += 1
191 self._changing_level += 1
193 try:
192 try:
194 yield
193 yield
195 except Exception:
194 except Exception:
196 self.invalidate()
195 self.invalidate()
197 raise
196 raise
198 finally:
197 finally:
199 tr = repo.currenttransaction()
198 tr = repo.currenttransaction()
200 if self._changing_level > 0:
199 if self._changing_level > 0:
201 if self._invalidated_context:
200 if self._invalidated_context:
202 # make sure we invalidate anything an upper context might
201 # make sure we invalidate anything an upper context might
203 # have changed.
202 # have changed.
204 self.invalidate()
203 self.invalidate()
205 self._changing_level -= 1
204 self._changing_level -= 1
206 # The invalidation is complete once we exit the final context
205 # The invalidation is complete once we exit the final context
207 # manager
206 # manager
208 if self._changing_level <= 0:
207 if self._changing_level <= 0:
209 self._change_type = None
208 self._change_type = None
210 assert self._changing_level == 0
209 assert self._changing_level == 0
211 if self._invalidated_context:
210 if self._invalidated_context:
212 self._invalidated_context = False
211 self._invalidated_context = False
213 else:
212 else:
214 # When an exception occured, `_invalidated_context`
213 # When an exception occured, `_invalidated_context`
215 # would have been set to True by the `invalidate`
214 # would have been set to True by the `invalidate`
216 # call earlier.
215 # call earlier.
217 #
216 #
218 # We don't have more straightforward code, because the
217 # We don't have more straightforward code, because the
219 # Exception catching (and the associated `invalidate`
218 # Exception catching (and the associated `invalidate`
220 # calling) might have been called by a nested context
219 # calling) might have been called by a nested context
221 # instead of the top level one.
220 # instead of the top level one.
222 self.write(tr)
221 self.write(tr)
223 if has_tr != (tr is not None):
222 if has_tr != (tr is not None):
224 if has_tr:
223 if has_tr:
225 m = "transaction vanished while changing dirstate"
224 m = "transaction vanished while changing dirstate"
226 else:
225 else:
227 m = "transaction appeared while changing dirstate"
226 m = "transaction appeared while changing dirstate"
228 raise error.ProgrammingError(m)
227 raise error.ProgrammingError(m)
229
228
230 @contextlib.contextmanager
229 @contextlib.contextmanager
231 def changing_parents(self, repo):
230 def changing_parents(self, repo):
232 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
231 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
233 yield c
232 yield c
234
233
235 @contextlib.contextmanager
234 @contextlib.contextmanager
236 def changing_files(self, repo):
235 def changing_files(self, repo):
237 with self._changing(repo, CHANGE_TYPE_FILES) as c:
236 with self._changing(repo, CHANGE_TYPE_FILES) as c:
238 yield c
237 yield c
239
238
240 # here to help migration to the new code
239 # here to help migration to the new code
241 def parentchange(self):
240 def parentchange(self):
242 msg = (
241 msg = (
243 "Mercurial 6.4 and later requires call to "
242 "Mercurial 6.4 and later requires call to "
244 "`dirstate.changing_parents(repo)`"
243 "`dirstate.changing_parents(repo)`"
245 )
244 )
246 raise error.ProgrammingError(msg)
245 raise error.ProgrammingError(msg)
247
246
248 @property
247 @property
249 def is_changing_any(self):
248 def is_changing_any(self):
250 """Returns true if the dirstate is in the middle of a set of changes.
249 """Returns true if the dirstate is in the middle of a set of changes.
251
250
252 This returns True for any kind of change.
251 This returns True for any kind of change.
253 """
252 """
254 return self._changing_level > 0
253 return self._changing_level > 0
255
254
256 def pendingparentchange(self):
255 def pendingparentchange(self):
257 return self.is_changing_parent()
256 return self.is_changing_parent()
258
257
259 def is_changing_parent(self):
258 def is_changing_parent(self):
260 """Returns true if the dirstate is in the middle of a set of changes
259 """Returns true if the dirstate is in the middle of a set of changes
261 that modify the dirstate parent.
260 that modify the dirstate parent.
262 """
261 """
263 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
262 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
264 return self.is_changing_parents
263 return self.is_changing_parents
265
264
266 @property
265 @property
267 def is_changing_parents(self):
266 def is_changing_parents(self):
268 """Returns true if the dirstate is in the middle of a set of changes
267 """Returns true if the dirstate is in the middle of a set of changes
269 that modify the dirstate parent.
268 that modify the dirstate parent.
270 """
269 """
271 if self._changing_level <= 0:
270 if self._changing_level <= 0:
272 return False
271 return False
273 return self._change_type == CHANGE_TYPE_PARENTS
272 return self._change_type == CHANGE_TYPE_PARENTS
274
273
275 @property
274 @property
276 def is_changing_files(self):
275 def is_changing_files(self):
277 """Returns true if the dirstate is in the middle of a set of changes
276 """Returns true if the dirstate is in the middle of a set of changes
278 that modify the files tracked or their sources.
277 that modify the files tracked or their sources.
279 """
278 """
280 if self._changing_level <= 0:
279 if self._changing_level <= 0:
281 return False
280 return False
282 return self._change_type == CHANGE_TYPE_FILES
281 return self._change_type == CHANGE_TYPE_FILES
283
282
284 @propertycache
283 @propertycache
285 def _map(self):
284 def _map(self):
286 """Return the dirstate contents (see documentation for dirstatemap)."""
285 """Return the dirstate contents (see documentation for dirstatemap)."""
287 self._map = self._mapcls(
286 self._map = self._mapcls(
288 self._ui,
287 self._ui,
289 self._opener,
288 self._opener,
290 self._root,
289 self._root,
291 self._nodeconstants,
290 self._nodeconstants,
292 self._use_dirstate_v2,
291 self._use_dirstate_v2,
293 )
292 )
294 return self._map
293 return self._map
295
294
296 @property
295 @property
297 def _sparsematcher(self):
296 def _sparsematcher(self):
298 """The matcher for the sparse checkout.
297 """The matcher for the sparse checkout.
299
298
300 The working directory may not include every file from a manifest. The
299 The working directory may not include every file from a manifest. The
301 matcher obtained by this property will match a path if it is to be
300 matcher obtained by this property will match a path if it is to be
302 included in the working directory.
301 included in the working directory.
303
302
304 When sparse if disabled, return None.
303 When sparse if disabled, return None.
305 """
304 """
306 if self._sparsematchfn is None:
305 if self._sparsematchfn is None:
307 return None
306 return None
308 # TODO there is potential to cache this property. For now, the matcher
307 # TODO there is potential to cache this property. For now, the matcher
309 # is resolved on every access. (But the called function does use a
308 # is resolved on every access. (But the called function does use a
310 # cache to keep the lookup fast.)
309 # cache to keep the lookup fast.)
311 return self._sparsematchfn()
310 return self._sparsematchfn()
312
311
313 @repocache(b'branch')
312 @repocache(b'branch')
314 def _branch(self):
313 def _branch(self):
315 try:
314 try:
316 return self._opener.read(b"branch").strip() or b"default"
315 return self._opener.read(b"branch").strip() or b"default"
317 except FileNotFoundError:
316 except FileNotFoundError:
318 return b"default"
317 return b"default"
319
318
320 @property
319 @property
321 def _pl(self):
320 def _pl(self):
322 return self._map.parents()
321 return self._map.parents()
323
322
324 def hasdir(self, d):
323 def hasdir(self, d):
325 return self._map.hastrackeddir(d)
324 return self._map.hastrackeddir(d)
326
325
327 @rootcache(b'.hgignore')
326 @rootcache(b'.hgignore')
328 def _ignore(self):
327 def _ignore(self):
329 files = self._ignorefiles()
328 files = self._ignorefiles()
330 if not files:
329 if not files:
331 return matchmod.never()
330 return matchmod.never()
332
331
333 pats = [b'include:%s' % f for f in files]
332 pats = [b'include:%s' % f for f in files]
334 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
333 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
335
334
336 @propertycache
335 @propertycache
337 def _slash(self):
336 def _slash(self):
338 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
337 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
339
338
340 @propertycache
339 @propertycache
341 def _checklink(self):
340 def _checklink(self):
342 return util.checklink(self._root)
341 return util.checklink(self._root)
343
342
344 @propertycache
343 @propertycache
345 def _checkexec(self):
344 def _checkexec(self):
346 return bool(util.checkexec(self._root))
345 return bool(util.checkexec(self._root))
347
346
348 @propertycache
347 @propertycache
349 def _checkcase(self):
348 def _checkcase(self):
350 return not util.fscasesensitive(self._join(b'.hg'))
349 return not util.fscasesensitive(self._join(b'.hg'))
351
350
352 def _join(self, f):
351 def _join(self, f):
353 # much faster than os.path.join()
352 # much faster than os.path.join()
354 # it's safe because f is always a relative path
353 # it's safe because f is always a relative path
355 return self._rootdir + f
354 return self._rootdir + f
356
355
357 def flagfunc(self, buildfallback):
356 def flagfunc(self, buildfallback):
358 """build a callable that returns flags associated with a filename
357 """build a callable that returns flags associated with a filename
359
358
360 The information is extracted from three possible layers:
359 The information is extracted from three possible layers:
361 1. the file system if it supports the information
360 1. the file system if it supports the information
362 2. the "fallback" information stored in the dirstate if any
361 2. the "fallback" information stored in the dirstate if any
363 3. a more expensive mechanism inferring the flags from the parents.
362 3. a more expensive mechanism inferring the flags from the parents.
364 """
363 """
365
364
366 # small hack to cache the result of buildfallback()
365 # small hack to cache the result of buildfallback()
367 fallback_func = []
366 fallback_func = []
368
367
369 def get_flags(x):
368 def get_flags(x):
370 entry = None
369 entry = None
371 fallback_value = None
370 fallback_value = None
372 try:
371 try:
373 st = os.lstat(self._join(x))
372 st = os.lstat(self._join(x))
374 except OSError:
373 except OSError:
375 return b''
374 return b''
376
375
377 if self._checklink:
376 if self._checklink:
378 if util.statislink(st):
377 if util.statislink(st):
379 return b'l'
378 return b'l'
380 else:
379 else:
381 entry = self.get_entry(x)
380 entry = self.get_entry(x)
382 if entry.has_fallback_symlink:
381 if entry.has_fallback_symlink:
383 if entry.fallback_symlink:
382 if entry.fallback_symlink:
384 return b'l'
383 return b'l'
385 else:
384 else:
386 if not fallback_func:
385 if not fallback_func:
387 fallback_func.append(buildfallback())
386 fallback_func.append(buildfallback())
388 fallback_value = fallback_func[0](x)
387 fallback_value = fallback_func[0](x)
389 if b'l' in fallback_value:
388 if b'l' in fallback_value:
390 return b'l'
389 return b'l'
391
390
392 if self._checkexec:
391 if self._checkexec:
393 if util.statisexec(st):
392 if util.statisexec(st):
394 return b'x'
393 return b'x'
395 else:
394 else:
396 if entry is None:
395 if entry is None:
397 entry = self.get_entry(x)
396 entry = self.get_entry(x)
398 if entry.has_fallback_exec:
397 if entry.has_fallback_exec:
399 if entry.fallback_exec:
398 if entry.fallback_exec:
400 return b'x'
399 return b'x'
401 else:
400 else:
402 if fallback_value is None:
401 if fallback_value is None:
403 if not fallback_func:
402 if not fallback_func:
404 fallback_func.append(buildfallback())
403 fallback_func.append(buildfallback())
405 fallback_value = fallback_func[0](x)
404 fallback_value = fallback_func[0](x)
406 if b'x' in fallback_value:
405 if b'x' in fallback_value:
407 return b'x'
406 return b'x'
408 return b''
407 return b''
409
408
410 return get_flags
409 return get_flags
411
410
412 @propertycache
411 @propertycache
413 def _cwd(self):
412 def _cwd(self):
414 # internal config: ui.forcecwd
413 # internal config: ui.forcecwd
415 forcecwd = self._ui.config(b'ui', b'forcecwd')
414 forcecwd = self._ui.config(b'ui', b'forcecwd')
416 if forcecwd:
415 if forcecwd:
417 return forcecwd
416 return forcecwd
418 return encoding.getcwd()
417 return encoding.getcwd()
419
418
420 def getcwd(self):
419 def getcwd(self):
421 """Return the path from which a canonical path is calculated.
420 """Return the path from which a canonical path is calculated.
422
421
423 This path should be used to resolve file patterns or to convert
422 This path should be used to resolve file patterns or to convert
424 canonical paths back to file paths for display. It shouldn't be
423 canonical paths back to file paths for display. It shouldn't be
425 used to get real file paths. Use vfs functions instead.
424 used to get real file paths. Use vfs functions instead.
426 """
425 """
427 cwd = self._cwd
426 cwd = self._cwd
428 if cwd == self._root:
427 if cwd == self._root:
429 return b''
428 return b''
430 # self._root ends with a path separator if self._root is '/' or 'C:\'
429 # self._root ends with a path separator if self._root is '/' or 'C:\'
431 rootsep = self._root
430 rootsep = self._root
432 if not util.endswithsep(rootsep):
431 if not util.endswithsep(rootsep):
433 rootsep += pycompat.ossep
432 rootsep += pycompat.ossep
434 if cwd.startswith(rootsep):
433 if cwd.startswith(rootsep):
435 return cwd[len(rootsep) :]
434 return cwd[len(rootsep) :]
436 else:
435 else:
437 # we're outside the repo. return an absolute path.
436 # we're outside the repo. return an absolute path.
438 return cwd
437 return cwd
439
438
440 def pathto(self, f, cwd=None):
439 def pathto(self, f, cwd=None):
441 if cwd is None:
440 if cwd is None:
442 cwd = self.getcwd()
441 cwd = self.getcwd()
443 path = util.pathto(self._root, cwd, f)
442 path = util.pathto(self._root, cwd, f)
444 if self._slash:
443 if self._slash:
445 return util.pconvert(path)
444 return util.pconvert(path)
446 return path
445 return path
447
446
448 def get_entry(self, path):
447 def get_entry(self, path):
449 """return a DirstateItem for the associated path"""
448 """return a DirstateItem for the associated path"""
450 entry = self._map.get(path)
449 entry = self._map.get(path)
451 if entry is None:
450 if entry is None:
452 return DirstateItem()
451 return DirstateItem()
453 return entry
452 return entry
454
453
455 def __contains__(self, key):
454 def __contains__(self, key):
456 return key in self._map
455 return key in self._map
457
456
458 def __iter__(self):
457 def __iter__(self):
459 return iter(sorted(self._map))
458 return iter(sorted(self._map))
460
459
461 def items(self):
460 def items(self):
462 return self._map.items()
461 return self._map.items()
463
462
464 iteritems = items
463 iteritems = items
465
464
466 def parents(self):
465 def parents(self):
467 return [self._validate(p) for p in self._pl]
466 return [self._validate(p) for p in self._pl]
468
467
469 def p1(self):
468 def p1(self):
470 return self._validate(self._pl[0])
469 return self._validate(self._pl[0])
471
470
472 def p2(self):
471 def p2(self):
473 return self._validate(self._pl[1])
472 return self._validate(self._pl[1])
474
473
475 @property
474 @property
476 def in_merge(self):
475 def in_merge(self):
477 """True if a merge is in progress"""
476 """True if a merge is in progress"""
478 return self._pl[1] != self._nodeconstants.nullid
477 return self._pl[1] != self._nodeconstants.nullid
479
478
480 def branch(self):
479 def branch(self):
481 return encoding.tolocal(self._branch)
480 return encoding.tolocal(self._branch)
482
481
483 # XXX since this make the dirstate dirty, we should enforce that it is done
482 # XXX since this make the dirstate dirty, we should enforce that it is done
484 # withing an appropriate change-context that scope the change and ensure it
483 # withing an appropriate change-context that scope the change and ensure it
485 # eventually get written on disk (or rolled back)
484 # eventually get written on disk (or rolled back)
486 def setparents(self, p1, p2=None):
485 def setparents(self, p1, p2=None):
487 """Set dirstate parents to p1 and p2.
486 """Set dirstate parents to p1 and p2.
488
487
489 When moving from two parents to one, "merged" entries a
488 When moving from two parents to one, "merged" entries a
490 adjusted to normal and previous copy records discarded and
489 adjusted to normal and previous copy records discarded and
491 returned by the call.
490 returned by the call.
492
491
493 See localrepo.setparents()
492 See localrepo.setparents()
494 """
493 """
495 if p2 is None:
494 if p2 is None:
496 p2 = self._nodeconstants.nullid
495 p2 = self._nodeconstants.nullid
497 if self._changing_level == 0:
496 if self._changing_level == 0:
498 raise ValueError(
497 raise ValueError(
499 b"cannot set dirstate parent outside of "
498 b"cannot set dirstate parent outside of "
500 b"dirstate.changing_parents context manager"
499 b"dirstate.changing_parents context manager"
501 )
500 )
502
501
503 self._dirty = True
502 self._dirty = True
504 oldp2 = self._pl[1]
503 oldp2 = self._pl[1]
505 if self._origpl is None:
504 if self._origpl is None:
506 self._origpl = self._pl
505 self._origpl = self._pl
507 nullid = self._nodeconstants.nullid
506 nullid = self._nodeconstants.nullid
508 # True if we need to fold p2 related state back to a linear case
507 # True if we need to fold p2 related state back to a linear case
509 fold_p2 = oldp2 != nullid and p2 == nullid
508 fold_p2 = oldp2 != nullid and p2 == nullid
510 return self._map.setparents(p1, p2, fold_p2=fold_p2)
509 return self._map.setparents(p1, p2, fold_p2=fold_p2)
511
510
512 def setbranch(self, branch):
511 def setbranch(self, branch):
513 self.__class__._branch.set(self, encoding.fromlocal(branch))
512 self.__class__._branch.set(self, encoding.fromlocal(branch))
514 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
513 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
515 try:
514 try:
516 f.write(self._branch + b'\n')
515 f.write(self._branch + b'\n')
517 f.close()
516 f.close()
518
517
519 # make sure filecache has the correct stat info for _branch after
518 # make sure filecache has the correct stat info for _branch after
520 # replacing the underlying file
519 # replacing the underlying file
521 ce = self._filecache[b'_branch']
520 ce = self._filecache[b'_branch']
522 if ce:
521 if ce:
523 ce.refresh()
522 ce.refresh()
524 except: # re-raises
523 except: # re-raises
525 f.discard()
524 f.discard()
526 raise
525 raise
527
526
528 def invalidate(self):
527 def invalidate(self):
529 """Causes the next access to reread the dirstate.
528 """Causes the next access to reread the dirstate.
530
529
531 This is different from localrepo.invalidatedirstate() because it always
530 This is different from localrepo.invalidatedirstate() because it always
532 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
531 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
533 check whether the dirstate has changed before rereading it."""
532 check whether the dirstate has changed before rereading it."""
534
533
535 for a in ("_map", "_branch", "_ignore"):
534 for a in ("_map", "_branch", "_ignore"):
536 if a in self.__dict__:
535 if a in self.__dict__:
537 delattr(self, a)
536 delattr(self, a)
538 self._dirty = False
537 self._dirty = False
539 self._dirty_tracked_set = False
538 self._dirty_tracked_set = False
540 self._invalidated_context = self._changing_level > 0
539 self._invalidated_context = self._changing_level > 0
541 self._origpl = None
540 self._origpl = None
542
541
543 # XXX since this make the dirstate dirty, we should enforce that it is done
542 # XXX since this make the dirstate dirty, we should enforce that it is done
544 # withing an appropriate change-context that scope the change and ensure it
543 # withing an appropriate change-context that scope the change and ensure it
545 # eventually get written on disk (or rolled back)
544 # eventually get written on disk (or rolled back)
546 def copy(self, source, dest):
545 def copy(self, source, dest):
547 """Mark dest as a copy of source. Unmark dest if source is None."""
546 """Mark dest as a copy of source. Unmark dest if source is None."""
548 if source == dest:
547 if source == dest:
549 return
548 return
550 self._dirty = True
549 self._dirty = True
551 if source is not None:
550 if source is not None:
552 self._check_sparse(source)
551 self._check_sparse(source)
553 self._map.copymap[dest] = source
552 self._map.copymap[dest] = source
554 else:
553 else:
555 self._map.copymap.pop(dest, None)
554 self._map.copymap.pop(dest, None)
556
555
557 def copied(self, file):
556 def copied(self, file):
558 return self._map.copymap.get(file, None)
557 return self._map.copymap.get(file, None)
559
558
560 def copies(self):
559 def copies(self):
561 return self._map.copymap
560 return self._map.copymap
562
561
563 @requires_changing_files
562 @requires_changing_files
564 def set_tracked(self, filename, reset_copy=False):
563 def set_tracked(self, filename, reset_copy=False):
565 """a "public" method for generic code to mark a file as tracked
564 """a "public" method for generic code to mark a file as tracked
566
565
567 This function is to be called outside of "update/merge" case. For
566 This function is to be called outside of "update/merge" case. For
568 example by a command like `hg add X`.
567 example by a command like `hg add X`.
569
568
570 if reset_copy is set, any existing copy information will be dropped.
569 if reset_copy is set, any existing copy information will be dropped.
571
570
572 return True the file was previously untracked, False otherwise.
571 return True the file was previously untracked, False otherwise.
573 """
572 """
574 self._dirty = True
573 self._dirty = True
575 entry = self._map.get(filename)
574 entry = self._map.get(filename)
576 if entry is None or not entry.tracked:
575 if entry is None or not entry.tracked:
577 self._check_new_tracked_filename(filename)
576 self._check_new_tracked_filename(filename)
578 pre_tracked = self._map.set_tracked(filename)
577 pre_tracked = self._map.set_tracked(filename)
579 if reset_copy:
578 if reset_copy:
580 self._map.copymap.pop(filename, None)
579 self._map.copymap.pop(filename, None)
581 if pre_tracked:
580 if pre_tracked:
582 self._dirty_tracked_set = True
581 self._dirty_tracked_set = True
583 return pre_tracked
582 return pre_tracked
584
583
585 @requires_changing_files
584 @requires_changing_files
586 def set_untracked(self, filename):
585 def set_untracked(self, filename):
587 """a "public" method for generic code to mark a file as untracked
586 """a "public" method for generic code to mark a file as untracked
588
587
589 This function is to be called outside of "update/merge" case. For
588 This function is to be called outside of "update/merge" case. For
590 example by a command like `hg remove X`.
589 example by a command like `hg remove X`.
591
590
592 return True the file was previously tracked, False otherwise.
591 return True the file was previously tracked, False otherwise.
593 """
592 """
594 ret = self._map.set_untracked(filename)
593 ret = self._map.set_untracked(filename)
595 if ret:
594 if ret:
596 self._dirty = True
595 self._dirty = True
597 self._dirty_tracked_set = True
596 self._dirty_tracked_set = True
598 return ret
597 return ret
599
598
600 @requires_not_changing_parents
599 @requires_not_changing_parents
601 def set_clean(self, filename, parentfiledata):
600 def set_clean(self, filename, parentfiledata):
602 """record that the current state of the file on disk is known to be clean"""
601 """record that the current state of the file on disk is known to be clean"""
603 self._dirty = True
602 self._dirty = True
604 if not self._map[filename].tracked:
603 if not self._map[filename].tracked:
605 self._check_new_tracked_filename(filename)
604 self._check_new_tracked_filename(filename)
606 (mode, size, mtime) = parentfiledata
605 (mode, size, mtime) = parentfiledata
607 self._map.set_clean(filename, mode, size, mtime)
606 self._map.set_clean(filename, mode, size, mtime)
608
607
609 @requires_not_changing_parents
608 @requires_not_changing_parents
610 def set_possibly_dirty(self, filename):
609 def set_possibly_dirty(self, filename):
611 """record that the current state of the file on disk is unknown"""
610 """record that the current state of the file on disk is unknown"""
612 self._dirty = True
611 self._dirty = True
613 self._map.set_possibly_dirty(filename)
612 self._map.set_possibly_dirty(filename)
614
613
615 @requires_changing_parents
614 @requires_changing_parents
616 def update_file_p1(
615 def update_file_p1(
617 self,
616 self,
618 filename,
617 filename,
619 p1_tracked,
618 p1_tracked,
620 ):
619 ):
621 """Set a file as tracked in the parent (or not)
620 """Set a file as tracked in the parent (or not)
622
621
623 This is to be called when adjust the dirstate to a new parent after an history
622 This is to be called when adjust the dirstate to a new parent after an history
624 rewriting operation.
623 rewriting operation.
625
624
626 It should not be called during a merge (p2 != nullid) and only within
625 It should not be called during a merge (p2 != nullid) and only within
627 a `with dirstate.changing_parents(repo):` context.
626 a `with dirstate.changing_parents(repo):` context.
628 """
627 """
629 if self.in_merge:
628 if self.in_merge:
630 msg = b'update_file_reference should not be called when merging'
629 msg = b'update_file_reference should not be called when merging'
631 raise error.ProgrammingError(msg)
630 raise error.ProgrammingError(msg)
632 entry = self._map.get(filename)
631 entry = self._map.get(filename)
633 if entry is None:
632 if entry is None:
634 wc_tracked = False
633 wc_tracked = False
635 else:
634 else:
636 wc_tracked = entry.tracked
635 wc_tracked = entry.tracked
637 if not (p1_tracked or wc_tracked):
636 if not (p1_tracked or wc_tracked):
638 # the file is no longer relevant to anyone
637 # the file is no longer relevant to anyone
639 if self._map.get(filename) is not None:
638 if self._map.get(filename) is not None:
640 self._map.reset_state(filename)
639 self._map.reset_state(filename)
641 self._dirty = True
640 self._dirty = True
642 elif (not p1_tracked) and wc_tracked:
641 elif (not p1_tracked) and wc_tracked:
643 if entry is not None and entry.added:
642 if entry is not None and entry.added:
644 return # avoid dropping copy information (maybe?)
643 return # avoid dropping copy information (maybe?)
645
644
646 self._map.reset_state(
645 self._map.reset_state(
647 filename,
646 filename,
648 wc_tracked,
647 wc_tracked,
649 p1_tracked,
648 p1_tracked,
650 # the underlying reference might have changed, we will have to
649 # the underlying reference might have changed, we will have to
651 # check it.
650 # check it.
652 has_meaningful_mtime=False,
651 has_meaningful_mtime=False,
653 )
652 )
654
653
655 @requires_changing_parents
654 @requires_changing_parents
656 def update_file(
655 def update_file(
657 self,
656 self,
658 filename,
657 filename,
659 wc_tracked,
658 wc_tracked,
660 p1_tracked,
659 p1_tracked,
661 p2_info=False,
660 p2_info=False,
662 possibly_dirty=False,
661 possibly_dirty=False,
663 parentfiledata=None,
662 parentfiledata=None,
664 ):
663 ):
665 """update the information about a file in the dirstate
664 """update the information about a file in the dirstate
666
665
667 This is to be called when the direstates parent changes to keep track
666 This is to be called when the direstates parent changes to keep track
668 of what is the file situation in regards to the working copy and its parent.
667 of what is the file situation in regards to the working copy and its parent.
669
668
670 This function must be called within a `dirstate.changing_parents` context.
669 This function must be called within a `dirstate.changing_parents` context.
671
670
672 note: the API is at an early stage and we might need to adjust it
671 note: the API is at an early stage and we might need to adjust it
673 depending of what information ends up being relevant and useful to
672 depending of what information ends up being relevant and useful to
674 other processing.
673 other processing.
675 """
674 """
676 self._update_file(
675 self._update_file(
677 filename=filename,
676 filename=filename,
678 wc_tracked=wc_tracked,
677 wc_tracked=wc_tracked,
679 p1_tracked=p1_tracked,
678 p1_tracked=p1_tracked,
680 p2_info=p2_info,
679 p2_info=p2_info,
681 possibly_dirty=possibly_dirty,
680 possibly_dirty=possibly_dirty,
682 parentfiledata=parentfiledata,
681 parentfiledata=parentfiledata,
683 )
682 )
684
683
685 # XXX since this make the dirstate dirty, we should enforce that it is done
684 # XXX since this make the dirstate dirty, we should enforce that it is done
686 # withing an appropriate change-context that scope the change and ensure it
685 # withing an appropriate change-context that scope the change and ensure it
687 # eventually get written on disk (or rolled back)
686 # eventually get written on disk (or rolled back)
688 def hacky_extension_update_file(self, *args, **kwargs):
687 def hacky_extension_update_file(self, *args, **kwargs):
689 """NEVER USE THIS, YOU DO NOT NEED IT
688 """NEVER USE THIS, YOU DO NOT NEED IT
690
689
691 This function is a variant of "update_file" to be called by a small set
690 This function is a variant of "update_file" to be called by a small set
692 of extensions, it also adjust the internal state of file, but can be
691 of extensions, it also adjust the internal state of file, but can be
693 called outside an `changing_parents` context.
692 called outside an `changing_parents` context.
694
693
695 A very small number of extension meddle with the working copy content
694 A very small number of extension meddle with the working copy content
696 in a way that requires to adjust the dirstate accordingly. At the time
695 in a way that requires to adjust the dirstate accordingly. At the time
697 this command is written they are :
696 this command is written they are :
698 - keyword,
697 - keyword,
699 - largefile,
698 - largefile,
700 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
699 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
701
700
702 This function could probably be replaced by more semantic one (like
701 This function could probably be replaced by more semantic one (like
703 "adjust expected size" or "always revalidate file content", etc)
702 "adjust expected size" or "always revalidate file content", etc)
704 however at the time where this is writen, this is too much of a detour
703 however at the time where this is writen, this is too much of a detour
705 to be considered.
704 to be considered.
706 """
705 """
707 self._update_file(
706 self._update_file(
708 *args,
707 *args,
709 **kwargs,
708 **kwargs,
710 )
709 )
711
710
712 def _update_file(
711 def _update_file(
713 self,
712 self,
714 filename,
713 filename,
715 wc_tracked,
714 wc_tracked,
716 p1_tracked,
715 p1_tracked,
717 p2_info=False,
716 p2_info=False,
718 possibly_dirty=False,
717 possibly_dirty=False,
719 parentfiledata=None,
718 parentfiledata=None,
720 ):
719 ):
721
720
722 # note: I do not think we need to double check name clash here since we
721 # note: I do not think we need to double check name clash here since we
723 # are in a update/merge case that should already have taken care of
722 # are in a update/merge case that should already have taken care of
724 # this. The test agrees
723 # this. The test agrees
725
724
726 self._dirty = True
725 self._dirty = True
727 old_entry = self._map.get(filename)
726 old_entry = self._map.get(filename)
728 if old_entry is None:
727 if old_entry is None:
729 prev_tracked = False
728 prev_tracked = False
730 else:
729 else:
731 prev_tracked = old_entry.tracked
730 prev_tracked = old_entry.tracked
732 if prev_tracked != wc_tracked:
731 if prev_tracked != wc_tracked:
733 self._dirty_tracked_set = True
732 self._dirty_tracked_set = True
734
733
735 self._map.reset_state(
734 self._map.reset_state(
736 filename,
735 filename,
737 wc_tracked,
736 wc_tracked,
738 p1_tracked,
737 p1_tracked,
739 p2_info=p2_info,
738 p2_info=p2_info,
740 has_meaningful_mtime=not possibly_dirty,
739 has_meaningful_mtime=not possibly_dirty,
741 parentfiledata=parentfiledata,
740 parentfiledata=parentfiledata,
742 )
741 )
743
742
def _check_new_tracked_filename(self, filename):
    """Abort if *filename* cannot legally become a tracked file.

    Checks the raw filename, directory/file clashes against existing
    dirstate entries, and the sparse profile.
    """
    scmutil.checkfilename(filename)
    if self._map.hastrackeddir(filename):
        msg = _(b'directory %r already in dirstate')
        raise error.Abort(msg % pycompat.bytestr(filename))
    # shadows: a parent path must not be a tracked file
    for parent in pathutil.finddirs(filename):
        if self._map.hastrackeddir(parent):
            break
        entry = self._map.get(parent)
        if entry is not None and not entry.removed:
            msg = _(b'file %r in dirstate clashes with %r')
            msg %= (pycompat.bytestr(parent), pycompat.bytestr(filename))
            raise error.Abort(msg)
    self._check_sparse(filename)
760
759
def _check_sparse(self, filename):
    """Check that a filename is inside the sparse profile; abort otherwise."""
    matcher = self._sparsematcher
    if matcher is None or matcher.always():
        return
    if matcher(filename):
        return
    msg = _(b"cannot add '%s' - it is outside the sparse checkout")
    hint = _(
        b'include file with `hg debugsparse --include <pattern>` or use '
        b'`hg add -s <file>` to include file directory while adding'
    )
    raise error.Abort(msg % filename, hint=hint)
772
771
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    """Discover the canonical case of *path* on disk and cache it.

    ``normed`` is the case-normalized form of ``path``; ``exists`` may be
    pre-computed by the caller to avoid an extra ``lexists`` call, or be
    None to let this function check.  Successful on-disk discoveries are
    cached in ``storemap`` (a fold map).  Returns the folded path.
    """
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # Maybe a path component exists
        if not ignoremissing and b'/' in path:
            d, f = path.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, None)
            folded = d + b"/" + f
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if b'/' in normed:
            d, f = normed.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, True)
            r = self._root + b"/" + d
            folded = d + b"/" + util.fspath(f, r)
        else:
            folded = util.fspath(normed, self._root)
        # only cache when the path exists on disk, so a later creation
        # under a different case can still be discovered
        storemap[normed] = folded

    return folded
798
797
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the folded (case-normalized) form of a file path.

    Consults only the file fold map; directories are handled by
    ``_normalize``.
    """
    normed = util.normcase(path)
    cached = self._map.filefoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        # path came from a disk walk, its case is already canonical
        return path
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.filefoldmap
    )
810
809
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the folded form of *path*, checking files then directories."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is not None:
        return folded
    if isknown:
        return path
    # store discovered result in dirfoldmap so that future
    # normalizefile calls don't start matching directories
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.dirfoldmap
    )
826
825
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    # on a case-sensitive filesystem the path is already canonical
    if not self._checkcase:
        return path
    return self._normalize(path, isknown, ignoremissing)
848
847
849 # XXX since this make the dirstate dirty, we should enforce that it is done
848 # XXX since this make the dirstate dirty, we should enforce that it is done
850 # withing an appropriate change-context that scope the change and ensure it
849 # withing an appropriate change-context that scope the change and ensure it
851 # eventually get written on disk (or rolled back)
850 # eventually get written on disk (or rolled back)
def clear(self):
    """Drop every entry from the dirstate map and flag it dirty."""
    self._dirty = True
    self._map.clear()
855
854
856 # XXX since this make the dirstate dirty, we should enforce that it is done
855 # XXX since this make the dirstate dirty, we should enforce that it is done
857 # withing an appropriate change-context that scope the change and ensure it
856 # withing an appropriate change-context that scope the change and ensure it
858 # eventually get written on disk (or rolled back)
857 # eventually get written on disk (or rolled back)
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to describe *parent* with *allfiles* tracked.

    When *changedfiles* is given, only those entries are refreshed or
    dropped; otherwise the whole dirstate is rebuilt from scratch.
    """
    matcher = self._sparsematcher
    if matcher is not None and not matcher.always():
        # should not add non-matching files
        allfiles = [f for f in allfiles if matcher(f)]
        if changedfiles:
            changedfiles = [f for f in changedfiles if matcher(f)]

        if changedfiles is not None:
            # entries now outside the sparse profile will be deleted
            # from the dirstate when they are not found in allfiles
            outside_profile = {f for f in self if not matcher(f)}
            changedfiles = outside_profile.union(changedfiles)

    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        self.clear()
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = []
        to_drop = []
        for f in changedfiles:
            (to_lookup if f in allfiles else to_drop).append(f)
    else:
        changed = set(changedfiles)
        to_lookup = changed & set(allfiles)
        to_drop = changed - to_lookup

    if self._origpl is None:
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        if self.in_merge:
            self.set_tracked(f)
        else:
            self._map.reset_state(
                f,
                wc_tracked=True,
                p1_tracked=True,
            )
    for f in to_drop:
        self._map.reset_state(f)

    self._dirty = True
910
909
def identity(self):
    """Return an identity token for the dirstate's storage.

    If the identity recorded for a previously-read dirstate equals this
    one, changes computed against that dirstate can be written out while
    keeping consistency.
    """
    return self._map.identity
918
917
def write(self, tr):
    """Flush pending dirstate changes, through transaction *tr* if given.

    With a transaction, the writes are delayed via file generators and
    the in-memory state is invalidated on abort.  Without one, the files
    are written immediately and atomically.
    """
    if not self._dirty:
        return

    # only refresh the tracked-hint key when the tracked set changed
    write_key = self._use_tracked_hint and self._dirty_tracked_set
    if tr:
        # BUGFIX: the original nested a redundant `if tr is not None:`
        # here, inside a branch already guarded by `if tr:` — always true.
        # make sure we invalidate the current change on abort
        tr.addabort(
            b'dirstate-invalidate',
            lambda tr: self.invalidate(),
        )
        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate-1-main',
            (self._filename,),
            lambda f: self._writedirstate(tr, f),
            location=b'plain',
            post_finalize=True,
        )
        if write_key:
            tr.addfilegenerator(
                b'dirstate-2-key-post',
                (self._filename_th,),
                lambda f: self._write_tracked_hint(tr, f),
                location=b'plain',
                post_finalize=True,
            )
        return

    file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
    with file(self._filename) as f:
        self._writedirstate(tr, f)
    if write_key:
        # we update the key-file after writing to make sure reader have a
        # key that match the newly written content
        with file(self._filename_th) as f:
            self._write_tracked_hint(tr, f)
957
956
def delete_tracked_hint(self):
    """Remove the tracked-hint file and stop maintaining it.

    To be used by format downgrades operation."""
    self._opener.unlink(self._filename_th)
    self._use_tracked_hint = False
964
963
def addparentchangecallback(self, category, callback):
    """Register *callback* to run when the working-dir parents change.

    The callback receives ``dirstate, (oldp1, oldp2), (newp1, newp2)``.
    *category* is a unique identifier: registering again under the same
    category replaces the previously-registered callback.
    """
    self._plchangecallbacks[category] = callback
975
974
976 def _writedirstate(self, tr, st):
975 def _writedirstate(self, tr, st):
977 # notify callbacks about parents change
976 # notify callbacks about parents change
978 if self._origpl is not None and self._origpl != self._pl:
977 if self._origpl is not None and self._origpl != self._pl:
979 for c, callback in sorted(self._plchangecallbacks.items()):
978 for c, callback in sorted(self._plchangecallbacks.items()):
980 callback(self, self._origpl, self._pl)
979 callback(self, self._origpl, self._pl)
981 self._origpl = None
980 self._origpl = None
982 self._map.write(tr, st)
981 self._map.write(tr, st)
983 self._dirty = False
982 self._dirty = False
984 self._dirty_tracked_set = False
983 self._dirty_tracked_set = False
985
984
def _write_tracked_hint(self, tr, f):
    """Write the format version and a fresh random key into hint file *f*."""
    new_key = node.hex(uuid.uuid4().bytes)
    # the leading "1" is the format version
    f.write(b"1\n" + new_key + b"\n")
989
988
def _dirignore(self, f):
    """Return True when *f* or any of its parent directories is ignored."""
    if self._ignore(f):
        return True
    return any(self._ignore(p) for p in pathutil.finddirs(f))
997
996
def _ignorefiles(self):
    """Return the list of ignore-pattern files to consult.

    Combines the repository's ``.hgignore`` (when present) with every
    ``ui.ignore``/``ui.ignore.*`` configuration entry.
    """
    files = []
    hgignore = self._join(b'.hgignore')
    if os.path.exists(hgignore):
        files.append(hgignore)
    for name, path in self._ui.configitems(b"ui"):
        if name == b'ignore' or name.startswith(b'ignore.'):
            # we need to use os.path.join here rather than self._join
            # because path is arbitrary and user-specified
            files.append(os.path.join(self._rootdir, util.expandpath(path)))
    return files
1008
1007
def _ignorefileandline(self, f):
    """Return ``(file, lineno, line)`` of the first ignore rule matching *f*.

    Subinclude patterns are followed breadth-first.  Returns
    ``(None, -1, b"")`` when no rule matches.
    """
    queue = collections.deque(self._ignorefiles())
    visited = set()
    while queue:
        ignorefile = queue.popleft()
        patterns = matchmod.readpatternfile(
            ignorefile, self._ui.warn, sourceinfo=True
        )
        for pattern, lineno, line in patterns:
            kind, p = matchmod._patsplit(pattern, b'glob')
            if kind == b"subinclude":
                # queue the nested pattern file instead of matching here
                if p not in visited:
                    queue.append(p)
                continue
            m = matchmod.match(
                self._root, b'', [], [pattern], warn=self._ui.warn
            )
            if m(f):
                return (ignorefile, lineno, line)
        visited.add(ignorefile)
    return (None, -1, b"")
1030
1029
def _walkexplicit(self, match, subrepos):
    """Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found."""

    def badtype(mode):
        # build a human-readable description of an unsupported file type
        kind = _(b'unknown')
        if stat.S_ISCHR(mode):
            kind = _(b'character device')
        elif stat.S_ISBLK(mode):
            kind = _(b'block device')
        elif stat.S_ISFIFO(mode):
            kind = _(b'fifo')
        elif stat.S_ISSOCK(mode):
            kind = _(b'socket')
        elif stat.S_ISDIR(mode):
            kind = _(b'directory')
        return _(b'unsupported file type (type is %s)') % kind

    # bind frequently-used attributes to locals for the loops below
    badfn = match.bad
    dmap = self._map
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop any matched file that lives inside a subrepo: both lists are
    # sorted, so a single merge-style pass suffices
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    # sentinel entries: subrepos and .hg map to None to stop deeper walks
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        if normalize:
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except (OSError) as inst:
            # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        # group the stat'ed files by their case-normalized form
        for f, st in results.items():
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        # for ambiguous groups, keep only the spelling that matches disk
        for norm, paths in normed.items():
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
1166
1165
1167 def walk(self, match, subrepos, unknown, ignored, full=True):
1166 def walk(self, match, subrepos, unknown, ignored, full=True):
1168 """
1167 """
1169 Walk recursively through the directory tree, finding all files
1168 Walk recursively through the directory tree, finding all files
1170 matched by match.
1169 matched by match.
1171
1170
1172 If full is False, maybe skip some known-clean files.
1171 If full is False, maybe skip some known-clean files.
1173
1172
1174 Return a dict mapping filename to stat-like object (either
1173 Return a dict mapping filename to stat-like object (either
1175 mercurial.osutil.stat instance or return value of os.stat()).
1174 mercurial.osutil.stat instance or return value of os.stat()).
1176
1175
1177 """
1176 """
1178 # full is a flag that extensions that hook into walk can use -- this
1177 # full is a flag that extensions that hook into walk can use -- this
1179 # implementation doesn't use it at all. This satisfies the contract
1178 # implementation doesn't use it at all. This satisfies the contract
1180 # because we only guarantee a "maybe".
1179 # because we only guarantee a "maybe".
1181
1180
1182 if ignored:
1181 if ignored:
1183 ignore = util.never
1182 ignore = util.never
1184 dirignore = util.never
1183 dirignore = util.never
1185 elif unknown:
1184 elif unknown:
1186 ignore = self._ignore
1185 ignore = self._ignore
1187 dirignore = self._dirignore
1186 dirignore = self._dirignore
1188 else:
1187 else:
1189 # if not unknown and not ignored, drop dir recursion and step 2
1188 # if not unknown and not ignored, drop dir recursion and step 2
1190 ignore = util.always
1189 ignore = util.always
1191 dirignore = util.always
1190 dirignore = util.always
1192
1191
1193 if self._sparsematchfn is not None:
1192 if self._sparsematchfn is not None:
1194 em = matchmod.exact(match.files())
1193 em = matchmod.exact(match.files())
1195 sm = matchmod.unionmatcher([self._sparsematcher, em])
1194 sm = matchmod.unionmatcher([self._sparsematcher, em])
1196 match = matchmod.intersectmatchers(match, sm)
1195 match = matchmod.intersectmatchers(match, sm)
1197
1196
1198 matchfn = match.matchfn
1197 matchfn = match.matchfn
1199 matchalways = match.always()
1198 matchalways = match.always()
1200 matchtdir = match.traversedir
1199 matchtdir = match.traversedir
1201 dmap = self._map
1200 dmap = self._map
1202 listdir = util.listdir
1201 listdir = util.listdir
1203 lstat = os.lstat
1202 lstat = os.lstat
1204 dirkind = stat.S_IFDIR
1203 dirkind = stat.S_IFDIR
1205 regkind = stat.S_IFREG
1204 regkind = stat.S_IFREG
1206 lnkkind = stat.S_IFLNK
1205 lnkkind = stat.S_IFLNK
1207 join = self._join
1206 join = self._join
1208
1207
1209 exact = skipstep3 = False
1208 exact = skipstep3 = False
1210 if match.isexact(): # match.exact
1209 if match.isexact(): # match.exact
1211 exact = True
1210 exact = True
1212 dirignore = util.always # skip step 2
1211 dirignore = util.always # skip step 2
1213 elif match.prefix(): # match.match, no patterns
1212 elif match.prefix(): # match.match, no patterns
1214 skipstep3 = True
1213 skipstep3 = True
1215
1214
1216 if not exact and self._checkcase:
1215 if not exact and self._checkcase:
1217 normalize = self._normalize
1216 normalize = self._normalize
1218 normalizefile = self._normalizefile
1217 normalizefile = self._normalizefile
1219 skipstep3 = False
1218 skipstep3 = False
1220 else:
1219 else:
1221 normalize = self._normalize
1220 normalize = self._normalize
1222 normalizefile = None
1221 normalizefile = None
1223
1222
1224 # step 1: find all explicit files
1223 # step 1: find all explicit files
1225 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1224 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1226 if matchtdir:
1225 if matchtdir:
1227 for d in work:
1226 for d in work:
1228 matchtdir(d[0])
1227 matchtdir(d[0])
1229 for d in dirsnotfound:
1228 for d in dirsnotfound:
1230 matchtdir(d)
1229 matchtdir(d)
1231
1230
1232 skipstep3 = skipstep3 and not (work or dirsnotfound)
1231 skipstep3 = skipstep3 and not (work or dirsnotfound)
1233 work = [d for d in work if not dirignore(d[0])]
1232 work = [d for d in work if not dirignore(d[0])]
1234
1233
1235 # step 2: visit subdirectories
1234 # step 2: visit subdirectories
1236 def traverse(work, alreadynormed):
1235 def traverse(work, alreadynormed):
1237 wadd = work.append
1236 wadd = work.append
1238 while work:
1237 while work:
1239 tracing.counter('dirstate.walk work', len(work))
1238 tracing.counter('dirstate.walk work', len(work))
1240 nd = work.pop()
1239 nd = work.pop()
1241 visitentries = match.visitchildrenset(nd)
1240 visitentries = match.visitchildrenset(nd)
1242 if not visitentries:
1241 if not visitentries:
1243 continue
1242 continue
1244 if visitentries == b'this' or visitentries == b'all':
1243 if visitentries == b'this' or visitentries == b'all':
1245 visitentries = None
1244 visitentries = None
1246 skip = None
1245 skip = None
1247 if nd != b'':
1246 if nd != b'':
1248 skip = b'.hg'
1247 skip = b'.hg'
1249 try:
1248 try:
1250 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1249 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1251 entries = listdir(join(nd), stat=True, skip=skip)
1250 entries = listdir(join(nd), stat=True, skip=skip)
1252 except (PermissionError, FileNotFoundError) as inst:
1251 except (PermissionError, FileNotFoundError) as inst:
1253 match.bad(
1252 match.bad(
1254 self.pathto(nd), encoding.strtolocal(inst.strerror)
1253 self.pathto(nd), encoding.strtolocal(inst.strerror)
1255 )
1254 )
1256 continue
1255 continue
1257 for f, kind, st in entries:
1256 for f, kind, st in entries:
1258 # Some matchers may return files in the visitentries set,
1257 # Some matchers may return files in the visitentries set,
1259 # instead of 'this', if the matcher explicitly mentions them
1258 # instead of 'this', if the matcher explicitly mentions them
1260 # and is not an exactmatcher. This is acceptable; we do not
1259 # and is not an exactmatcher. This is acceptable; we do not
1261 # make any hard assumptions about file-or-directory below
1260 # make any hard assumptions about file-or-directory below
1262 # based on the presence of `f` in visitentries. If
1261 # based on the presence of `f` in visitentries. If
1263 # visitchildrenset returned a set, we can always skip the
1262 # visitchildrenset returned a set, we can always skip the
1264 # entries *not* in the set it provided regardless of whether
1263 # entries *not* in the set it provided regardless of whether
1265 # they're actually a file or a directory.
1264 # they're actually a file or a directory.
1266 if visitentries and f not in visitentries:
1265 if visitentries and f not in visitentries:
1267 continue
1266 continue
1268 if normalizefile:
1267 if normalizefile:
1269 # even though f might be a directory, we're only
1268 # even though f might be a directory, we're only
1270 # interested in comparing it to files currently in the
1269 # interested in comparing it to files currently in the
1271 # dmap -- therefore normalizefile is enough
1270 # dmap -- therefore normalizefile is enough
1272 nf = normalizefile(
1271 nf = normalizefile(
1273 nd and (nd + b"/" + f) or f, True, True
1272 nd and (nd + b"/" + f) or f, True, True
1274 )
1273 )
1275 else:
1274 else:
1276 nf = nd and (nd + b"/" + f) or f
1275 nf = nd and (nd + b"/" + f) or f
1277 if nf not in results:
1276 if nf not in results:
1278 if kind == dirkind:
1277 if kind == dirkind:
1279 if not ignore(nf):
1278 if not ignore(nf):
1280 if matchtdir:
1279 if matchtdir:
1281 matchtdir(nf)
1280 matchtdir(nf)
1282 wadd(nf)
1281 wadd(nf)
1283 if nf in dmap and (matchalways or matchfn(nf)):
1282 if nf in dmap and (matchalways or matchfn(nf)):
1284 results[nf] = None
1283 results[nf] = None
1285 elif kind == regkind or kind == lnkkind:
1284 elif kind == regkind or kind == lnkkind:
1286 if nf in dmap:
1285 if nf in dmap:
1287 if matchalways or matchfn(nf):
1286 if matchalways or matchfn(nf):
1288 results[nf] = st
1287 results[nf] = st
1289 elif (matchalways or matchfn(nf)) and not ignore(
1288 elif (matchalways or matchfn(nf)) and not ignore(
1290 nf
1289 nf
1291 ):
1290 ):
1292 # unknown file -- normalize if necessary
1291 # unknown file -- normalize if necessary
1293 if not alreadynormed:
1292 if not alreadynormed:
1294 nf = normalize(nf, False, True)
1293 nf = normalize(nf, False, True)
1295 results[nf] = st
1294 results[nf] = st
1296 elif nf in dmap and (matchalways or matchfn(nf)):
1295 elif nf in dmap and (matchalways or matchfn(nf)):
1297 results[nf] = None
1296 results[nf] = None
1298
1297
1299 for nd, d in work:
1298 for nd, d in work:
1300 # alreadynormed means that processwork doesn't have to do any
1299 # alreadynormed means that processwork doesn't have to do any
1301 # expensive directory normalization
1300 # expensive directory normalization
1302 alreadynormed = not normalize or nd == d
1301 alreadynormed = not normalize or nd == d
1303 traverse([d], alreadynormed)
1302 traverse([d], alreadynormed)
1304
1303
1305 for s in subrepos:
1304 for s in subrepos:
1306 del results[s]
1305 del results[s]
1307 del results[b'.hg']
1306 del results[b'.hg']
1308
1307
1309 # step 3: visit remaining files from dmap
1308 # step 3: visit remaining files from dmap
1310 if not skipstep3 and not exact:
1309 if not skipstep3 and not exact:
1311 # If a dmap file is not in results yet, it was either
1310 # If a dmap file is not in results yet, it was either
1312 # a) not matching matchfn b) ignored, c) missing, or d) under a
1311 # a) not matching matchfn b) ignored, c) missing, or d) under a
1313 # symlink directory.
1312 # symlink directory.
1314 if not results and matchalways:
1313 if not results and matchalways:
1315 visit = [f for f in dmap]
1314 visit = [f for f in dmap]
1316 else:
1315 else:
1317 visit = [f for f in dmap if f not in results and matchfn(f)]
1316 visit = [f for f in dmap if f not in results and matchfn(f)]
1318 visit.sort()
1317 visit.sort()
1319
1318
1320 if unknown:
1319 if unknown:
1321 # unknown == True means we walked all dirs under the roots
1320 # unknown == True means we walked all dirs under the roots
1322 # that wasn't ignored, and everything that matched was stat'ed
1321 # that wasn't ignored, and everything that matched was stat'ed
1323 # and is already in results.
1322 # and is already in results.
1324 # The rest must thus be ignored or under a symlink.
1323 # The rest must thus be ignored or under a symlink.
1325 audit_path = pathutil.pathauditor(self._root, cached=True)
1324 audit_path = pathutil.pathauditor(self._root, cached=True)
1326
1325
1327 for nf in iter(visit):
1326 for nf in iter(visit):
1328 # If a stat for the same file was already added with a
1327 # If a stat for the same file was already added with a
1329 # different case, don't add one for this, since that would
1328 # different case, don't add one for this, since that would
1330 # make it appear as if the file exists under both names
1329 # make it appear as if the file exists under both names
1331 # on disk.
1330 # on disk.
1332 if (
1331 if (
1333 normalizefile
1332 normalizefile
1334 and normalizefile(nf, True, True) in results
1333 and normalizefile(nf, True, True) in results
1335 ):
1334 ):
1336 results[nf] = None
1335 results[nf] = None
1337 # Report ignored items in the dmap as long as they are not
1336 # Report ignored items in the dmap as long as they are not
1338 # under a symlink directory.
1337 # under a symlink directory.
1339 elif audit_path.check(nf):
1338 elif audit_path.check(nf):
1340 try:
1339 try:
1341 results[nf] = lstat(join(nf))
1340 results[nf] = lstat(join(nf))
1342 # file was just ignored, no links, and exists
1341 # file was just ignored, no links, and exists
1343 except OSError:
1342 except OSError:
1344 # file doesn't exist
1343 # file doesn't exist
1345 results[nf] = None
1344 results[nf] = None
1346 else:
1345 else:
1347 # It's either missing or under a symlink directory
1346 # It's either missing or under a symlink directory
1348 # which we in this case report as missing
1347 # which we in this case report as missing
1349 results[nf] = None
1348 results[nf] = None
1350 else:
1349 else:
1351 # We may not have walked the full directory tree above,
1350 # We may not have walked the full directory tree above,
1352 # so stat and check everything we missed.
1351 # so stat and check everything we missed.
1353 iv = iter(visit)
1352 iv = iter(visit)
1354 for st in util.statfiles([join(i) for i in visit]):
1353 for st in util.statfiles([join(i) for i in visit]):
1355 results[next(iv)] = st
1354 results[next(iv)] = st
1356 return results
1355 return results
1357
1356
1358 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1357 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1359 if self._sparsematchfn is not None:
1358 if self._sparsematchfn is not None:
1360 em = matchmod.exact(matcher.files())
1359 em = matchmod.exact(matcher.files())
1361 sm = matchmod.unionmatcher([self._sparsematcher, em])
1360 sm = matchmod.unionmatcher([self._sparsematcher, em])
1362 matcher = matchmod.intersectmatchers(matcher, sm)
1361 matcher = matchmod.intersectmatchers(matcher, sm)
1363 # Force Rayon (Rust parallelism library) to respect the number of
1362 # Force Rayon (Rust parallelism library) to respect the number of
1364 # workers. This is a temporary workaround until Rust code knows
1363 # workers. This is a temporary workaround until Rust code knows
1365 # how to read the config file.
1364 # how to read the config file.
1366 numcpus = self._ui.configint(b"worker", b"numcpus")
1365 numcpus = self._ui.configint(b"worker", b"numcpus")
1367 if numcpus is not None:
1366 if numcpus is not None:
1368 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1367 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1369
1368
1370 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1369 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1371 if not workers_enabled:
1370 if not workers_enabled:
1372 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1371 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1373
1372
1374 (
1373 (
1375 lookup,
1374 lookup,
1376 modified,
1375 modified,
1377 added,
1376 added,
1378 removed,
1377 removed,
1379 deleted,
1378 deleted,
1380 clean,
1379 clean,
1381 ignored,
1380 ignored,
1382 unknown,
1381 unknown,
1383 warnings,
1382 warnings,
1384 bad,
1383 bad,
1385 traversed,
1384 traversed,
1386 dirty,
1385 dirty,
1387 ) = rustmod.status(
1386 ) = rustmod.status(
1388 self._map._map,
1387 self._map._map,
1389 matcher,
1388 matcher,
1390 self._rootdir,
1389 self._rootdir,
1391 self._ignorefiles(),
1390 self._ignorefiles(),
1392 self._checkexec,
1391 self._checkexec,
1393 bool(list_clean),
1392 bool(list_clean),
1394 bool(list_ignored),
1393 bool(list_ignored),
1395 bool(list_unknown),
1394 bool(list_unknown),
1396 bool(matcher.traversedir),
1395 bool(matcher.traversedir),
1397 )
1396 )
1398
1397
1399 self._dirty |= dirty
1398 self._dirty |= dirty
1400
1399
1401 if matcher.traversedir:
1400 if matcher.traversedir:
1402 for dir in traversed:
1401 for dir in traversed:
1403 matcher.traversedir(dir)
1402 matcher.traversedir(dir)
1404
1403
1405 if self._ui.warn:
1404 if self._ui.warn:
1406 for item in warnings:
1405 for item in warnings:
1407 if isinstance(item, tuple):
1406 if isinstance(item, tuple):
1408 file_path, syntax = item
1407 file_path, syntax = item
1409 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1408 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1410 file_path,
1409 file_path,
1411 syntax,
1410 syntax,
1412 )
1411 )
1413 self._ui.warn(msg)
1412 self._ui.warn(msg)
1414 else:
1413 else:
1415 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1414 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1416 self._ui.warn(
1415 self._ui.warn(
1417 msg
1416 msg
1418 % (
1417 % (
1419 pathutil.canonpath(
1418 pathutil.canonpath(
1420 self._rootdir, self._rootdir, item
1419 self._rootdir, self._rootdir, item
1421 ),
1420 ),
1422 b"No such file or directory",
1421 b"No such file or directory",
1423 )
1422 )
1424 )
1423 )
1425
1424
1426 for fn, message in bad:
1425 for fn, message in bad:
1427 matcher.bad(fn, encoding.strtolocal(message))
1426 matcher.bad(fn, encoding.strtolocal(message))
1428
1427
1429 status = scmutil.status(
1428 status = scmutil.status(
1430 modified=modified,
1429 modified=modified,
1431 added=added,
1430 added=added,
1432 removed=removed,
1431 removed=removed,
1433 deleted=deleted,
1432 deleted=deleted,
1434 unknown=unknown,
1433 unknown=unknown,
1435 ignored=ignored,
1434 ignored=ignored,
1436 clean=clean,
1435 clean=clean,
1437 )
1436 )
1438 return (lookup, status)
1437 return (lookup, status)
1439
1438
1440 # XXX since this can make the dirstate dirty (through rust), we should
1439 # XXX since this can make the dirstate dirty (through rust), we should
1441 # enforce that it is done withing an appropriate change-context that scope
1440 # enforce that it is done withing an appropriate change-context that scope
1442 # the change and ensure it eventually get written on disk (or rolled back)
1441 # the change and ensure it eventually get written on disk (or rolled back)
1443 def status(self, match, subrepos, ignored, clean, unknown):
1442 def status(self, match, subrepos, ignored, clean, unknown):
1444 """Determine the status of the working copy relative to the
1443 """Determine the status of the working copy relative to the
1445 dirstate and return a pair of (unsure, status), where status is of type
1444 dirstate and return a pair of (unsure, status), where status is of type
1446 scmutil.status and:
1445 scmutil.status and:
1447
1446
1448 unsure:
1447 unsure:
1449 files that might have been modified since the dirstate was
1448 files that might have been modified since the dirstate was
1450 written, but need to be read to be sure (size is the same
1449 written, but need to be read to be sure (size is the same
1451 but mtime differs)
1450 but mtime differs)
1452 status.modified:
1451 status.modified:
1453 files that have definitely been modified since the dirstate
1452 files that have definitely been modified since the dirstate
1454 was written (different size or mode)
1453 was written (different size or mode)
1455 status.clean:
1454 status.clean:
1456 files that have definitely not been modified since the
1455 files that have definitely not been modified since the
1457 dirstate was written
1456 dirstate was written
1458 """
1457 """
1459 listignored, listclean, listunknown = ignored, clean, unknown
1458 listignored, listclean, listunknown = ignored, clean, unknown
1460 lookup, modified, added, unknown, ignored = [], [], [], [], []
1459 lookup, modified, added, unknown, ignored = [], [], [], [], []
1461 removed, deleted, clean = [], [], []
1460 removed, deleted, clean = [], [], []
1462
1461
1463 dmap = self._map
1462 dmap = self._map
1464 dmap.preload()
1463 dmap.preload()
1465
1464
1466 use_rust = True
1465 use_rust = True
1467
1466
1468 allowed_matchers = (
1467 allowed_matchers = (
1469 matchmod.alwaysmatcher,
1468 matchmod.alwaysmatcher,
1470 matchmod.differencematcher,
1469 matchmod.differencematcher,
1471 matchmod.exactmatcher,
1470 matchmod.exactmatcher,
1472 matchmod.includematcher,
1471 matchmod.includematcher,
1473 matchmod.intersectionmatcher,
1472 matchmod.intersectionmatcher,
1474 matchmod.nevermatcher,
1473 matchmod.nevermatcher,
1475 matchmod.unionmatcher,
1474 matchmod.unionmatcher,
1476 )
1475 )
1477
1476
1478 if rustmod is None:
1477 if rustmod is None:
1479 use_rust = False
1478 use_rust = False
1480 elif self._checkcase:
1479 elif self._checkcase:
1481 # Case-insensitive filesystems are not handled yet
1480 # Case-insensitive filesystems are not handled yet
1482 use_rust = False
1481 use_rust = False
1483 elif subrepos:
1482 elif subrepos:
1484 use_rust = False
1483 use_rust = False
1485 elif not isinstance(match, allowed_matchers):
1484 elif not isinstance(match, allowed_matchers):
1486 # Some matchers have yet to be implemented
1485 # Some matchers have yet to be implemented
1487 use_rust = False
1486 use_rust = False
1488
1487
1489 # Get the time from the filesystem so we can disambiguate files that
1488 # Get the time from the filesystem so we can disambiguate files that
1490 # appear modified in the present or future.
1489 # appear modified in the present or future.
1491 try:
1490 try:
1492 mtime_boundary = timestamp.get_fs_now(self._opener)
1491 mtime_boundary = timestamp.get_fs_now(self._opener)
1493 except OSError:
1492 except OSError:
1494 # In largefiles or readonly context
1493 # In largefiles or readonly context
1495 mtime_boundary = None
1494 mtime_boundary = None
1496
1495
1497 if use_rust:
1496 if use_rust:
1498 try:
1497 try:
1499 res = self._rust_status(
1498 res = self._rust_status(
1500 match, listclean, listignored, listunknown
1499 match, listclean, listignored, listunknown
1501 )
1500 )
1502 return res + (mtime_boundary,)
1501 return res + (mtime_boundary,)
1503 except rustmod.FallbackError:
1502 except rustmod.FallbackError:
1504 pass
1503 pass
1505
1504
1506 def noop(f):
1505 def noop(f):
1507 pass
1506 pass
1508
1507
1509 dcontains = dmap.__contains__
1508 dcontains = dmap.__contains__
1510 dget = dmap.__getitem__
1509 dget = dmap.__getitem__
1511 ladd = lookup.append # aka "unsure"
1510 ladd = lookup.append # aka "unsure"
1512 madd = modified.append
1511 madd = modified.append
1513 aadd = added.append
1512 aadd = added.append
1514 uadd = unknown.append if listunknown else noop
1513 uadd = unknown.append if listunknown else noop
1515 iadd = ignored.append if listignored else noop
1514 iadd = ignored.append if listignored else noop
1516 radd = removed.append
1515 radd = removed.append
1517 dadd = deleted.append
1516 dadd = deleted.append
1518 cadd = clean.append if listclean else noop
1517 cadd = clean.append if listclean else noop
1519 mexact = match.exact
1518 mexact = match.exact
1520 dirignore = self._dirignore
1519 dirignore = self._dirignore
1521 checkexec = self._checkexec
1520 checkexec = self._checkexec
1522 checklink = self._checklink
1521 checklink = self._checklink
1523 copymap = self._map.copymap
1522 copymap = self._map.copymap
1524
1523
1525 # We need to do full walks when either
1524 # We need to do full walks when either
1526 # - we're listing all clean files, or
1525 # - we're listing all clean files, or
1527 # - match.traversedir does something, because match.traversedir should
1526 # - match.traversedir does something, because match.traversedir should
1528 # be called for every dir in the working dir
1527 # be called for every dir in the working dir
1529 full = listclean or match.traversedir is not None
1528 full = listclean or match.traversedir is not None
1530 for fn, st in self.walk(
1529 for fn, st in self.walk(
1531 match, subrepos, listunknown, listignored, full=full
1530 match, subrepos, listunknown, listignored, full=full
1532 ).items():
1531 ).items():
1533 if not dcontains(fn):
1532 if not dcontains(fn):
1534 if (listignored or mexact(fn)) and dirignore(fn):
1533 if (listignored or mexact(fn)) and dirignore(fn):
1535 if listignored:
1534 if listignored:
1536 iadd(fn)
1535 iadd(fn)
1537 else:
1536 else:
1538 uadd(fn)
1537 uadd(fn)
1539 continue
1538 continue
1540
1539
1541 t = dget(fn)
1540 t = dget(fn)
1542 mode = t.mode
1541 mode = t.mode
1543 size = t.size
1542 size = t.size
1544
1543
1545 if not st and t.tracked:
1544 if not st and t.tracked:
1546 dadd(fn)
1545 dadd(fn)
1547 elif t.p2_info:
1546 elif t.p2_info:
1548 madd(fn)
1547 madd(fn)
1549 elif t.added:
1548 elif t.added:
1550 aadd(fn)
1549 aadd(fn)
1551 elif t.removed:
1550 elif t.removed:
1552 radd(fn)
1551 radd(fn)
1553 elif t.tracked:
1552 elif t.tracked:
1554 if not checklink and t.has_fallback_symlink:
1553 if not checklink and t.has_fallback_symlink:
1555 # If the file system does not support symlink, the mode
1554 # If the file system does not support symlink, the mode
1556 # might not be correctly stored in the dirstate, so do not
1555 # might not be correctly stored in the dirstate, so do not
1557 # trust it.
1556 # trust it.
1558 ladd(fn)
1557 ladd(fn)
1559 elif not checkexec and t.has_fallback_exec:
1558 elif not checkexec and t.has_fallback_exec:
1560 # If the file system does not support exec bits, the mode
1559 # If the file system does not support exec bits, the mode
1561 # might not be correctly stored in the dirstate, so do not
1560 # might not be correctly stored in the dirstate, so do not
1562 # trust it.
1561 # trust it.
1563 ladd(fn)
1562 ladd(fn)
1564 elif (
1563 elif (
1565 size >= 0
1564 size >= 0
1566 and (
1565 and (
1567 (size != st.st_size and size != st.st_size & _rangemask)
1566 (size != st.st_size and size != st.st_size & _rangemask)
1568 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1567 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1569 )
1568 )
1570 or fn in copymap
1569 or fn in copymap
1571 ):
1570 ):
1572 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1571 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1573 # issue6456: Size returned may be longer due to
1572 # issue6456: Size returned may be longer due to
1574 # encryption on EXT-4 fscrypt, undecided.
1573 # encryption on EXT-4 fscrypt, undecided.
1575 ladd(fn)
1574 ladd(fn)
1576 else:
1575 else:
1577 madd(fn)
1576 madd(fn)
1578 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1577 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1579 # There might be a change in the future if for example the
1578 # There might be a change in the future if for example the
1580 # internal clock is off, but this is a case where the issues
1579 # internal clock is off, but this is a case where the issues
1581 # the user would face would be a lot worse and there is
1580 # the user would face would be a lot worse and there is
1582 # nothing we can really do.
1581 # nothing we can really do.
1583 ladd(fn)
1582 ladd(fn)
1584 elif listclean:
1583 elif listclean:
1585 cadd(fn)
1584 cadd(fn)
1586 status = scmutil.status(
1585 status = scmutil.status(
1587 modified, added, removed, deleted, unknown, ignored, clean
1586 modified, added, removed, deleted, unknown, ignored, clean
1588 )
1587 )
1589 return (lookup, status, mtime_boundary)
1588 return (lookup, status, mtime_boundary)
1590
1589
1591 def matches(self, match):
1590 def matches(self, match):
1592 """
1591 """
1593 return files in the dirstate (in whatever state) filtered by match
1592 return files in the dirstate (in whatever state) filtered by match
1594 """
1593 """
1595 dmap = self._map
1594 dmap = self._map
1596 if rustmod is not None:
1595 if rustmod is not None:
1597 dmap = self._map._map
1596 dmap = self._map._map
1598
1597
1599 if match.always():
1598 if match.always():
1600 return dmap.keys()
1599 return dmap.keys()
1601 files = match.files()
1600 files = match.files()
1602 if match.isexact():
1601 if match.isexact():
1603 # fast path -- filter the other way around, since typically files is
1602 # fast path -- filter the other way around, since typically files is
1604 # much smaller than dmap
1603 # much smaller than dmap
1605 return [f for f in files if f in dmap]
1604 return [f for f in files if f in dmap]
1606 if match.prefix() and all(fn in dmap for fn in files):
1605 if match.prefix() and all(fn in dmap for fn in files):
1607 # fast path -- all the values are known to be files, so just return
1606 # fast path -- all the values are known to be files, so just return
1608 # that
1607 # that
1609 return list(files)
1608 return list(files)
1610 return [f for f in dmap if match(f)]
1609 return [f for f in dmap if match(f)]
1611
1610
1612 def _actualfilename(self, tr):
1611 def _actualfilename(self, tr):
1613 if tr:
1612 if tr:
1614 return self._pendingfilename
1613 return self._pendingfilename
1615 else:
1614 else:
1616 return self._filename
1615 return self._filename
1617
1616
1618 def all_file_names(self):
1617 def all_file_names(self):
1619 """list all filename currently used by this dirstate
1618 """list all filename currently used by this dirstate
1620
1619
1621 This is only used to do `hg rollback` related backup in the transaction
1620 This is only used to do `hg rollback` related backup in the transaction
1622 """
1621 """
1623 if not self._opener.exists(self._filename):
1622 if not self._opener.exists(self._filename):
1624 # no data every written to disk yet
1623 # no data every written to disk yet
1625 return ()
1624 return ()
1626 elif self._use_dirstate_v2:
1625 elif self._use_dirstate_v2:
1627 return (
1626 return (
1628 self._filename,
1627 self._filename,
1629 self._map.docket.data_filename(),
1628 self._map.docket.data_filename(),
1630 )
1629 )
1631 else:
1630 else:
1632 return (self._filename,)
1631 return (self._filename,)
1633
1632
1634 def data_backup_filename(self, backupname):
1635 if not self._use_dirstate_v2:
1636 return None
1637 return backupname + b'.v2-data'
1638
1639 def _new_backup_data_filename(self, backupname):
1640 """return a filename to backup a data-file or None"""
1641 if not self._use_dirstate_v2:
1642 return None
1643 if self._map.docket.uuid is None:
1644 # not created yet, nothing to backup
1645 return None
1646 data_filename = self._map.docket.data_filename()
1647 return data_filename, self.data_backup_filename(backupname)
1648
1649 def backup_data_file(self, backupname):
1650 if not self._use_dirstate_v2:
1651 return None
1652 docket = docketmod.DirstateDocket.parse(
1653 self._opener.read(backupname),
1654 self._nodeconstants,
1655 )
1656 return self.data_backup_filename(backupname), docket.data_filename()
1657
1658 def savebackup(self, tr, backupname):
1659 '''Save current dirstate into backup file'''
1660 filename = self._actualfilename(tr)
1661 assert backupname != filename
1662
1663 # use '_writedirstate' instead of 'write' to write changes certainly,
1664 # because the latter omits writing out if transaction is running.
1665 # output file will be used to create backup of dirstate at this point.
1666 if self._dirty:
1667 self._writedirstate(
1668 tr,
1669 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1670 )
1671
1672 if tr:
1673 # ensure that subsequent tr.writepending returns True for
1674 # changes written out above, even if dirstate is never
1675 # changed after this
1676 tr.addfilegenerator(
1677 b'dirstate-1-main',
1678 (self._filename,),
1679 lambda f: self._writedirstate(tr, f),
1680 location=b'plain',
1681 post_finalize=True,
1682 )
1683
1684 self._opener.tryunlink(backupname)
1685 if self._opener.exists(filename):
1686 # hardlink backup is okay because _writedirstate is always called
1687 # with an "atomictemp=True" file.
1688 util.copyfile(
1689 self._opener.join(filename),
1690 self._opener.join(backupname),
1691 hardlink=True,
1692 )
1693 data_pair = self._new_backup_data_filename(backupname)
1694 if data_pair is not None:
1695 data_filename, bck_data_filename = data_pair
1696 util.copyfile(
1697 self._opener.join(data_filename),
1698 self._opener.join(bck_data_filename),
1699 hardlink=True,
1700 )
1701 if tr is not None:
1702 # ensure that pending file written above is unlinked at
1703 # failure, even if tr.writepending isn't invoked until the
1704 # end of this transaction
1705 tr.registertmp(bck_data_filename, location=b'plain')
1706
1707 def restorebackup(self, tr, backupname):
1708 '''Restore dirstate by backup file'''
1709 # this "invalidate()" prevents "wlock.release()" from writing
1710 # changes of dirstate out after restoring from backup file
1711 self.invalidate()
1712 o = self._opener
1713 if not o.exists(backupname):
1714 # there was no file backup, delete existing files
1715 filename = self._actualfilename(tr)
1716 data_file = None
1717 if self._use_dirstate_v2 and self._map.docket.uuid is not None:
1718 data_file = self._map.docket.data_filename()
1719 if o.exists(filename):
1720 o.unlink(filename)
1721 if data_file is not None and o.exists(data_file):
1722 o.unlink(data_file)
1723 return
1724 filename = self._actualfilename(tr)
1725 data_pair = self.backup_data_file(backupname)
1726 if o.exists(filename) and util.samefile(
1727 o.join(backupname), o.join(filename)
1728 ):
1729 o.unlink(backupname)
1730 else:
1731 o.rename(backupname, filename, checkambig=True)
1732
1733 if data_pair is not None:
1734 data_backup, target = data_pair
1735 if o.exists(target) and util.samefile(
1736 o.join(data_backup), o.join(target)
1737 ):
1738 o.unlink(data_backup)
1739 else:
1740 o.rename(data_backup, target, checkambig=True)
1741
1742 def clearbackup(self, tr, backupname):
1743 '''Clear backup file'''
1744 o = self._opener
1745 if o.exists(backupname):
1746 data_backup = self.backup_data_file(backupname)
1747 o.unlink(backupname)
1748 if data_backup is not None:
1749 o.unlink(data_backup[0])
1750
1751 def verify(self, m1, m2, p1, narrow_matcher=None):
1633 def verify(self, m1, m2, p1, narrow_matcher=None):
1752 """
1634 """
1753 check the dirstate contents against the parent manifest and yield errors
1635 check the dirstate contents against the parent manifest and yield errors
1754 """
1636 """
1755 missing_from_p1 = _(
1637 missing_from_p1 = _(
1756 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1638 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1757 )
1639 )
1758 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1640 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1759 missing_from_ps = _(
1641 missing_from_ps = _(
1760 b"%s marked as modified, but not in either manifest\n"
1642 b"%s marked as modified, but not in either manifest\n"
1761 )
1643 )
1762 missing_from_ds = _(
1644 missing_from_ds = _(
1763 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1645 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1764 )
1646 )
1765 for f, entry in self.items():
1647 for f, entry in self.items():
1766 if entry.p1_tracked:
1648 if entry.p1_tracked:
1767 if entry.modified and f not in m1 and f not in m2:
1649 if entry.modified and f not in m1 and f not in m2:
1768 yield missing_from_ps % f
1650 yield missing_from_ps % f
1769 elif f not in m1:
1651 elif f not in m1:
1770 yield missing_from_p1 % (f, node.short(p1))
1652 yield missing_from_p1 % (f, node.short(p1))
1771 if entry.added and f in m1:
1653 if entry.added and f in m1:
1772 yield unexpected_in_p1 % f
1654 yield unexpected_in_p1 % f
1773 for f in m1:
1655 for f in m1:
1774 if narrow_matcher is not None and not narrow_matcher(f):
1656 if narrow_matcher is not None and not narrow_matcher(f):
1775 continue
1657 continue
1776 entry = self.get_entry(f)
1658 entry = self.get_entry(f)
1777 if not entry.p1_tracked:
1659 if not entry.p1_tracked:
1778 yield missing_from_ds % (f, node.short(p1))
1660 yield missing_from_ds % (f, node.short(p1))
@@ -1,235 +1,226 b''
1 import contextlib
1 import contextlib
2
2
3 from . import util as interfaceutil
3 from . import util as interfaceutil
4
4
5
5
6 class idirstate(interfaceutil.Interface):
6 class idirstate(interfaceutil.Interface):
7 def __init__(
7 def __init__(
8 opener,
8 opener,
9 ui,
9 ui,
10 root,
10 root,
11 validate,
11 validate,
12 sparsematchfn,
12 sparsematchfn,
13 nodeconstants,
13 nodeconstants,
14 use_dirstate_v2,
14 use_dirstate_v2,
15 use_tracked_hint=False,
15 use_tracked_hint=False,
16 ):
16 ):
17 """Create a new dirstate object.
17 """Create a new dirstate object.
18
18
19 opener is an open()-like callable that can be used to open the
19 opener is an open()-like callable that can be used to open the
20 dirstate file; root is the root of the directory tracked by
20 dirstate file; root is the root of the directory tracked by
21 the dirstate.
21 the dirstate.
22 """
22 """
23
23
24 # TODO: all these private methods and attributes should be made
24 # TODO: all these private methods and attributes should be made
25 # public or removed from the interface.
25 # public or removed from the interface.
26 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
26 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
27 is_changing_any = interfaceutil.Attribute(
27 is_changing_any = interfaceutil.Attribute(
28 """True if any changes in progress."""
28 """True if any changes in progress."""
29 )
29 )
30 is_changing_parents = interfaceutil.Attribute(
30 is_changing_parents = interfaceutil.Attribute(
31 """True if parents changes in progress."""
31 """True if parents changes in progress."""
32 )
32 )
33 is_changing_files = interfaceutil.Attribute(
33 is_changing_files = interfaceutil.Attribute(
34 """True if file tracking changes in progress."""
34 """True if file tracking changes in progress."""
35 )
35 )
36
36
37 def _ignorefiles():
37 def _ignorefiles():
38 """Return a list of files containing patterns to ignore."""
38 """Return a list of files containing patterns to ignore."""
39
39
40 def _ignorefileandline(f):
40 def _ignorefileandline(f):
41 """Given a file `f`, return the ignore file and line that ignores it."""
41 """Given a file `f`, return the ignore file and line that ignores it."""
42
42
43 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
43 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
44 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
44 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
45
45
46 @contextlib.contextmanager
46 @contextlib.contextmanager
47 def changing_parents(repo):
47 def changing_parents(repo):
48 """Context manager for handling dirstate parents.
48 """Context manager for handling dirstate parents.
49
49
50 If an exception occurs in the scope of the context manager,
50 If an exception occurs in the scope of the context manager,
51 the incoherent dirstate won't be written when wlock is
51 the incoherent dirstate won't be written when wlock is
52 released.
52 released.
53 """
53 """
54
54
55 @contextlib.contextmanager
55 @contextlib.contextmanager
56 def changing_files(repo):
56 def changing_files(repo):
57 """Context manager for handling dirstate files.
57 """Context manager for handling dirstate files.
58
58
59 If an exception occurs in the scope of the context manager,
59 If an exception occurs in the scope of the context manager,
60 the incoherent dirstate won't be written when wlock is
60 the incoherent dirstate won't be written when wlock is
61 released.
61 released.
62 """
62 """
63
63
64 def hasdir(d):
64 def hasdir(d):
65 pass
65 pass
66
66
67 def flagfunc(buildfallback):
67 def flagfunc(buildfallback):
68 """build a callable that returns flags associated with a filename
68 """build a callable that returns flags associated with a filename
69
69
70 The information is extracted from three possible layers:
70 The information is extracted from three possible layers:
71 1. the file system if it supports the information
71 1. the file system if it supports the information
72 2. the "fallback" information stored in the dirstate if any
72 2. the "fallback" information stored in the dirstate if any
73 3. a more expensive mechanism inferring the flags from the parents.
73 3. a more expensive mechanism inferring the flags from the parents.
74 """
74 """
75
75
76 def getcwd():
76 def getcwd():
77 """Return the path from which a canonical path is calculated.
77 """Return the path from which a canonical path is calculated.
78
78
79 This path should be used to resolve file patterns or to convert
79 This path should be used to resolve file patterns or to convert
80 canonical paths back to file paths for display. It shouldn't be
80 canonical paths back to file paths for display. It shouldn't be
81 used to get real file paths. Use vfs functions instead.
81 used to get real file paths. Use vfs functions instead.
82 """
82 """
83
83
84 def pathto(f, cwd=None):
84 def pathto(f, cwd=None):
85 pass
85 pass
86
86
87 def get_entry(path):
87 def get_entry(path):
88 """return a DirstateItem for the associated path"""
88 """return a DirstateItem for the associated path"""
89
89
90 def __contains__(key):
90 def __contains__(key):
91 """Check if bytestring `key` is known to the dirstate."""
91 """Check if bytestring `key` is known to the dirstate."""
92
92
93 def __iter__():
93 def __iter__():
94 """Iterate the dirstate's contained filenames as bytestrings."""
94 """Iterate the dirstate's contained filenames as bytestrings."""
95
95
96 def items():
96 def items():
97 """Iterate the dirstate's entries as (filename, DirstateItem.
97 """Iterate the dirstate's entries as (filename, DirstateItem.
98
98
99 As usual, filename is a bytestring.
99 As usual, filename is a bytestring.
100 """
100 """
101
101
102 iteritems = items
102 iteritems = items
103
103
104 def parents():
104 def parents():
105 pass
105 pass
106
106
107 def p1():
107 def p1():
108 pass
108 pass
109
109
110 def p2():
110 def p2():
111 pass
111 pass
112
112
113 def branch():
113 def branch():
114 pass
114 pass
115
115
116 def setparents(p1, p2=None):
116 def setparents(p1, p2=None):
117 """Set dirstate parents to p1 and p2.
117 """Set dirstate parents to p1 and p2.
118
118
119 When moving from two parents to one, "merged" entries a
119 When moving from two parents to one, "merged" entries a
120 adjusted to normal and previous copy records discarded and
120 adjusted to normal and previous copy records discarded and
121 returned by the call.
121 returned by the call.
122
122
123 See localrepo.setparents()
123 See localrepo.setparents()
124 """
124 """
125
125
126 def setbranch(branch):
126 def setbranch(branch):
127 pass
127 pass
128
128
129 def invalidate():
129 def invalidate():
130 """Causes the next access to reread the dirstate.
130 """Causes the next access to reread the dirstate.
131
131
132 This is different from localrepo.invalidatedirstate() because it always
132 This is different from localrepo.invalidatedirstate() because it always
133 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
133 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
134 check whether the dirstate has changed before rereading it."""
134 check whether the dirstate has changed before rereading it."""
135
135
136 def copy(source, dest):
136 def copy(source, dest):
137 """Mark dest as a copy of source. Unmark dest if source is None."""
137 """Mark dest as a copy of source. Unmark dest if source is None."""
138
138
139 def copied(file):
139 def copied(file):
140 pass
140 pass
141
141
142 def copies():
142 def copies():
143 pass
143 pass
144
144
145 def normalize(path, isknown=False, ignoremissing=False):
145 def normalize(path, isknown=False, ignoremissing=False):
146 """
146 """
147 normalize the case of a pathname when on a casefolding filesystem
147 normalize the case of a pathname when on a casefolding filesystem
148
148
149 isknown specifies whether the filename came from walking the
149 isknown specifies whether the filename came from walking the
150 disk, to avoid extra filesystem access.
150 disk, to avoid extra filesystem access.
151
151
152 If ignoremissing is True, missing path are returned
152 If ignoremissing is True, missing path are returned
153 unchanged. Otherwise, we try harder to normalize possibly
153 unchanged. Otherwise, we try harder to normalize possibly
154 existing path components.
154 existing path components.
155
155
156 The normalized case is determined based on the following precedence:
156 The normalized case is determined based on the following precedence:
157
157
158 - version of name already stored in the dirstate
158 - version of name already stored in the dirstate
159 - version of name stored on disk
159 - version of name stored on disk
160 - version provided via command arguments
160 - version provided via command arguments
161 """
161 """
162
162
163 def clear():
163 def clear():
164 pass
164 pass
165
165
166 def rebuild(parent, allfiles, changedfiles=None):
166 def rebuild(parent, allfiles, changedfiles=None):
167 pass
167 pass
168
168
169 def identity():
169 def identity():
170 """Return identity of dirstate itself to detect changing in storage
170 """Return identity of dirstate itself to detect changing in storage
171
171
172 If identity of previous dirstate is equal to this, writing
172 If identity of previous dirstate is equal to this, writing
173 changes based on the former dirstate out can keep consistency.
173 changes based on the former dirstate out can keep consistency.
174 """
174 """
175
175
176 def write(tr):
176 def write(tr):
177 pass
177 pass
178
178
179 def addparentchangecallback(category, callback):
179 def addparentchangecallback(category, callback):
180 """add a callback to be called when the wd parents are changed
180 """add a callback to be called when the wd parents are changed
181
181
182 Callback will be called with the following arguments:
182 Callback will be called with the following arguments:
183 dirstate, (oldp1, oldp2), (newp1, newp2)
183 dirstate, (oldp1, oldp2), (newp1, newp2)
184
184
185 Category is a unique identifier to allow overwriting an old callback
185 Category is a unique identifier to allow overwriting an old callback
186 with a newer callback.
186 with a newer callback.
187 """
187 """
188
188
189 def walk(match, subrepos, unknown, ignored, full=True):
189 def walk(match, subrepos, unknown, ignored, full=True):
190 """
190 """
191 Walk recursively through the directory tree, finding all files
191 Walk recursively through the directory tree, finding all files
192 matched by match.
192 matched by match.
193
193
194 If full is False, maybe skip some known-clean files.
194 If full is False, maybe skip some known-clean files.
195
195
196 Return a dict mapping filename to stat-like object (either
196 Return a dict mapping filename to stat-like object (either
197 mercurial.osutil.stat instance or return value of os.stat()).
197 mercurial.osutil.stat instance or return value of os.stat()).
198
198
199 """
199 """
200
200
201 def status(match, subrepos, ignored, clean, unknown):
201 def status(match, subrepos, ignored, clean, unknown):
202 """Determine the status of the working copy relative to the
202 """Determine the status of the working copy relative to the
203 dirstate and return a pair of (unsure, status), where status is of type
203 dirstate and return a pair of (unsure, status), where status is of type
204 scmutil.status and:
204 scmutil.status and:
205
205
206 unsure:
206 unsure:
207 files that might have been modified since the dirstate was
207 files that might have been modified since the dirstate was
208 written, but need to be read to be sure (size is the same
208 written, but need to be read to be sure (size is the same
209 but mtime differs)
209 but mtime differs)
210 status.modified:
210 status.modified:
211 files that have definitely been modified since the dirstate
211 files that have definitely been modified since the dirstate
212 was written (different size or mode)
212 was written (different size or mode)
213 status.clean:
213 status.clean:
214 files that have definitely not been modified since the
214 files that have definitely not been modified since the
215 dirstate was written
215 dirstate was written
216 """
216 """
217
217
218 def matches(match):
218 def matches(match):
219 """
219 """
220 return files in the dirstate (in whatever state) filtered by match
220 return files in the dirstate (in whatever state) filtered by match
221 """
221 """
222
222
223 def savebackup(tr, backupname):
224 '''Save current dirstate into backup file'''
225
226 def restorebackup(tr, backupname):
227 '''Restore dirstate by backup file'''
228
229 def clearbackup(tr, backupname):
230 '''Clear backup file'''
231
232 def verify(m1, m2, p1, narrow_matcher=None):
223 def verify(m1, m2, p1, narrow_matcher=None):
233 """
224 """
234 check the dirstate contents against the parent manifest and yield errors
225 check the dirstate contents against the parent manifest and yield errors
235 """
226 """
General Comments 0
You need to be logged in to leave comments. Login now