##// END OF EJS Templates
dirstate: make the `transaction` argument of `setbranch` mandatory...
marmoute -
r52166:79cd29d5 default
parent child Browse files
Show More
@@ -1,395 +1,395 b''
1 import contextlib
1 import contextlib
2 import os
2 import os
3
3
4 from mercurial.node import sha1nodeconstants
4 from mercurial.node import sha1nodeconstants
5 from mercurial import (
5 from mercurial import (
6 dirstatemap,
6 dirstatemap,
7 error,
7 error,
8 extensions,
8 extensions,
9 match as matchmod,
9 match as matchmod,
10 pycompat,
10 pycompat,
11 scmutil,
11 scmutil,
12 util,
12 util,
13 )
13 )
14 from mercurial.dirstateutils import (
14 from mercurial.dirstateutils import (
15 timestamp,
15 timestamp,
16 )
16 )
17 from mercurial.interfaces import (
17 from mercurial.interfaces import (
18 dirstate as intdirstate,
18 dirstate as intdirstate,
19 util as interfaceutil,
19 util as interfaceutil,
20 )
20 )
21
21
22 from . import gitutil
22 from . import gitutil
23
23
24
24
# Convenience aliases used throughout this module.
DirstateItem = dirstatemap.DirstateItem
propertycache = util.propertycache
# ``pygit2`` is None when the bindings are unavailable; guarded at each use.
pygit2 = gitutil.get_pygit2()
28
28
29
29
def readpatternfile(orig, filepath, warn, sourceinfo=False):
    """Wrap ``matchmod.readpatternfile`` to understand .gitignore syntax.

    Files that are not git ignore files (neither ``info/exclude`` nor a
    ``.gitignore``) are delegated to the wrapped implementation.  For git
    ignore files, each pattern line is translated to an equivalent
    Mercurial pattern:

    - lines starting with ``/`` anchor at the repo root (``rootglob:``)
    - all other lines match anywhere in the tree (``relglob:``)
    - negated (``!``) patterns are unsupported and produce a warning

    Returns a ``(patterns, warnings)`` pair of lists of bytes.
    """
    if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')):
        # Not a git ignore file: defer to the wrapped function, forwarding
        # the caller's sourceinfo request.  (The previous code hard-coded
        # sourceinfo=False, silently dropping the caller's argument.)
        return orig(filepath, warn, sourceinfo=sourceinfo)
    result = []
    warnings = []
    with open(filepath, 'rb') as fp:
        for l in fp:
            l = l.strip()
            if not l or l.startswith(b'#'):
                # blank lines and comments carry no pattern
                continue
            if l.startswith(b'!'):
                warnings.append(b'unsupported ignore pattern %s' % l)
                continue
            if l.startswith(b'/'):
                result.append(b'rootglob:' + l[1:])
            else:
                result.append(b'relglob:' + l)
    return result, warnings
48
48
49
49
# Teach Mercurial's ignore machinery to parse .gitignore files.
extensions.wrapfunction(matchmod, 'readpatternfile', readpatternfile)


# Translation table from pygit2 status flags to one-letter Mercurial
# dirstate states.  Only populated when pygit2 is importable.
_STATUS_MAP = {}
if pygit2:
    _STATUS_MAP.update(
        {
            pygit2.GIT_STATUS_CONFLICTED: b'm',
            pygit2.GIT_STATUS_CURRENT: b'n',
            pygit2.GIT_STATUS_IGNORED: b'?',
            pygit2.GIT_STATUS_INDEX_DELETED: b'r',
            pygit2.GIT_STATUS_INDEX_MODIFIED: b'n',
            pygit2.GIT_STATUS_INDEX_NEW: b'a',
            pygit2.GIT_STATUS_INDEX_RENAMED: b'a',
            pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n',
            pygit2.GIT_STATUS_WT_DELETED: b'r',
            pygit2.GIT_STATUS_WT_MODIFIED: b'n',
            pygit2.GIT_STATUS_WT_NEW: b'?',
            pygit2.GIT_STATUS_WT_RENAMED: b'a',
            pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
            pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
            pygit2.GIT_STATUS_INDEX_MODIFIED
            | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
        }
    )
72
72
73
73
@interfaceutil.implementer(intdirstate.idirstate)
class gitdirstate:
    """Mercurial ``idirstate`` implementation backed by a git repository.

    Dirstate calls are translated onto a pygit2 repository object
    (``self.git``).  Several operations are best-effort or unimplemented;
    see the TODO comments throughout.
    """

    def __init__(self, ui, vfs, gitrepo, use_dirstate_v2):
        self._ui = ui
        self._root = os.path.dirname(vfs.base)
        self._opener = vfs
        self.git = gitrepo
        self._plchangecallbacks = {}
        # TODO: context.poststatusfixup is bad and uses this attribute
        self._dirty = False
        self._mapcls = dirstatemap.dirstatemap
        self._use_dirstate_v2 = use_dirstate_v2

    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            sha1nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map

    def p1(self):
        """Return the raw node id of the first parent (git HEAD)."""
        try:
            return self.git.head.peel().id.raw
        except pygit2.GitError:
            # Typically happens when peeling HEAD fails, as in an
            # empty repository.
            return sha1nodeconstants.nullid

    def p2(self):
        # TODO: MERGE_HEAD? something like that, right?
        return sha1nodeconstants.nullid

    def setparents(self, p1, p2=None):
        """Point HEAD at ``p1``; merging (a non-null ``p2``) is unsupported."""
        if p2 is None:
            p2 = sha1nodeconstants.nullid
        assert p2 == sha1nodeconstants.nullid, b'TODO merging support'
        self.git.head.set_target(gitutil.togitnode(p1))

    @util.propertycache
    def identity(self):
        # Stat of the git index file, used to detect out-of-band changes.
        return util.filestat.frompath(
            os.path.join(self._root, b'.git', b'index')
        )

    def branch(self):
        # git branches are intentionally not exposed as Mercurial branches.
        return b'default'

    def parents(self):
        # TODO how on earth do we find p2 if a merge is in flight?
        return self.p1(), sha1nodeconstants.nullid

    def __iter__(self):
        return (pycompat.fsencode(f.path) for f in self.git.index)

    def items(self):
        for ie in self.git.index:
            yield ie.path, None  # value should be a DirstateItem

    # py2,3 compat forward
    iteritems = items

    def __getitem__(self, filename):
        try:
            gs = self.git.status_file(filename)
        except KeyError:
            return b'?'
        return _STATUS_MAP[gs]

    def __contains__(self, filename):
        try:
            gs = self.git.status_file(filename)
            return _STATUS_MAP[gs] != b'?'
        except KeyError:
            return False

    def status(self, match, subrepos, ignored, clean, unknown):
        """Return (fixup-needed, scmutil.status, mtime_boundary)."""
        listclean = clean
        # TODO handling of clean files - can we get that from git.status()?
        modified, added, removed, deleted, unknown, ignored, clean = (
            [],
            [],
            [],
            [],
            [],
            [],
            [],
        )

        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        gstatus = self.git.status()
        for path, status in gstatus.items():
            path = pycompat.fsencode(path)
            if not match(path):
                continue
            if status == pygit2.GIT_STATUS_IGNORED:
                if path.endswith(b'/'):
                    continue
                ignored.append(path)
            elif status in (
                pygit2.GIT_STATUS_WT_MODIFIED,
                pygit2.GIT_STATUS_INDEX_MODIFIED,
                pygit2.GIT_STATUS_WT_MODIFIED
                | pygit2.GIT_STATUS_INDEX_MODIFIED,
            ):
                modified.append(path)
            elif status == pygit2.GIT_STATUS_INDEX_NEW:
                added.append(path)
            elif status == pygit2.GIT_STATUS_WT_NEW:
                unknown.append(path)
            elif status == pygit2.GIT_STATUS_WT_DELETED:
                deleted.append(path)
            elif status == pygit2.GIT_STATUS_INDEX_DELETED:
                removed.append(path)
            else:
                raise error.Abort(
                    b'unhandled case: status for %r is %r' % (path, status)
                )

        if listclean:
            observed = set(
                modified + added + removed + deleted + unknown + ignored
            )
            index = self.git.index
            index.read()
            for entry in index:
                path = pycompat.fsencode(entry.path)
                if not match(path):
                    continue
                if path in observed:
                    continue  # already in some other set
                # Bug fix: the previous ``path[-1] == b'/'`` compared an int
                # to bytes on Python 3 and was always False, so directory
                # entries were never skipped; endswith() is also safe for
                # empty paths.
                if path.endswith(b'/'):
                    continue  # directory
                clean.append(path)

        # TODO are we really always sure of status here?
        return (
            False,
            scmutil.status(
                modified, added, removed, deleted, unknown, ignored, clean
            ),
            mtime_boundary,
        )

    def flagfunc(self, buildfallback):
        # TODO we can do better
        return buildfallback()

    def getcwd(self):
        # TODO is this a good way to do this?
        return os.path.dirname(
            os.path.dirname(pycompat.fsencode(self.git.path))
        )

    def get_entry(self, path):
        """return a DirstateItem for the associated path"""
        entry = self._map.get(path)
        if entry is None:
            return DirstateItem()
        return entry

    def normalize(self, path):
        normed = util.normcase(path)
        assert normed == path, b"TODO handling of case folding: %s != %s" % (
            normed,
            path,
        )
        return path

    @property
    def _checklink(self):
        return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path)))

    def copies(self):
        # TODO support copies?
        return {}

    # # TODO what the heck is this
    _filecache = set()

    def is_changing_parents(self):
        # TODO: we need to implement the context manager bits and
        # correctly stage/revert index edits.
        return False

    def is_changing_any(self):
        # TODO: we need to implement the context manager bits and
        # correctly stage/revert index edits.
        return False

    def write(self, tr):
        """Write the git index, deferring to the transaction when given."""
        # TODO: call parent change callbacks

        if tr:

            def writeinner(category):
                self.git.index.write()

            tr.addpending(b'gitdirstate', writeinner)
        else:
            self.git.index.write()

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        # TODO core dirstate does something about slashes here
        assert isinstance(f, bytes)
        r = util.pathto(self._root, cwd, f)
        return r

    def matches(self, match):
        for x in self.git.index:
            p = pycompat.fsencode(x.path)
            if match(p):
                yield p

    def set_clean(self, f, parentfiledata):
        """Mark a file normal and clean."""
        # TODO: for now we just let libgit2 re-stat the file. We can
        # clearly do better.

    def set_possibly_dirty(self, f):
        """Mark a file normal, but possibly dirty."""
        # TODO: for now we just let libgit2 re-stat the file. We can
        # clearly do better.

    def walk(self, match, subrepos, unknown, ignored, full=True):
        # TODO: we need to use .status() and not iterate the index,
        # because the index doesn't force a re-walk and so `hg add` of
        # a new file without an intervening call to status will
        # silently do nothing.
        r = {}
        cwd = self.getcwd()
        for path, status in self.git.status().items():
            if path.startswith('.hg/'):
                continue
            path = pycompat.fsencode(path)
            if not match(path):
                continue
            # TODO construct the stat info from the status object?
            try:
                s = os.stat(os.path.join(cwd, path))
            except FileNotFoundError:
                continue
            r[path] = s
        return r

    def set_tracked(self, f, reset_copy=False):
        """Start tracking ``f``; returns True if it was newly added."""
        # TODO: support copies and reset_copy=True
        uf = pycompat.fsdecode(f)
        if uf in self.git.index:
            return False
        index = self.git.index
        index.read()
        index.add(uf)
        index.write()
        return True

    def add(self, f):
        index = self.git.index
        index.read()
        index.add(pycompat.fsdecode(f))
        index.write()

    def drop(self, f):
        index = self.git.index
        index.read()
        fs = pycompat.fsdecode(f)
        if fs in index:
            index.remove(fs)
            index.write()

    def set_untracked(self, f):
        """Stop tracking ``f``; returns True if it was tracked before."""
        index = self.git.index
        index.read()
        fs = pycompat.fsdecode(f)
        if fs in index:
            index.remove(fs)
            index.write()
            return True
        return False

    def remove(self, f):
        index = self.git.index
        index.read()
        index.remove(pycompat.fsdecode(f))
        index.write()

    def copied(self, path):
        # TODO: track copies?
        return None

    def prefetch_parents(self):
        # TODO
        pass

    def update_file(self, *args, **kwargs):
        # TODO
        pass

    @contextlib.contextmanager
    def changing_parents(self, repo):
        # TODO: track this maybe?
        yield

    def addparentchangecallback(self, category, callback):
        # TODO: should this be added to the dirstate interface?
        self._plchangecallbacks[category] = callback

    def setbranch(self, branch, transaction):
        raise error.Abort(
            b'git repos do not support branches. try using bookmarks'
        )
@@ -1,1832 +1,1825 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16
16
17 from hgdemandimport import tracing
17 from hgdemandimport import tracing
18
18
19 from . import (
19 from . import (
20 dirstatemap,
20 dirstatemap,
21 encoding,
21 encoding,
22 error,
22 error,
23 match as matchmod,
23 match as matchmod,
24 node,
24 node,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 txnutil,
29 txnutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# True when the Rust implementation of dirstate-v2 is available.
HAS_FAST_DIRSTATE_V2 = rustmod is not None

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
55
52
56
53
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # Resolve ``fname`` relative to the repository's .hg directory.
        return obj._opener.join(fname)
62
59
63
60
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # Resolve ``fname`` relative to the working-directory root.
        return obj._join(fname)
69
66
70
67
def check_invalidated(func):
    """check that the func is called with a non-invalidated dirstate

    The dirstate is in an "invalidated state" after an error occured during its
    modification and remains so until we exited the top level scope that framed
    such change.
    """

    def wrap(self, *args, **kwargs):
        if self._invalidated_context:
            raise error.ProgrammingError(
                'calling `%s` after the dirstate was invalidated'
                % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
87
84
88
85
def requires_changing_parents(func):
    """Restrict ``func`` to run only inside a ``changing_parents`` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            raise error.ProgrammingError(
                'calling `%s` outside of a changing_parents context'
                % func.__name__
            )
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
98
95
99
96
def requires_changing_files(func):
    """Restrict ``func`` to run only inside a ``changing_files`` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_files:
            raise error.ProgrammingError(
                'calling `%s` outside of a `changing_files`' % func.__name__
            )
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
109
106
110
107
def requires_changing_any(func):
    """Restrict ``func`` to run only inside some ``changing_*`` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_any:
            raise error.ProgrammingError(
                'calling `%s` outside of a changing context' % func.__name__
            )
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
120
117
121
118
def requires_changing_files_or_status(func):
    """Restrict ``func`` to a ``changing_files`` or ``running_status`` scope."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_files or self._running_status > 0:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing_files '
            'or running_status context' % func.__name__
        )

    return check_invalidated(wrap)
134
131
135
132
# Tokens identifying which kind of change a `changing_*` context manages.
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
138
135
139
136
140 @interfaceutil.implementer(intdirstate.idirstate)
137 @interfaceutil.implementer(intdirstate.idirstate)
141 class dirstate:
138 class dirstate:
142
139
143 # used by largefile to avoid overwritting transaction callback
140 # used by largefile to avoid overwritting transaction callback
144 _tr_key_suffix = b''
141 _tr_key_suffix = b''
145
142
146 def __init__(
143 def __init__(
147 self,
144 self,
148 opener,
145 opener,
149 ui,
146 ui,
150 root,
147 root,
151 validate,
148 validate,
152 sparsematchfn,
149 sparsematchfn,
153 nodeconstants,
150 nodeconstants,
154 use_dirstate_v2,
151 use_dirstate_v2,
155 use_tracked_hint=False,
152 use_tracked_hint=False,
156 ):
153 ):
157 """Create a new dirstate object.
154 """Create a new dirstate object.
158
155
159 opener is an open()-like callable that can be used to open the
156 opener is an open()-like callable that can be used to open the
160 dirstate file; root is the root of the directory tracked by
157 dirstate file; root is the root of the directory tracked by
161 the dirstate.
158 the dirstate.
162 """
159 """
163 self._use_dirstate_v2 = use_dirstate_v2
160 self._use_dirstate_v2 = use_dirstate_v2
164 self._use_tracked_hint = use_tracked_hint
161 self._use_tracked_hint = use_tracked_hint
165 self._nodeconstants = nodeconstants
162 self._nodeconstants = nodeconstants
166 self._opener = opener
163 self._opener = opener
167 self._validate = validate
164 self._validate = validate
168 self._root = root
165 self._root = root
169 # Either build a sparse-matcher or None if sparse is disabled
166 # Either build a sparse-matcher or None if sparse is disabled
170 self._sparsematchfn = sparsematchfn
167 self._sparsematchfn = sparsematchfn
171 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
168 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
172 # UNC path pointing to root share (issue4557)
169 # UNC path pointing to root share (issue4557)
173 self._rootdir = pathutil.normasprefix(root)
170 self._rootdir = pathutil.normasprefix(root)
174 # True is any internal state may be different
171 # True is any internal state may be different
175 self._dirty = False
172 self._dirty = False
176 # True if the set of tracked file may be different
173 # True if the set of tracked file may be different
177 self._dirty_tracked_set = False
174 self._dirty_tracked_set = False
178 self._ui = ui
175 self._ui = ui
179 self._filecache = {}
176 self._filecache = {}
180 # nesting level of `changing_parents` context
177 # nesting level of `changing_parents` context
181 self._changing_level = 0
178 self._changing_level = 0
182 # the change currently underway
179 # the change currently underway
183 self._change_type = None
180 self._change_type = None
184 # number of open _running_status context
181 # number of open _running_status context
185 self._running_status = 0
182 self._running_status = 0
186 # True if the current dirstate changing operations have been
183 # True if the current dirstate changing operations have been
187 # invalidated (used to make sure all nested contexts have been exited)
184 # invalidated (used to make sure all nested contexts have been exited)
188 self._invalidated_context = False
185 self._invalidated_context = False
189 self._attached_to_a_transaction = False
186 self._attached_to_a_transaction = False
190 self._filename = b'dirstate'
187 self._filename = b'dirstate'
191 self._filename_th = b'dirstate-tracked-hint'
188 self._filename_th = b'dirstate-tracked-hint'
192 self._pendingfilename = b'%s.pending' % self._filename
189 self._pendingfilename = b'%s.pending' % self._filename
193 self._plchangecallbacks = {}
190 self._plchangecallbacks = {}
194 self._origpl = None
191 self._origpl = None
195 self._mapcls = dirstatemap.dirstatemap
192 self._mapcls = dirstatemap.dirstatemap
196 # Access and cache cwd early, so we don't access it for the first time
193 # Access and cache cwd early, so we don't access it for the first time
197 # after a working-copy update caused it to not exist (accessing it then
194 # after a working-copy update caused it to not exist (accessing it then
198 # raises an exception).
195 # raises an exception).
199 self._cwd
196 self._cwd
200
197
201 def refresh(self):
198 def refresh(self):
202 # XXX if this happens, you likely did not enter the `changing_xxx`
199 # XXX if this happens, you likely did not enter the `changing_xxx`
203 # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
200 # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
204 # `refresh`.
201 # `refresh`.
205 if self.is_changing_any:
202 if self.is_changing_any:
206 msg = "refreshing the dirstate in the middle of a change"
203 msg = "refreshing the dirstate in the middle of a change"
207 raise error.ProgrammingError(msg)
204 raise error.ProgrammingError(msg)
208 if '_branch' in vars(self):
205 if '_branch' in vars(self):
209 del self._branch
206 del self._branch
210 if '_map' in vars(self) and self._map.may_need_refresh():
207 if '_map' in vars(self) and self._map.may_need_refresh():
211 self.invalidate()
208 self.invalidate()
212
209
213 def prefetch_parents(self):
210 def prefetch_parents(self):
214 """make sure the parents are loaded
211 """make sure the parents are loaded
215
212
216 Used to avoid a race condition.
213 Used to avoid a race condition.
217 """
214 """
218 self._pl
215 self._pl
219
216
220 @contextlib.contextmanager
217 @contextlib.contextmanager
221 @check_invalidated
218 @check_invalidated
222 def running_status(self, repo):
219 def running_status(self, repo):
223 """Wrap a status operation
220 """Wrap a status operation
224
221
225 This context is not mutally exclusive with the `changing_*` context. It
222 This context is not mutally exclusive with the `changing_*` context. It
226 also do not warrant for the `wlock` to be taken.
223 also do not warrant for the `wlock` to be taken.
227
224
228 If the wlock is taken, this context will behave in a simple way, and
225 If the wlock is taken, this context will behave in a simple way, and
229 ensure the data are scheduled for write when leaving the top level
226 ensure the data are scheduled for write when leaving the top level
230 context.
227 context.
231
228
232 If the lock is not taken, it will only warrant that the data are either
229 If the lock is not taken, it will only warrant that the data are either
233 committed (written) and rolled back (invalidated) when exiting the top
230 committed (written) and rolled back (invalidated) when exiting the top
234 level context. The write/invalidate action must be performed by the
231 level context. The write/invalidate action must be performed by the
235 wrapped code.
232 wrapped code.
236
233
237
234
238 The expected logic is:
235 The expected logic is:
239
236
240 A: read the dirstate
237 A: read the dirstate
241 B: run status
238 B: run status
242 This might make the dirstate dirty by updating cache,
239 This might make the dirstate dirty by updating cache,
243 especially in Rust.
240 especially in Rust.
244 C: do more "post status fixup if relevant
241 C: do more "post status fixup if relevant
245 D: try to take the w-lock (this will invalidate the changes if they were raced)
242 D: try to take the w-lock (this will invalidate the changes if they were raced)
246 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
243 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
247 E1: elif lock was acquired β†’ write the changes
244 E1: elif lock was acquired β†’ write the changes
248 E2: else β†’ discard the changes
245 E2: else β†’ discard the changes
249 """
246 """
250 has_lock = repo.currentwlock() is not None
247 has_lock = repo.currentwlock() is not None
251 is_changing = self.is_changing_any
248 is_changing = self.is_changing_any
252 tr = repo.currenttransaction()
249 tr = repo.currenttransaction()
253 has_tr = tr is not None
250 has_tr = tr is not None
254 nested = bool(self._running_status)
251 nested = bool(self._running_status)
255
252
256 first_and_alone = not (is_changing or has_tr or nested)
253 first_and_alone = not (is_changing or has_tr or nested)
257
254
258 # enforce no change happened outside of a proper context.
255 # enforce no change happened outside of a proper context.
259 if first_and_alone and self._dirty:
256 if first_and_alone and self._dirty:
260 has_tr = repo.currenttransaction() is not None
257 has_tr = repo.currenttransaction() is not None
261 if not has_tr and self._changing_level == 0 and self._dirty:
258 if not has_tr and self._changing_level == 0 and self._dirty:
262 msg = "entering a status context, but dirstate is already dirty"
259 msg = "entering a status context, but dirstate is already dirty"
263 raise error.ProgrammingError(msg)
260 raise error.ProgrammingError(msg)
264
261
265 should_write = has_lock and not (nested or is_changing)
262 should_write = has_lock and not (nested or is_changing)
266
263
267 self._running_status += 1
264 self._running_status += 1
268 try:
265 try:
269 yield
266 yield
270 except Exception:
267 except Exception:
271 self.invalidate()
268 self.invalidate()
272 raise
269 raise
273 finally:
270 finally:
274 self._running_status -= 1
271 self._running_status -= 1
275 if self._invalidated_context:
272 if self._invalidated_context:
276 should_write = False
273 should_write = False
277 self.invalidate()
274 self.invalidate()
278
275
279 if should_write:
276 if should_write:
280 assert repo.currenttransaction() is tr
277 assert repo.currenttransaction() is tr
281 self.write(tr)
278 self.write(tr)
282 elif not has_lock:
279 elif not has_lock:
283 if self._dirty:
280 if self._dirty:
284 msg = b'dirstate dirty while exiting an isolated status context'
281 msg = b'dirstate dirty while exiting an isolated status context'
285 repo.ui.develwarn(msg)
282 repo.ui.develwarn(msg)
286 self.invalidate()
283 self.invalidate()
287
284
288 @contextlib.contextmanager
285 @contextlib.contextmanager
289 @check_invalidated
286 @check_invalidated
290 def _changing(self, repo, change_type):
287 def _changing(self, repo, change_type):
291 if repo.currentwlock() is None:
288 if repo.currentwlock() is None:
292 msg = b"trying to change the dirstate without holding the wlock"
289 msg = b"trying to change the dirstate without holding the wlock"
293 raise error.ProgrammingError(msg)
290 raise error.ProgrammingError(msg)
294
291
295 has_tr = repo.currenttransaction() is not None
292 has_tr = repo.currenttransaction() is not None
296 if not has_tr and self._changing_level == 0 and self._dirty:
293 if not has_tr and self._changing_level == 0 and self._dirty:
297 msg = b"entering a changing context, but dirstate is already dirty"
294 msg = b"entering a changing context, but dirstate is already dirty"
298 repo.ui.develwarn(msg)
295 repo.ui.develwarn(msg)
299
296
300 assert self._changing_level >= 0
297 assert self._changing_level >= 0
301 # different type of change are mutually exclusive
298 # different type of change are mutually exclusive
302 if self._change_type is None:
299 if self._change_type is None:
303 assert self._changing_level == 0
300 assert self._changing_level == 0
304 self._change_type = change_type
301 self._change_type = change_type
305 elif self._change_type != change_type:
302 elif self._change_type != change_type:
306 msg = (
303 msg = (
307 'trying to open "%s" dirstate-changing context while a "%s" is'
304 'trying to open "%s" dirstate-changing context while a "%s" is'
308 ' already open'
305 ' already open'
309 )
306 )
310 msg %= (change_type, self._change_type)
307 msg %= (change_type, self._change_type)
311 raise error.ProgrammingError(msg)
308 raise error.ProgrammingError(msg)
312 should_write = False
309 should_write = False
313 self._changing_level += 1
310 self._changing_level += 1
314 try:
311 try:
315 yield
312 yield
316 except: # re-raises
313 except: # re-raises
317 self.invalidate() # this will set `_invalidated_context`
314 self.invalidate() # this will set `_invalidated_context`
318 raise
315 raise
319 finally:
316 finally:
320 assert self._changing_level > 0
317 assert self._changing_level > 0
321 self._changing_level -= 1
318 self._changing_level -= 1
322 # If the dirstate is being invalidated, call invalidate again.
319 # If the dirstate is being invalidated, call invalidate again.
323 # This will throw away anything added by a upper context and
320 # This will throw away anything added by a upper context and
324 # reset the `_invalidated_context` flag when relevant
321 # reset the `_invalidated_context` flag when relevant
325 if self._changing_level <= 0:
322 if self._changing_level <= 0:
326 self._change_type = None
323 self._change_type = None
327 assert self._changing_level == 0
324 assert self._changing_level == 0
328 if self._invalidated_context:
325 if self._invalidated_context:
329 # make sure we invalidate anything an upper context might
326 # make sure we invalidate anything an upper context might
330 # have changed.
327 # have changed.
331 self.invalidate()
328 self.invalidate()
332 else:
329 else:
333 should_write = self._changing_level <= 0
330 should_write = self._changing_level <= 0
334 tr = repo.currenttransaction()
331 tr = repo.currenttransaction()
335 if has_tr != (tr is not None):
332 if has_tr != (tr is not None):
336 if has_tr:
333 if has_tr:
337 m = "transaction vanished while changing dirstate"
334 m = "transaction vanished while changing dirstate"
338 else:
335 else:
339 m = "transaction appeared while changing dirstate"
336 m = "transaction appeared while changing dirstate"
340 raise error.ProgrammingError(m)
337 raise error.ProgrammingError(m)
341 if should_write:
338 if should_write:
342 self.write(tr)
339 self.write(tr)
343
340
344 @contextlib.contextmanager
341 @contextlib.contextmanager
345 def changing_parents(self, repo):
342 def changing_parents(self, repo):
346 """Wrap a dirstate change related to a change of working copy parents
343 """Wrap a dirstate change related to a change of working copy parents
347
344
348 This context scopes a series of dirstate modifications that match an
345 This context scopes a series of dirstate modifications that match an
349 update of the working copy parents (typically `hg update`, `hg merge`
346 update of the working copy parents (typically `hg update`, `hg merge`
350 etc).
347 etc).
351
348
352 The dirstate's methods that perform this kind of modifications require
349 The dirstate's methods that perform this kind of modifications require
353 this context to be present before being called.
350 this context to be present before being called.
354 Such methods are decorated with `@requires_changing_parents`.
351 Such methods are decorated with `@requires_changing_parents`.
355
352
356 The new dirstate contents will be written to disk when the top-most
353 The new dirstate contents will be written to disk when the top-most
357 `changing_parents` context exits successfully. If an exception is
354 `changing_parents` context exits successfully. If an exception is
358 raised during a `changing_parents` context of any level, all changes
355 raised during a `changing_parents` context of any level, all changes
359 are invalidated. If this context is open within an open transaction,
356 are invalidated. If this context is open within an open transaction,
360 the dirstate writing is delayed until that transaction is successfully
357 the dirstate writing is delayed until that transaction is successfully
361 committed (and the dirstate is invalidated on transaction abort).
358 committed (and the dirstate is invalidated on transaction abort).
362
359
363 The `changing_parents` operation is mutually exclusive with the
360 The `changing_parents` operation is mutually exclusive with the
364 `changing_files` one.
361 `changing_files` one.
365 """
362 """
366 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
363 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
367 yield c
364 yield c
368
365
369 @contextlib.contextmanager
366 @contextlib.contextmanager
370 def changing_files(self, repo):
367 def changing_files(self, repo):
371 """Wrap a dirstate change related to the set of tracked files
368 """Wrap a dirstate change related to the set of tracked files
372
369
373 This context scopes a series of dirstate modifications that change the
370 This context scopes a series of dirstate modifications that change the
374 set of tracked files. (typically `hg add`, `hg remove` etc) or some
371 set of tracked files. (typically `hg add`, `hg remove` etc) or some
375 dirstate stored information (like `hg rename --after`) but preserve
372 dirstate stored information (like `hg rename --after`) but preserve
376 the working copy parents.
373 the working copy parents.
377
374
378 The dirstate's methods that perform this kind of modifications require
375 The dirstate's methods that perform this kind of modifications require
379 this context to be present before being called.
376 this context to be present before being called.
380 Such methods are decorated with `@requires_changing_files`.
377 Such methods are decorated with `@requires_changing_files`.
381
378
382 The new dirstate contents will be written to disk when the top-most
379 The new dirstate contents will be written to disk when the top-most
383 `changing_files` context exits successfully. If an exception is raised
380 `changing_files` context exits successfully. If an exception is raised
384 during a `changing_files` context of any level, all changes are
381 during a `changing_files` context of any level, all changes are
385 invalidated. If this context is open within an open transaction, the
382 invalidated. If this context is open within an open transaction, the
386 dirstate writing is delayed until that transaction is successfully
383 dirstate writing is delayed until that transaction is successfully
387 committed (and the dirstate is invalidated on transaction abort).
384 committed (and the dirstate is invalidated on transaction abort).
388
385
389 The `changing_files` operation is mutually exclusive with the
386 The `changing_files` operation is mutually exclusive with the
390 `changing_parents` one.
387 `changing_parents` one.
391 """
388 """
392 with self._changing(repo, CHANGE_TYPE_FILES) as c:
389 with self._changing(repo, CHANGE_TYPE_FILES) as c:
393 yield c
390 yield c
394
391
395 # here to help migration to the new code
392 # here to help migration to the new code
396 def parentchange(self):
393 def parentchange(self):
397 msg = (
394 msg = (
398 "Mercurial 6.4 and later requires call to "
395 "Mercurial 6.4 and later requires call to "
399 "`dirstate.changing_parents(repo)`"
396 "`dirstate.changing_parents(repo)`"
400 )
397 )
401 raise error.ProgrammingError(msg)
398 raise error.ProgrammingError(msg)
402
399
403 @property
400 @property
404 def is_changing_any(self):
401 def is_changing_any(self):
405 """Returns true if the dirstate is in the middle of a set of changes.
402 """Returns true if the dirstate is in the middle of a set of changes.
406
403
407 This returns True for any kind of change.
404 This returns True for any kind of change.
408 """
405 """
409 return self._changing_level > 0
406 return self._changing_level > 0
410
407
411 @property
408 @property
412 def is_changing_parents(self):
409 def is_changing_parents(self):
413 """Returns true if the dirstate is in the middle of a set of changes
410 """Returns true if the dirstate is in the middle of a set of changes
414 that modify the dirstate parent.
411 that modify the dirstate parent.
415 """
412 """
416 if self._changing_level <= 0:
413 if self._changing_level <= 0:
417 return False
414 return False
418 return self._change_type == CHANGE_TYPE_PARENTS
415 return self._change_type == CHANGE_TYPE_PARENTS
419
416
420 @property
417 @property
421 def is_changing_files(self):
418 def is_changing_files(self):
422 """Returns true if the dirstate is in the middle of a set of changes
419 """Returns true if the dirstate is in the middle of a set of changes
423 that modify the files tracked or their sources.
420 that modify the files tracked or their sources.
424 """
421 """
425 if self._changing_level <= 0:
422 if self._changing_level <= 0:
426 return False
423 return False
427 return self._change_type == CHANGE_TYPE_FILES
424 return self._change_type == CHANGE_TYPE_FILES
428
425
429 @propertycache
426 @propertycache
430 def _map(self):
427 def _map(self):
431 """Return the dirstate contents (see documentation for dirstatemap)."""
428 """Return the dirstate contents (see documentation for dirstatemap)."""
432 return self._mapcls(
429 return self._mapcls(
433 self._ui,
430 self._ui,
434 self._opener,
431 self._opener,
435 self._root,
432 self._root,
436 self._nodeconstants,
433 self._nodeconstants,
437 self._use_dirstate_v2,
434 self._use_dirstate_v2,
438 )
435 )
439
436
440 @property
437 @property
441 def _sparsematcher(self):
438 def _sparsematcher(self):
442 """The matcher for the sparse checkout.
439 """The matcher for the sparse checkout.
443
440
444 The working directory may not include every file from a manifest. The
441 The working directory may not include every file from a manifest. The
445 matcher obtained by this property will match a path if it is to be
442 matcher obtained by this property will match a path if it is to be
446 included in the working directory.
443 included in the working directory.
447
444
448 When sparse if disabled, return None.
445 When sparse if disabled, return None.
449 """
446 """
450 if self._sparsematchfn is None:
447 if self._sparsematchfn is None:
451 return None
448 return None
452 # TODO there is potential to cache this property. For now, the matcher
449 # TODO there is potential to cache this property. For now, the matcher
453 # is resolved on every access. (But the called function does use a
450 # is resolved on every access. (But the called function does use a
454 # cache to keep the lookup fast.)
451 # cache to keep the lookup fast.)
455 return self._sparsematchfn()
452 return self._sparsematchfn()
456
453
457 @repocache(b'branch')
454 @repocache(b'branch')
458 def _branch(self):
455 def _branch(self):
459 f = None
456 f = None
460 data = b''
457 data = b''
461 try:
458 try:
462 f, mode = txnutil.trypending(self._root, self._opener, b'branch')
459 f, mode = txnutil.trypending(self._root, self._opener, b'branch')
463 data = f.read().strip()
460 data = f.read().strip()
464 except FileNotFoundError:
461 except FileNotFoundError:
465 pass
462 pass
466 finally:
463 finally:
467 if f is not None:
464 if f is not None:
468 f.close()
465 f.close()
469 if not data:
466 if not data:
470 return b"default"
467 return b"default"
471 return data
468 return data
472
469
473 @property
470 @property
474 def _pl(self):
471 def _pl(self):
475 return self._map.parents()
472 return self._map.parents()
476
473
477 def hasdir(self, d):
474 def hasdir(self, d):
478 return self._map.hastrackeddir(d)
475 return self._map.hastrackeddir(d)
479
476
480 @rootcache(b'.hgignore')
477 @rootcache(b'.hgignore')
481 def _ignore(self):
478 def _ignore(self):
482 files = self._ignorefiles()
479 files = self._ignorefiles()
483 if not files:
480 if not files:
484 return matchmod.never()
481 return matchmod.never()
485
482
486 pats = [b'include:%s' % f for f in files]
483 pats = [b'include:%s' % f for f in files]
487 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
484 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
488
485
489 @propertycache
486 @propertycache
490 def _slash(self):
487 def _slash(self):
491 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
488 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
492
489
493 @propertycache
490 @propertycache
494 def _checklink(self):
491 def _checklink(self):
495 return util.checklink(self._root)
492 return util.checklink(self._root)
496
493
497 @propertycache
494 @propertycache
498 def _checkexec(self):
495 def _checkexec(self):
499 return bool(util.checkexec(self._root))
496 return bool(util.checkexec(self._root))
500
497
501 @propertycache
498 @propertycache
502 def _checkcase(self):
499 def _checkcase(self):
503 return not util.fscasesensitive(self._join(b'.hg'))
500 return not util.fscasesensitive(self._join(b'.hg'))
504
501
505 def _join(self, f):
502 def _join(self, f):
506 # much faster than os.path.join()
503 # much faster than os.path.join()
507 # it's safe because f is always a relative path
504 # it's safe because f is always a relative path
508 return self._rootdir + f
505 return self._rootdir + f
509
506
510 def flagfunc(self, buildfallback):
507 def flagfunc(self, buildfallback):
511 """build a callable that returns flags associated with a filename
508 """build a callable that returns flags associated with a filename
512
509
513 The information is extracted from three possible layers:
510 The information is extracted from three possible layers:
514 1. the file system if it supports the information
511 1. the file system if it supports the information
515 2. the "fallback" information stored in the dirstate if any
512 2. the "fallback" information stored in the dirstate if any
516 3. a more expensive mechanism inferring the flags from the parents.
513 3. a more expensive mechanism inferring the flags from the parents.
517 """
514 """
518
515
519 # small hack to cache the result of buildfallback()
516 # small hack to cache the result of buildfallback()
520 fallback_func = []
517 fallback_func = []
521
518
522 def get_flags(x):
519 def get_flags(x):
523 entry = None
520 entry = None
524 fallback_value = None
521 fallback_value = None
525 try:
522 try:
526 st = os.lstat(self._join(x))
523 st = os.lstat(self._join(x))
527 except OSError:
524 except OSError:
528 return b''
525 return b''
529
526
530 if self._checklink:
527 if self._checklink:
531 if util.statislink(st):
528 if util.statislink(st):
532 return b'l'
529 return b'l'
533 else:
530 else:
534 entry = self.get_entry(x)
531 entry = self.get_entry(x)
535 if entry.has_fallback_symlink:
532 if entry.has_fallback_symlink:
536 if entry.fallback_symlink:
533 if entry.fallback_symlink:
537 return b'l'
534 return b'l'
538 else:
535 else:
539 if not fallback_func:
536 if not fallback_func:
540 fallback_func.append(buildfallback())
537 fallback_func.append(buildfallback())
541 fallback_value = fallback_func[0](x)
538 fallback_value = fallback_func[0](x)
542 if b'l' in fallback_value:
539 if b'l' in fallback_value:
543 return b'l'
540 return b'l'
544
541
545 if self._checkexec:
542 if self._checkexec:
546 if util.statisexec(st):
543 if util.statisexec(st):
547 return b'x'
544 return b'x'
548 else:
545 else:
549 if entry is None:
546 if entry is None:
550 entry = self.get_entry(x)
547 entry = self.get_entry(x)
551 if entry.has_fallback_exec:
548 if entry.has_fallback_exec:
552 if entry.fallback_exec:
549 if entry.fallback_exec:
553 return b'x'
550 return b'x'
554 else:
551 else:
555 if fallback_value is None:
552 if fallback_value is None:
556 if not fallback_func:
553 if not fallback_func:
557 fallback_func.append(buildfallback())
554 fallback_func.append(buildfallback())
558 fallback_value = fallback_func[0](x)
555 fallback_value = fallback_func[0](x)
559 if b'x' in fallback_value:
556 if b'x' in fallback_value:
560 return b'x'
557 return b'x'
561 return b''
558 return b''
562
559
563 return get_flags
560 return get_flags
564
561
565 @propertycache
562 @propertycache
566 def _cwd(self):
563 def _cwd(self):
567 # internal config: ui.forcecwd
564 # internal config: ui.forcecwd
568 forcecwd = self._ui.config(b'ui', b'forcecwd')
565 forcecwd = self._ui.config(b'ui', b'forcecwd')
569 if forcecwd:
566 if forcecwd:
570 return forcecwd
567 return forcecwd
571 return encoding.getcwd()
568 return encoding.getcwd()
572
569
573 def getcwd(self):
570 def getcwd(self):
574 """Return the path from which a canonical path is calculated.
571 """Return the path from which a canonical path is calculated.
575
572
576 This path should be used to resolve file patterns or to convert
573 This path should be used to resolve file patterns or to convert
577 canonical paths back to file paths for display. It shouldn't be
574 canonical paths back to file paths for display. It shouldn't be
578 used to get real file paths. Use vfs functions instead.
575 used to get real file paths. Use vfs functions instead.
579 """
576 """
580 cwd = self._cwd
577 cwd = self._cwd
581 if cwd == self._root:
578 if cwd == self._root:
582 return b''
579 return b''
583 # self._root ends with a path separator if self._root is '/' or 'C:\'
580 # self._root ends with a path separator if self._root is '/' or 'C:\'
584 rootsep = self._root
581 rootsep = self._root
585 if not util.endswithsep(rootsep):
582 if not util.endswithsep(rootsep):
586 rootsep += pycompat.ossep
583 rootsep += pycompat.ossep
587 if cwd.startswith(rootsep):
584 if cwd.startswith(rootsep):
588 return cwd[len(rootsep) :]
585 return cwd[len(rootsep) :]
589 else:
586 else:
590 # we're outside the repo. return an absolute path.
587 # we're outside the repo. return an absolute path.
591 return cwd
588 return cwd
592
589
593 def pathto(self, f, cwd=None):
590 def pathto(self, f, cwd=None):
594 if cwd is None:
591 if cwd is None:
595 cwd = self.getcwd()
592 cwd = self.getcwd()
596 path = util.pathto(self._root, cwd, f)
593 path = util.pathto(self._root, cwd, f)
597 if self._slash:
594 if self._slash:
598 return util.pconvert(path)
595 return util.pconvert(path)
599 return path
596 return path
600
597
601 def get_entry(self, path):
598 def get_entry(self, path):
602 """return a DirstateItem for the associated path"""
599 """return a DirstateItem for the associated path"""
603 entry = self._map.get(path)
600 entry = self._map.get(path)
604 if entry is None:
601 if entry is None:
605 return DirstateItem()
602 return DirstateItem()
606 return entry
603 return entry
607
604
608 def __contains__(self, key):
605 def __contains__(self, key):
609 return key in self._map
606 return key in self._map
610
607
611 def __iter__(self):
608 def __iter__(self):
612 return iter(sorted(self._map))
609 return iter(sorted(self._map))
613
610
614 def items(self):
611 def items(self):
615 return self._map.items()
612 return self._map.items()
616
613
617 iteritems = items
614 iteritems = items
618
615
619 def parents(self):
616 def parents(self):
620 return [self._validate(p) for p in self._pl]
617 return [self._validate(p) for p in self._pl]
621
618
622 def p1(self):
619 def p1(self):
623 return self._validate(self._pl[0])
620 return self._validate(self._pl[0])
624
621
625 def p2(self):
622 def p2(self):
626 return self._validate(self._pl[1])
623 return self._validate(self._pl[1])
627
624
628 @property
625 @property
629 def in_merge(self):
626 def in_merge(self):
630 """True if a merge is in progress"""
627 """True if a merge is in progress"""
631 return self._pl[1] != self._nodeconstants.nullid
628 return self._pl[1] != self._nodeconstants.nullid
632
629
633 def branch(self):
630 def branch(self):
634 return encoding.tolocal(self._branch)
631 return encoding.tolocal(self._branch)
635
632
636 @requires_changing_parents
633 @requires_changing_parents
637 def setparents(self, p1, p2=None):
634 def setparents(self, p1, p2=None):
638 """Set dirstate parents to p1 and p2.
635 """Set dirstate parents to p1 and p2.
639
636
640 When moving from two parents to one, "merged" entries a
637 When moving from two parents to one, "merged" entries a
641 adjusted to normal and previous copy records discarded and
638 adjusted to normal and previous copy records discarded and
642 returned by the call.
639 returned by the call.
643
640
644 See localrepo.setparents()
641 See localrepo.setparents()
645 """
642 """
646 if p2 is None:
643 if p2 is None:
647 p2 = self._nodeconstants.nullid
644 p2 = self._nodeconstants.nullid
648 if self._changing_level == 0:
645 if self._changing_level == 0:
649 raise ValueError(
646 raise ValueError(
650 b"cannot set dirstate parent outside of "
647 b"cannot set dirstate parent outside of "
651 b"dirstate.changing_parents context manager"
648 b"dirstate.changing_parents context manager"
652 )
649 )
653
650
654 self._dirty = True
651 self._dirty = True
655 oldp2 = self._pl[1]
652 oldp2 = self._pl[1]
656 if self._origpl is None:
653 if self._origpl is None:
657 self._origpl = self._pl
654 self._origpl = self._pl
658 nullid = self._nodeconstants.nullid
655 nullid = self._nodeconstants.nullid
659 # True if we need to fold p2 related state back to a linear case
656 # True if we need to fold p2 related state back to a linear case
660 fold_p2 = oldp2 != nullid and p2 == nullid
657 fold_p2 = oldp2 != nullid and p2 == nullid
661 return self._map.setparents(p1, p2, fold_p2=fold_p2)
658 return self._map.setparents(p1, p2, fold_p2=fold_p2)
662
659
663 def setbranch(self, branch, transaction=SENTINEL):
660 def setbranch(self, branch, transaction):
664 self.__class__._branch.set(self, encoding.fromlocal(branch))
661 self.__class__._branch.set(self, encoding.fromlocal(branch))
665 if transaction is SENTINEL:
666 msg = b"setbranch needs a `transaction` argument"
667 self._ui.deprecwarn(msg, b'6.5')
668 transaction = None
669 if transaction is not None:
662 if transaction is not None:
670 self._setup_tr_abort(transaction)
663 self._setup_tr_abort(transaction)
671 transaction.addfilegenerator(
664 transaction.addfilegenerator(
672 b'dirstate-3-branch%s' % self._tr_key_suffix,
665 b'dirstate-3-branch%s' % self._tr_key_suffix,
673 (b'branch',),
666 (b'branch',),
674 self._write_branch,
667 self._write_branch,
675 location=b'plain',
668 location=b'plain',
676 post_finalize=True,
669 post_finalize=True,
677 )
670 )
678 return
671 return
679
672
680 vfs = self._opener
673 vfs = self._opener
681 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
674 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
682 self._write_branch(f)
675 self._write_branch(f)
683 # make sure filecache has the correct stat info for _branch after
676 # make sure filecache has the correct stat info for _branch after
684 # replacing the underlying file
677 # replacing the underlying file
685 #
678 #
686 # XXX do we actually need this,
679 # XXX do we actually need this,
687 # refreshing the attribute is quite cheap
680 # refreshing the attribute is quite cheap
688 ce = self._filecache[b'_branch']
681 ce = self._filecache[b'_branch']
689 if ce:
682 if ce:
690 ce.refresh()
683 ce.refresh()
691
684
692 def _write_branch(self, file_obj):
685 def _write_branch(self, file_obj):
693 file_obj.write(self._branch + b'\n')
686 file_obj.write(self._branch + b'\n')
694
687
695 def invalidate(self):
688 def invalidate(self):
696 """Causes the next access to reread the dirstate.
689 """Causes the next access to reread the dirstate.
697
690
698 This is different from localrepo.invalidatedirstate() because it always
691 This is different from localrepo.invalidatedirstate() because it always
699 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
692 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
700 check whether the dirstate has changed before rereading it."""
693 check whether the dirstate has changed before rereading it."""
701
694
702 for a in ("_map", "_branch", "_ignore"):
695 for a in ("_map", "_branch", "_ignore"):
703 if a in self.__dict__:
696 if a in self.__dict__:
704 delattr(self, a)
697 delattr(self, a)
705 self._dirty = False
698 self._dirty = False
706 self._dirty_tracked_set = False
699 self._dirty_tracked_set = False
707 self._invalidated_context = bool(
700 self._invalidated_context = bool(
708 self._changing_level > 0
701 self._changing_level > 0
709 or self._attached_to_a_transaction
702 or self._attached_to_a_transaction
710 or self._running_status
703 or self._running_status
711 )
704 )
712 self._origpl = None
705 self._origpl = None
713
706
714 @requires_changing_any
707 @requires_changing_any
715 def copy(self, source, dest):
708 def copy(self, source, dest):
716 """Mark dest as a copy of source. Unmark dest if source is None."""
709 """Mark dest as a copy of source. Unmark dest if source is None."""
717 if source == dest:
710 if source == dest:
718 return
711 return
719 self._dirty = True
712 self._dirty = True
720 if source is not None:
713 if source is not None:
721 self._check_sparse(source)
714 self._check_sparse(source)
722 self._map.copymap[dest] = source
715 self._map.copymap[dest] = source
723 else:
716 else:
724 self._map.copymap.pop(dest, None)
717 self._map.copymap.pop(dest, None)
725
718
726 def copied(self, file):
719 def copied(self, file):
727 return self._map.copymap.get(file, None)
720 return self._map.copymap.get(file, None)
728
721
729 def copies(self):
722 def copies(self):
730 return self._map.copymap
723 return self._map.copymap
731
724
732 @requires_changing_files
725 @requires_changing_files
733 def set_tracked(self, filename, reset_copy=False):
726 def set_tracked(self, filename, reset_copy=False):
734 """a "public" method for generic code to mark a file as tracked
727 """a "public" method for generic code to mark a file as tracked
735
728
736 This function is to be called outside of "update/merge" case. For
729 This function is to be called outside of "update/merge" case. For
737 example by a command like `hg add X`.
730 example by a command like `hg add X`.
738
731
739 if reset_copy is set, any existing copy information will be dropped.
732 if reset_copy is set, any existing copy information will be dropped.
740
733
741 return True the file was previously untracked, False otherwise.
734 return True the file was previously untracked, False otherwise.
742 """
735 """
743 self._dirty = True
736 self._dirty = True
744 entry = self._map.get(filename)
737 entry = self._map.get(filename)
745 if entry is None or not entry.tracked:
738 if entry is None or not entry.tracked:
746 self._check_new_tracked_filename(filename)
739 self._check_new_tracked_filename(filename)
747 pre_tracked = self._map.set_tracked(filename)
740 pre_tracked = self._map.set_tracked(filename)
748 if reset_copy:
741 if reset_copy:
749 self._map.copymap.pop(filename, None)
742 self._map.copymap.pop(filename, None)
750 if pre_tracked:
743 if pre_tracked:
751 self._dirty_tracked_set = True
744 self._dirty_tracked_set = True
752 return pre_tracked
745 return pre_tracked
753
746
754 @requires_changing_files
747 @requires_changing_files
755 def set_untracked(self, filename):
748 def set_untracked(self, filename):
756 """a "public" method for generic code to mark a file as untracked
749 """a "public" method for generic code to mark a file as untracked
757
750
758 This function is to be called outside of "update/merge" case. For
751 This function is to be called outside of "update/merge" case. For
759 example by a command like `hg remove X`.
752 example by a command like `hg remove X`.
760
753
761 return True the file was previously tracked, False otherwise.
754 return True the file was previously tracked, False otherwise.
762 """
755 """
763 ret = self._map.set_untracked(filename)
756 ret = self._map.set_untracked(filename)
764 if ret:
757 if ret:
765 self._dirty = True
758 self._dirty = True
766 self._dirty_tracked_set = True
759 self._dirty_tracked_set = True
767 return ret
760 return ret
768
761
769 @requires_changing_files_or_status
762 @requires_changing_files_or_status
770 def set_clean(self, filename, parentfiledata):
763 def set_clean(self, filename, parentfiledata):
771 """record that the current state of the file on disk is known to be clean"""
764 """record that the current state of the file on disk is known to be clean"""
772 self._dirty = True
765 self._dirty = True
773 if not self._map[filename].tracked:
766 if not self._map[filename].tracked:
774 self._check_new_tracked_filename(filename)
767 self._check_new_tracked_filename(filename)
775 (mode, size, mtime) = parentfiledata
768 (mode, size, mtime) = parentfiledata
776 self._map.set_clean(filename, mode, size, mtime)
769 self._map.set_clean(filename, mode, size, mtime)
777
770
    @requires_changing_files_or_status
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        # the map mutation must be persisted, so flag the dirstate dirty first
        self._dirty = True
        self._map.set_possibly_dirty(filename)
783
776
    @requires_changing_parents
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjust the dirstate to a new parent after an history
        rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.changing_parents(repo):` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
            # NOTE: falls through to the reset_state call below, same as the
            # untracked-in-p1 case that does not early-return
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
823
816
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.changing_parents` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # thin decorated wrapper: the shared implementation lives in
        # _update_file so hacky_extension_update_file can reuse it
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
853
846
    def hacky_extension_update_file(self, *args, **kwargs):
        """NEVER USE THIS, YOU DO NOT NEED IT

        This function is a variant of "update_file" to be called by a small set
        of extensions, it also adjust the internal state of file, but can be
        called outside an `changing_parents` context.

        A very small number of extension meddle with the working copy content
        in a way that requires to adjust the dirstate accordingly. At the time
        this command is written they are :
        - keyword,
        - largefile,
        PLEASE DO NOT GROW THIS LIST ANY FURTHER.

        This function could probably be replaced by more semantic one (like
        "adjust expected size" or "always revalidate file content", etc)
        however at the time where this is writen, this is too much of a detour
        to be considered.
        """
        # accepts any "changing" context (parents/files) or a status run,
        # unlike update_file which requires changing_parents specifically
        if not (self._changing_level > 0 or self._running_status > 0):
            msg = "requires a changes context"
            raise error.ProgrammingError(msg)
        self._update_file(
            *args,
            **kwargs,
        )
880
873
    def _update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        # Shared implementation behind update_file and
        # hacky_extension_update_file; context checking is done by callers.

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the tracked-file set changed; the tracked-hint key must be
            # refreshed on write
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
912
905
    def _check_new_tracked_filename(self, filename):
        """Sanity-check a filename about to become tracked.

        Aborts when the name is invalid, when it clashes with an already
        tracked directory, when one of its parent directories clashes with a
        tracked file, or when it falls outside the sparse checkout.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                # a tracked directory implies no file entry at this prefix;
                # no need to look further up
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
929
922
930 def _check_sparse(self, filename):
923 def _check_sparse(self, filename):
931 """Check that a filename is inside the sparse profile"""
924 """Check that a filename is inside the sparse profile"""
932 sparsematch = self._sparsematcher
925 sparsematch = self._sparsematcher
933 if sparsematch is not None and not sparsematch.always():
926 if sparsematch is not None and not sparsematch.always():
934 if not sparsematch(filename):
927 if not sparsematch(filename):
935 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
928 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
936 hint = _(
929 hint = _(
937 b'include file with `hg debugsparse --include <pattern>` or use '
930 b'include file with `hg debugsparse --include <pattern>` or use '
938 b'`hg add -s <file>` to include file directory while adding'
931 b'`hg add -s <file>` to include file directory while adding'
939 )
932 )
940 raise error.Abort(msg % filename, hint=hint)
933 raise error.Abort(msg % filename, hint=hint)
941
934
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Discover the on-disk case spelling of ``path`` and cache it.

        ``normed`` is the case-normalized form of ``path``; when the path
        exists the discovered folding is stored in ``storemap`` (a file or
        directory fold map).  Recurses into _normalize for leading directory
        components.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only cache spellings confirmed against the filesystem
            storemap[normed] = folded

        return folded
967
960
968 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
961 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
969 normed = util.normcase(path)
962 normed = util.normcase(path)
970 folded = self._map.filefoldmap.get(normed, None)
963 folded = self._map.filefoldmap.get(normed, None)
971 if folded is None:
964 if folded is None:
972 if isknown:
965 if isknown:
973 folded = path
966 folded = path
974 else:
967 else:
975 folded = self._discoverpath(
968 folded = self._discoverpath(
976 path, normed, ignoremissing, exists, self._map.filefoldmap
969 path, normed, ignoremissing, exists, self._map.filefoldmap
977 )
970 )
978 return folded
971 return folded
979
972
980 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
973 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
981 normed = util.normcase(path)
974 normed = util.normcase(path)
982 folded = self._map.filefoldmap.get(normed, None)
975 folded = self._map.filefoldmap.get(normed, None)
983 if folded is None:
976 if folded is None:
984 folded = self._map.dirfoldmap.get(normed, None)
977 folded = self._map.dirfoldmap.get(normed, None)
985 if folded is None:
978 if folded is None:
986 if isknown:
979 if isknown:
987 folded = path
980 folded = path
988 else:
981 else:
989 # store discovered result in dirfoldmap so that future
982 # store discovered result in dirfoldmap so that future
990 # normalizefile calls don't start matching directories
983 # normalizefile calls don't start matching directories
991 folded = self._discoverpath(
984 folded = self._discoverpath(
992 path, normed, ignoremissing, exists, self._map.dirfoldmap
985 path, normed, ignoremissing, exists, self._map.dirfoldmap
993 )
986 )
994 return folded
987 return folded
995
988
996 def normalize(self, path, isknown=False, ignoremissing=False):
989 def normalize(self, path, isknown=False, ignoremissing=False):
997 """
990 """
998 normalize the case of a pathname when on a casefolding filesystem
991 normalize the case of a pathname when on a casefolding filesystem
999
992
1000 isknown specifies whether the filename came from walking the
993 isknown specifies whether the filename came from walking the
1001 disk, to avoid extra filesystem access.
994 disk, to avoid extra filesystem access.
1002
995
1003 If ignoremissing is True, missing path are returned
996 If ignoremissing is True, missing path are returned
1004 unchanged. Otherwise, we try harder to normalize possibly
997 unchanged. Otherwise, we try harder to normalize possibly
1005 existing path components.
998 existing path components.
1006
999
1007 The normalized case is determined based on the following precedence:
1000 The normalized case is determined based on the following precedence:
1008
1001
1009 - version of name already stored in the dirstate
1002 - version of name already stored in the dirstate
1010 - version of name stored on disk
1003 - version of name stored on disk
1011 - version provided via command arguments
1004 - version provided via command arguments
1012 """
1005 """
1013
1006
1014 if self._checkcase:
1007 if self._checkcase:
1015 return self._normalize(path, isknown, ignoremissing)
1008 return self._normalize(path, isknown, ignoremissing)
1016 return path
1009 return path
1017
1010
    # XXX this method is barely used, as a result:
    # - its semantic is unclear
    # - do we really needs it ?
    @requires_changing_parents
    def clear(self):
        """Drop every entry from the dirstate map and mark it dirty."""
        self._map.clear()
        self._dirty = True
1025
1018
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild the dirstate for ``parent`` from ``allfiles``.

        When ``changedfiles`` is None the whole dirstate is rebuilt;
        otherwise only the listed files are re-examined (looked up when
        present in ``allfiles``, dropped when not).
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the pre-rebuild parents so parent-change callbacks
            # fire on the next write
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
1078
1071
1079 def _setup_tr_abort(self, tr):
1072 def _setup_tr_abort(self, tr):
1080 """make sure we invalidate the current change on abort"""
1073 """make sure we invalidate the current change on abort"""
1081 if tr is None:
1074 if tr is None:
1082 return
1075 return
1083
1076
1084 def on_abort(tr):
1077 def on_abort(tr):
1085 self._attached_to_a_transaction = False
1078 self._attached_to_a_transaction = False
1086 self.invalidate()
1079 self.invalidate()
1087
1080
1088 tr.addabort(
1081 tr.addabort(
1089 b'dirstate-invalidate%s' % self._tr_key_suffix,
1082 b'dirstate-invalidate%s' % self._tr_key_suffix,
1090 on_abort,
1083 on_abort,
1091 )
1084 )
1092
1085
1093 def write(self, tr):
1086 def write(self, tr):
1094 if not self._dirty:
1087 if not self._dirty:
1095 return
1088 return
1096 # make sure we don't request a write of invalidated content
1089 # make sure we don't request a write of invalidated content
1097 # XXX move before the dirty check once `unlock` stop calling `write`
1090 # XXX move before the dirty check once `unlock` stop calling `write`
1098 assert not self._invalidated_context
1091 assert not self._invalidated_context
1099
1092
1100 write_key = self._use_tracked_hint and self._dirty_tracked_set
1093 write_key = self._use_tracked_hint and self._dirty_tracked_set
1101 if tr:
1094 if tr:
1102
1095
1103 self._setup_tr_abort(tr)
1096 self._setup_tr_abort(tr)
1104 self._attached_to_a_transaction = True
1097 self._attached_to_a_transaction = True
1105
1098
1106 def on_success(f):
1099 def on_success(f):
1107 self._attached_to_a_transaction = False
1100 self._attached_to_a_transaction = False
1108 self._writedirstate(tr, f),
1101 self._writedirstate(tr, f),
1109
1102
1110 # delay writing in-memory changes out
1103 # delay writing in-memory changes out
1111 tr.addfilegenerator(
1104 tr.addfilegenerator(
1112 b'dirstate-1-main%s' % self._tr_key_suffix,
1105 b'dirstate-1-main%s' % self._tr_key_suffix,
1113 (self._filename,),
1106 (self._filename,),
1114 on_success,
1107 on_success,
1115 location=b'plain',
1108 location=b'plain',
1116 post_finalize=True,
1109 post_finalize=True,
1117 )
1110 )
1118 if write_key:
1111 if write_key:
1119 tr.addfilegenerator(
1112 tr.addfilegenerator(
1120 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1113 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1121 (self._filename_th,),
1114 (self._filename_th,),
1122 lambda f: self._write_tracked_hint(tr, f),
1115 lambda f: self._write_tracked_hint(tr, f),
1123 location=b'plain',
1116 location=b'plain',
1124 post_finalize=True,
1117 post_finalize=True,
1125 )
1118 )
1126 return
1119 return
1127
1120
1128 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1121 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1129 with file(self._filename) as f:
1122 with file(self._filename) as f:
1130 self._writedirstate(tr, f)
1123 self._writedirstate(tr, f)
1131 if write_key:
1124 if write_key:
1132 # we update the key-file after writing to make sure reader have a
1125 # we update the key-file after writing to make sure reader have a
1133 # key that match the newly written content
1126 # key that match the newly written content
1134 with file(self._filename_th) as f:
1127 with file(self._filename_th) as f:
1135 self._write_tracked_hint(tr, f)
1128 self._write_tracked_hint(tr, f)
1136
1129
1137 def delete_tracked_hint(self):
1130 def delete_tracked_hint(self):
1138 """remove the tracked_hint file
1131 """remove the tracked_hint file
1139
1132
1140 To be used by format downgrades operation"""
1133 To be used by format downgrades operation"""
1141 self._opener.unlink(self._filename_th)
1134 self._opener.unlink(self._filename_th)
1142 self._use_tracked_hint = False
1135 self._use_tracked_hint = False
1143
1136
1144 def addparentchangecallback(self, category, callback):
1137 def addparentchangecallback(self, category, callback):
1145 """add a callback to be called when the wd parents are changed
1138 """add a callback to be called when the wd parents are changed
1146
1139
1147 Callback will be called with the following arguments:
1140 Callback will be called with the following arguments:
1148 dirstate, (oldp1, oldp2), (newp1, newp2)
1141 dirstate, (oldp1, oldp2), (newp1, newp2)
1149
1142
1150 Category is a unique identifier to allow overwriting an old callback
1143 Category is a unique identifier to allow overwriting an old callback
1151 with a newer callback.
1144 with a newer callback.
1152 """
1145 """
1153 self._plchangecallbacks[category] = callback
1146 self._plchangecallbacks[category] = callback
1154
1147
1155 def _writedirstate(self, tr, st):
1148 def _writedirstate(self, tr, st):
1156 # make sure we don't write invalidated content
1149 # make sure we don't write invalidated content
1157 assert not self._invalidated_context
1150 assert not self._invalidated_context
1158 # notify callbacks about parents change
1151 # notify callbacks about parents change
1159 if self._origpl is not None and self._origpl != self._pl:
1152 if self._origpl is not None and self._origpl != self._pl:
1160 for c, callback in sorted(self._plchangecallbacks.items()):
1153 for c, callback in sorted(self._plchangecallbacks.items()):
1161 callback(self, self._origpl, self._pl)
1154 callback(self, self._origpl, self._pl)
1162 self._origpl = None
1155 self._origpl = None
1163 self._map.write(tr, st)
1156 self._map.write(tr, st)
1164 self._dirty = False
1157 self._dirty = False
1165 self._dirty_tracked_set = False
1158 self._dirty_tracked_set = False
1166
1159
    def _write_tracked_hint(self, tr, f):
        # a fresh random key lets readers cheaply detect that the tracked set
        # changed since they last looked
        key = node.hex(uuid.uuid4().bytes)
        f.write(b"1\n%s\n" % key)  # 1 is the format version
1170
1163
1171 def _dirignore(self, f):
1164 def _dirignore(self, f):
1172 if self._ignore(f):
1165 if self._ignore(f):
1173 return True
1166 return True
1174 for p in pathutil.finddirs(f):
1167 for p in pathutil.finddirs(f):
1175 if self._ignore(p):
1168 if self._ignore(p):
1176 return True
1169 return True
1177 return False
1170 return False
1178
1171
1179 def _ignorefiles(self):
1172 def _ignorefiles(self):
1180 files = []
1173 files = []
1181 if os.path.exists(self._join(b'.hgignore')):
1174 if os.path.exists(self._join(b'.hgignore')):
1182 files.append(self._join(b'.hgignore'))
1175 files.append(self._join(b'.hgignore'))
1183 for name, path in self._ui.configitems(b"ui"):
1176 for name, path in self._ui.configitems(b"ui"):
1184 if name == b'ignore' or name.startswith(b'ignore.'):
1177 if name == b'ignore' or name.startswith(b'ignore.'):
1185 # we need to use os.path.join here rather than self._join
1178 # we need to use os.path.join here rather than self._join
1186 # because path is arbitrary and user-specified
1179 # because path is arbitrary and user-specified
1187 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1180 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1188 return files
1181 return files
1189
1182
    def _ignorefileandline(self, f):
        """Return ``(ignorefile, lineno, line)`` for the rule ignoring ``f``.

        Walks every ignore file breadth-first, following ``subinclude``
        directives, and returns the first pattern matching ``f``.
        Returns ``(None, -1, b"")`` when no pattern matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced file unless already processed
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1211
1204
1212 def _walkexplicit(self, match, subrepos):
1205 def _walkexplicit(self, match, subrepos):
1213 """Get stat data about the files explicitly specified by match.
1206 """Get stat data about the files explicitly specified by match.
1214
1207
1215 Return a triple (results, dirsfound, dirsnotfound).
1208 Return a triple (results, dirsfound, dirsnotfound).
1216 - results is a mapping from filename to stat result. It also contains
1209 - results is a mapping from filename to stat result. It also contains
1217 listings mapping subrepos and .hg to None.
1210 listings mapping subrepos and .hg to None.
1218 - dirsfound is a list of files found to be directories.
1211 - dirsfound is a list of files found to be directories.
1219 - dirsnotfound is a list of files that the dirstate thinks are
1212 - dirsnotfound is a list of files that the dirstate thinks are
1220 directories and that were not found."""
1213 directories and that were not found."""
1221
1214
1222 def badtype(mode):
1215 def badtype(mode):
1223 kind = _(b'unknown')
1216 kind = _(b'unknown')
1224 if stat.S_ISCHR(mode):
1217 if stat.S_ISCHR(mode):
1225 kind = _(b'character device')
1218 kind = _(b'character device')
1226 elif stat.S_ISBLK(mode):
1219 elif stat.S_ISBLK(mode):
1227 kind = _(b'block device')
1220 kind = _(b'block device')
1228 elif stat.S_ISFIFO(mode):
1221 elif stat.S_ISFIFO(mode):
1229 kind = _(b'fifo')
1222 kind = _(b'fifo')
1230 elif stat.S_ISSOCK(mode):
1223 elif stat.S_ISSOCK(mode):
1231 kind = _(b'socket')
1224 kind = _(b'socket')
1232 elif stat.S_ISDIR(mode):
1225 elif stat.S_ISDIR(mode):
1233 kind = _(b'directory')
1226 kind = _(b'directory')
1234 return _(b'unsupported file type (type is %s)') % kind
1227 return _(b'unsupported file type (type is %s)') % kind
1235
1228
1236 badfn = match.bad
1229 badfn = match.bad
1237 dmap = self._map
1230 dmap = self._map
1238 lstat = os.lstat
1231 lstat = os.lstat
1239 getkind = stat.S_IFMT
1232 getkind = stat.S_IFMT
1240 dirkind = stat.S_IFDIR
1233 dirkind = stat.S_IFDIR
1241 regkind = stat.S_IFREG
1234 regkind = stat.S_IFREG
1242 lnkkind = stat.S_IFLNK
1235 lnkkind = stat.S_IFLNK
1243 join = self._join
1236 join = self._join
1244 dirsfound = []
1237 dirsfound = []
1245 foundadd = dirsfound.append
1238 foundadd = dirsfound.append
1246 dirsnotfound = []
1239 dirsnotfound = []
1247 notfoundadd = dirsnotfound.append
1240 notfoundadd = dirsnotfound.append
1248
1241
1249 if not match.isexact() and self._checkcase:
1242 if not match.isexact() and self._checkcase:
1250 normalize = self._normalize
1243 normalize = self._normalize
1251 else:
1244 else:
1252 normalize = None
1245 normalize = None
1253
1246
1254 files = sorted(match.files())
1247 files = sorted(match.files())
1255 subrepos.sort()
1248 subrepos.sort()
1256 i, j = 0, 0
1249 i, j = 0, 0
1257 while i < len(files) and j < len(subrepos):
1250 while i < len(files) and j < len(subrepos):
1258 subpath = subrepos[j] + b"/"
1251 subpath = subrepos[j] + b"/"
1259 if files[i] < subpath:
1252 if files[i] < subpath:
1260 i += 1
1253 i += 1
1261 continue
1254 continue
1262 while i < len(files) and files[i].startswith(subpath):
1255 while i < len(files) and files[i].startswith(subpath):
1263 del files[i]
1256 del files[i]
1264 j += 1
1257 j += 1
1265
1258
1266 if not files or b'' in files:
1259 if not files or b'' in files:
1267 files = [b'']
1260 files = [b'']
1268 # constructing the foldmap is expensive, so don't do it for the
1261 # constructing the foldmap is expensive, so don't do it for the
1269 # common case where files is ['']
1262 # common case where files is ['']
1270 normalize = None
1263 normalize = None
1271 results = dict.fromkeys(subrepos)
1264 results = dict.fromkeys(subrepos)
1272 results[b'.hg'] = None
1265 results[b'.hg'] = None
1273
1266
1274 for ff in files:
1267 for ff in files:
1275 if normalize:
1268 if normalize:
1276 nf = normalize(ff, False, True)
1269 nf = normalize(ff, False, True)
1277 else:
1270 else:
1278 nf = ff
1271 nf = ff
1279 if nf in results:
1272 if nf in results:
1280 continue
1273 continue
1281
1274
1282 try:
1275 try:
1283 st = lstat(join(nf))
1276 st = lstat(join(nf))
1284 kind = getkind(st.st_mode)
1277 kind = getkind(st.st_mode)
1285 if kind == dirkind:
1278 if kind == dirkind:
1286 if nf in dmap:
1279 if nf in dmap:
1287 # file replaced by dir on disk but still in dirstate
1280 # file replaced by dir on disk but still in dirstate
1288 results[nf] = None
1281 results[nf] = None
1289 foundadd((nf, ff))
1282 foundadd((nf, ff))
1290 elif kind == regkind or kind == lnkkind:
1283 elif kind == regkind or kind == lnkkind:
1291 results[nf] = st
1284 results[nf] = st
1292 else:
1285 else:
1293 badfn(ff, badtype(kind))
1286 badfn(ff, badtype(kind))
1294 if nf in dmap:
1287 if nf in dmap:
1295 results[nf] = None
1288 results[nf] = None
1296 except (OSError) as inst:
1289 except (OSError) as inst:
1297 # nf not found on disk - it is dirstate only
1290 # nf not found on disk - it is dirstate only
1298 if nf in dmap: # does it exactly match a missing file?
1291 if nf in dmap: # does it exactly match a missing file?
1299 results[nf] = None
1292 results[nf] = None
1300 else: # does it match a missing directory?
1293 else: # does it match a missing directory?
1301 if self._map.hasdir(nf):
1294 if self._map.hasdir(nf):
1302 notfoundadd(nf)
1295 notfoundadd(nf)
1303 else:
1296 else:
1304 badfn(ff, encoding.strtolocal(inst.strerror))
1297 badfn(ff, encoding.strtolocal(inst.strerror))
1305
1298
1306 # match.files() may contain explicitly-specified paths that shouldn't
1299 # match.files() may contain explicitly-specified paths that shouldn't
1307 # be taken; drop them from the list of files found. dirsfound/notfound
1300 # be taken; drop them from the list of files found. dirsfound/notfound
1308 # aren't filtered here because they will be tested later.
1301 # aren't filtered here because they will be tested later.
1309 if match.anypats():
1302 if match.anypats():
1310 for f in list(results):
1303 for f in list(results):
1311 if f == b'.hg' or f in subrepos:
1304 if f == b'.hg' or f in subrepos:
1312 # keep sentinel to disable further out-of-repo walks
1305 # keep sentinel to disable further out-of-repo walks
1313 continue
1306 continue
1314 if not match(f):
1307 if not match(f):
1315 del results[f]
1308 del results[f]
1316
1309
1317 # Case insensitive filesystems cannot rely on lstat() failing to detect
1310 # Case insensitive filesystems cannot rely on lstat() failing to detect
1318 # a case-only rename. Prune the stat object for any file that does not
1311 # a case-only rename. Prune the stat object for any file that does not
1319 # match the case in the filesystem, if there are multiple files that
1312 # match the case in the filesystem, if there are multiple files that
1320 # normalize to the same path.
1313 # normalize to the same path.
1321 if match.isexact() and self._checkcase:
1314 if match.isexact() and self._checkcase:
1322 normed = {}
1315 normed = {}
1323
1316
1324 for f, st in results.items():
1317 for f, st in results.items():
1325 if st is None:
1318 if st is None:
1326 continue
1319 continue
1327
1320
1328 nc = util.normcase(f)
1321 nc = util.normcase(f)
1329 paths = normed.get(nc)
1322 paths = normed.get(nc)
1330
1323
1331 if paths is None:
1324 if paths is None:
1332 paths = set()
1325 paths = set()
1333 normed[nc] = paths
1326 normed[nc] = paths
1334
1327
1335 paths.add(f)
1328 paths.add(f)
1336
1329
1337 for norm, paths in normed.items():
1330 for norm, paths in normed.items():
1338 if len(paths) > 1:
1331 if len(paths) > 1:
1339 for path in paths:
1332 for path in paths:
1340 folded = self._discoverpath(
1333 folded = self._discoverpath(
1341 path, norm, True, None, self._map.dirfoldmap
1334 path, norm, True, None, self._map.dirfoldmap
1342 )
1335 )
1343 if path != folded:
1336 if path != folded:
1344 results[path] = None
1337 results[path] = None
1345
1338
1346 return results, dirsfound, dirsnotfound
1339 return results, dirsfound, dirsnotfound
1347
1340
1348 def walk(self, match, subrepos, unknown, ignored, full=True):
1341 def walk(self, match, subrepos, unknown, ignored, full=True):
1349 """
1342 """
1350 Walk recursively through the directory tree, finding all files
1343 Walk recursively through the directory tree, finding all files
1351 matched by match.
1344 matched by match.
1352
1345
1353 If full is False, maybe skip some known-clean files.
1346 If full is False, maybe skip some known-clean files.
1354
1347
1355 Return a dict mapping filename to stat-like object (either
1348 Return a dict mapping filename to stat-like object (either
1356 mercurial.osutil.stat instance or return value of os.stat()).
1349 mercurial.osutil.stat instance or return value of os.stat()).
1357
1350
1358 """
1351 """
1359 # full is a flag that extensions that hook into walk can use -- this
1352 # full is a flag that extensions that hook into walk can use -- this
1360 # implementation doesn't use it at all. This satisfies the contract
1353 # implementation doesn't use it at all. This satisfies the contract
1361 # because we only guarantee a "maybe".
1354 # because we only guarantee a "maybe".
1362
1355
1363 if ignored:
1356 if ignored:
1364 ignore = util.never
1357 ignore = util.never
1365 dirignore = util.never
1358 dirignore = util.never
1366 elif unknown:
1359 elif unknown:
1367 ignore = self._ignore
1360 ignore = self._ignore
1368 dirignore = self._dirignore
1361 dirignore = self._dirignore
1369 else:
1362 else:
1370 # if not unknown and not ignored, drop dir recursion and step 2
1363 # if not unknown and not ignored, drop dir recursion and step 2
1371 ignore = util.always
1364 ignore = util.always
1372 dirignore = util.always
1365 dirignore = util.always
1373
1366
1374 if self._sparsematchfn is not None:
1367 if self._sparsematchfn is not None:
1375 em = matchmod.exact(match.files())
1368 em = matchmod.exact(match.files())
1376 sm = matchmod.unionmatcher([self._sparsematcher, em])
1369 sm = matchmod.unionmatcher([self._sparsematcher, em])
1377 match = matchmod.intersectmatchers(match, sm)
1370 match = matchmod.intersectmatchers(match, sm)
1378
1371
1379 matchfn = match.matchfn
1372 matchfn = match.matchfn
1380 matchalways = match.always()
1373 matchalways = match.always()
1381 matchtdir = match.traversedir
1374 matchtdir = match.traversedir
1382 dmap = self._map
1375 dmap = self._map
1383 listdir = util.listdir
1376 listdir = util.listdir
1384 lstat = os.lstat
1377 lstat = os.lstat
1385 dirkind = stat.S_IFDIR
1378 dirkind = stat.S_IFDIR
1386 regkind = stat.S_IFREG
1379 regkind = stat.S_IFREG
1387 lnkkind = stat.S_IFLNK
1380 lnkkind = stat.S_IFLNK
1388 join = self._join
1381 join = self._join
1389
1382
1390 exact = skipstep3 = False
1383 exact = skipstep3 = False
1391 if match.isexact(): # match.exact
1384 if match.isexact(): # match.exact
1392 exact = True
1385 exact = True
1393 dirignore = util.always # skip step 2
1386 dirignore = util.always # skip step 2
1394 elif match.prefix(): # match.match, no patterns
1387 elif match.prefix(): # match.match, no patterns
1395 skipstep3 = True
1388 skipstep3 = True
1396
1389
1397 if not exact and self._checkcase:
1390 if not exact and self._checkcase:
1398 normalize = self._normalize
1391 normalize = self._normalize
1399 normalizefile = self._normalizefile
1392 normalizefile = self._normalizefile
1400 skipstep3 = False
1393 skipstep3 = False
1401 else:
1394 else:
1402 normalize = self._normalize
1395 normalize = self._normalize
1403 normalizefile = None
1396 normalizefile = None
1404
1397
1405 # step 1: find all explicit files
1398 # step 1: find all explicit files
1406 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1399 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1407 if matchtdir:
1400 if matchtdir:
1408 for d in work:
1401 for d in work:
1409 matchtdir(d[0])
1402 matchtdir(d[0])
1410 for d in dirsnotfound:
1403 for d in dirsnotfound:
1411 matchtdir(d)
1404 matchtdir(d)
1412
1405
1413 skipstep3 = skipstep3 and not (work or dirsnotfound)
1406 skipstep3 = skipstep3 and not (work or dirsnotfound)
1414 work = [d for d in work if not dirignore(d[0])]
1407 work = [d for d in work if not dirignore(d[0])]
1415
1408
1416 # step 2: visit subdirectories
1409 # step 2: visit subdirectories
1417 def traverse(work, alreadynormed):
1410 def traverse(work, alreadynormed):
1418 wadd = work.append
1411 wadd = work.append
1419 while work:
1412 while work:
1420 tracing.counter('dirstate.walk work', len(work))
1413 tracing.counter('dirstate.walk work', len(work))
1421 nd = work.pop()
1414 nd = work.pop()
1422 visitentries = match.visitchildrenset(nd)
1415 visitentries = match.visitchildrenset(nd)
1423 if not visitentries:
1416 if not visitentries:
1424 continue
1417 continue
1425 if visitentries == b'this' or visitentries == b'all':
1418 if visitentries == b'this' or visitentries == b'all':
1426 visitentries = None
1419 visitentries = None
1427 skip = None
1420 skip = None
1428 if nd != b'':
1421 if nd != b'':
1429 skip = b'.hg'
1422 skip = b'.hg'
1430 try:
1423 try:
1431 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1424 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1432 entries = listdir(join(nd), stat=True, skip=skip)
1425 entries = listdir(join(nd), stat=True, skip=skip)
1433 except (PermissionError, FileNotFoundError) as inst:
1426 except (PermissionError, FileNotFoundError) as inst:
1434 match.bad(
1427 match.bad(
1435 self.pathto(nd), encoding.strtolocal(inst.strerror)
1428 self.pathto(nd), encoding.strtolocal(inst.strerror)
1436 )
1429 )
1437 continue
1430 continue
1438 for f, kind, st in entries:
1431 for f, kind, st in entries:
1439 # Some matchers may return files in the visitentries set,
1432 # Some matchers may return files in the visitentries set,
1440 # instead of 'this', if the matcher explicitly mentions them
1433 # instead of 'this', if the matcher explicitly mentions them
1441 # and is not an exactmatcher. This is acceptable; we do not
1434 # and is not an exactmatcher. This is acceptable; we do not
1442 # make any hard assumptions about file-or-directory below
1435 # make any hard assumptions about file-or-directory below
1443 # based on the presence of `f` in visitentries. If
1436 # based on the presence of `f` in visitentries. If
1444 # visitchildrenset returned a set, we can always skip the
1437 # visitchildrenset returned a set, we can always skip the
1445 # entries *not* in the set it provided regardless of whether
1438 # entries *not* in the set it provided regardless of whether
1446 # they're actually a file or a directory.
1439 # they're actually a file or a directory.
1447 if visitentries and f not in visitentries:
1440 if visitentries and f not in visitentries:
1448 continue
1441 continue
1449 if normalizefile:
1442 if normalizefile:
1450 # even though f might be a directory, we're only
1443 # even though f might be a directory, we're only
1451 # interested in comparing it to files currently in the
1444 # interested in comparing it to files currently in the
1452 # dmap -- therefore normalizefile is enough
1445 # dmap -- therefore normalizefile is enough
1453 nf = normalizefile(
1446 nf = normalizefile(
1454 nd and (nd + b"/" + f) or f, True, True
1447 nd and (nd + b"/" + f) or f, True, True
1455 )
1448 )
1456 else:
1449 else:
1457 nf = nd and (nd + b"/" + f) or f
1450 nf = nd and (nd + b"/" + f) or f
1458 if nf not in results:
1451 if nf not in results:
1459 if kind == dirkind:
1452 if kind == dirkind:
1460 if not ignore(nf):
1453 if not ignore(nf):
1461 if matchtdir:
1454 if matchtdir:
1462 matchtdir(nf)
1455 matchtdir(nf)
1463 wadd(nf)
1456 wadd(nf)
1464 if nf in dmap and (matchalways or matchfn(nf)):
1457 if nf in dmap and (matchalways or matchfn(nf)):
1465 results[nf] = None
1458 results[nf] = None
1466 elif kind == regkind or kind == lnkkind:
1459 elif kind == regkind or kind == lnkkind:
1467 if nf in dmap:
1460 if nf in dmap:
1468 if matchalways or matchfn(nf):
1461 if matchalways or matchfn(nf):
1469 results[nf] = st
1462 results[nf] = st
1470 elif (matchalways or matchfn(nf)) and not ignore(
1463 elif (matchalways or matchfn(nf)) and not ignore(
1471 nf
1464 nf
1472 ):
1465 ):
1473 # unknown file -- normalize if necessary
1466 # unknown file -- normalize if necessary
1474 if not alreadynormed:
1467 if not alreadynormed:
1475 nf = normalize(nf, False, True)
1468 nf = normalize(nf, False, True)
1476 results[nf] = st
1469 results[nf] = st
1477 elif nf in dmap and (matchalways or matchfn(nf)):
1470 elif nf in dmap and (matchalways or matchfn(nf)):
1478 results[nf] = None
1471 results[nf] = None
1479
1472
1480 for nd, d in work:
1473 for nd, d in work:
1481 # alreadynormed means that processwork doesn't have to do any
1474 # alreadynormed means that processwork doesn't have to do any
1482 # expensive directory normalization
1475 # expensive directory normalization
1483 alreadynormed = not normalize or nd == d
1476 alreadynormed = not normalize or nd == d
1484 traverse([d], alreadynormed)
1477 traverse([d], alreadynormed)
1485
1478
1486 for s in subrepos:
1479 for s in subrepos:
1487 del results[s]
1480 del results[s]
1488 del results[b'.hg']
1481 del results[b'.hg']
1489
1482
1490 # step 3: visit remaining files from dmap
1483 # step 3: visit remaining files from dmap
1491 if not skipstep3 and not exact:
1484 if not skipstep3 and not exact:
1492 # If a dmap file is not in results yet, it was either
1485 # If a dmap file is not in results yet, it was either
1493 # a) not matching matchfn b) ignored, c) missing, or d) under a
1486 # a) not matching matchfn b) ignored, c) missing, or d) under a
1494 # symlink directory.
1487 # symlink directory.
1495 if not results and matchalways:
1488 if not results and matchalways:
1496 visit = [f for f in dmap]
1489 visit = [f for f in dmap]
1497 else:
1490 else:
1498 visit = [f for f in dmap if f not in results and matchfn(f)]
1491 visit = [f for f in dmap if f not in results and matchfn(f)]
1499 visit.sort()
1492 visit.sort()
1500
1493
1501 if unknown:
1494 if unknown:
1502 # unknown == True means we walked all dirs under the roots
1495 # unknown == True means we walked all dirs under the roots
1503 # that wasn't ignored, and everything that matched was stat'ed
1496 # that wasn't ignored, and everything that matched was stat'ed
1504 # and is already in results.
1497 # and is already in results.
1505 # The rest must thus be ignored or under a symlink.
1498 # The rest must thus be ignored or under a symlink.
1506 audit_path = pathutil.pathauditor(self._root, cached=True)
1499 audit_path = pathutil.pathauditor(self._root, cached=True)
1507
1500
1508 for nf in iter(visit):
1501 for nf in iter(visit):
1509 # If a stat for the same file was already added with a
1502 # If a stat for the same file was already added with a
1510 # different case, don't add one for this, since that would
1503 # different case, don't add one for this, since that would
1511 # make it appear as if the file exists under both names
1504 # make it appear as if the file exists under both names
1512 # on disk.
1505 # on disk.
1513 if (
1506 if (
1514 normalizefile
1507 normalizefile
1515 and normalizefile(nf, True, True) in results
1508 and normalizefile(nf, True, True) in results
1516 ):
1509 ):
1517 results[nf] = None
1510 results[nf] = None
1518 # Report ignored items in the dmap as long as they are not
1511 # Report ignored items in the dmap as long as they are not
1519 # under a symlink directory.
1512 # under a symlink directory.
1520 elif audit_path.check(nf):
1513 elif audit_path.check(nf):
1521 try:
1514 try:
1522 results[nf] = lstat(join(nf))
1515 results[nf] = lstat(join(nf))
1523 # file was just ignored, no links, and exists
1516 # file was just ignored, no links, and exists
1524 except OSError:
1517 except OSError:
1525 # file doesn't exist
1518 # file doesn't exist
1526 results[nf] = None
1519 results[nf] = None
1527 else:
1520 else:
1528 # It's either missing or under a symlink directory
1521 # It's either missing or under a symlink directory
1529 # which we in this case report as missing
1522 # which we in this case report as missing
1530 results[nf] = None
1523 results[nf] = None
1531 else:
1524 else:
1532 # We may not have walked the full directory tree above,
1525 # We may not have walked the full directory tree above,
1533 # so stat and check everything we missed.
1526 # so stat and check everything we missed.
1534 iv = iter(visit)
1527 iv = iter(visit)
1535 for st in util.statfiles([join(i) for i in visit]):
1528 for st in util.statfiles([join(i) for i in visit]):
1536 results[next(iv)] = st
1529 results[next(iv)] = st
1537 return results
1530 return results
1538
1531
1539 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1532 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1540 if self._sparsematchfn is not None:
1533 if self._sparsematchfn is not None:
1541 em = matchmod.exact(matcher.files())
1534 em = matchmod.exact(matcher.files())
1542 sm = matchmod.unionmatcher([self._sparsematcher, em])
1535 sm = matchmod.unionmatcher([self._sparsematcher, em])
1543 matcher = matchmod.intersectmatchers(matcher, sm)
1536 matcher = matchmod.intersectmatchers(matcher, sm)
1544 # Force Rayon (Rust parallelism library) to respect the number of
1537 # Force Rayon (Rust parallelism library) to respect the number of
1545 # workers. This is a temporary workaround until Rust code knows
1538 # workers. This is a temporary workaround until Rust code knows
1546 # how to read the config file.
1539 # how to read the config file.
1547 numcpus = self._ui.configint(b"worker", b"numcpus")
1540 numcpus = self._ui.configint(b"worker", b"numcpus")
1548 if numcpus is not None:
1541 if numcpus is not None:
1549 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1542 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1550
1543
1551 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1544 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1552 if not workers_enabled:
1545 if not workers_enabled:
1553 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1546 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1554
1547
1555 (
1548 (
1556 lookup,
1549 lookup,
1557 modified,
1550 modified,
1558 added,
1551 added,
1559 removed,
1552 removed,
1560 deleted,
1553 deleted,
1561 clean,
1554 clean,
1562 ignored,
1555 ignored,
1563 unknown,
1556 unknown,
1564 warnings,
1557 warnings,
1565 bad,
1558 bad,
1566 traversed,
1559 traversed,
1567 dirty,
1560 dirty,
1568 ) = rustmod.status(
1561 ) = rustmod.status(
1569 self._map._map,
1562 self._map._map,
1570 matcher,
1563 matcher,
1571 self._rootdir,
1564 self._rootdir,
1572 self._ignorefiles(),
1565 self._ignorefiles(),
1573 self._checkexec,
1566 self._checkexec,
1574 bool(list_clean),
1567 bool(list_clean),
1575 bool(list_ignored),
1568 bool(list_ignored),
1576 bool(list_unknown),
1569 bool(list_unknown),
1577 bool(matcher.traversedir),
1570 bool(matcher.traversedir),
1578 )
1571 )
1579
1572
1580 self._dirty |= dirty
1573 self._dirty |= dirty
1581
1574
1582 if matcher.traversedir:
1575 if matcher.traversedir:
1583 for dir in traversed:
1576 for dir in traversed:
1584 matcher.traversedir(dir)
1577 matcher.traversedir(dir)
1585
1578
1586 if self._ui.warn:
1579 if self._ui.warn:
1587 for item in warnings:
1580 for item in warnings:
1588 if isinstance(item, tuple):
1581 if isinstance(item, tuple):
1589 file_path, syntax = item
1582 file_path, syntax = item
1590 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1583 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1591 file_path,
1584 file_path,
1592 syntax,
1585 syntax,
1593 )
1586 )
1594 self._ui.warn(msg)
1587 self._ui.warn(msg)
1595 else:
1588 else:
1596 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1589 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1597 self._ui.warn(
1590 self._ui.warn(
1598 msg
1591 msg
1599 % (
1592 % (
1600 pathutil.canonpath(
1593 pathutil.canonpath(
1601 self._rootdir, self._rootdir, item
1594 self._rootdir, self._rootdir, item
1602 ),
1595 ),
1603 b"No such file or directory",
1596 b"No such file or directory",
1604 )
1597 )
1605 )
1598 )
1606
1599
1607 for fn, message in bad:
1600 for fn, message in bad:
1608 matcher.bad(fn, encoding.strtolocal(message))
1601 matcher.bad(fn, encoding.strtolocal(message))
1609
1602
1610 status = scmutil.status(
1603 status = scmutil.status(
1611 modified=modified,
1604 modified=modified,
1612 added=added,
1605 added=added,
1613 removed=removed,
1606 removed=removed,
1614 deleted=deleted,
1607 deleted=deleted,
1615 unknown=unknown,
1608 unknown=unknown,
1616 ignored=ignored,
1609 ignored=ignored,
1617 clean=clean,
1610 clean=clean,
1618 )
1611 )
1619 return (lookup, status)
1612 return (lookup, status)
1620
1613
1621 def status(self, match, subrepos, ignored, clean, unknown):
1614 def status(self, match, subrepos, ignored, clean, unknown):
1622 """Determine the status of the working copy relative to the
1615 """Determine the status of the working copy relative to the
1623 dirstate and return a pair of (unsure, status), where status is of type
1616 dirstate and return a pair of (unsure, status), where status is of type
1624 scmutil.status and:
1617 scmutil.status and:
1625
1618
1626 unsure:
1619 unsure:
1627 files that might have been modified since the dirstate was
1620 files that might have been modified since the dirstate was
1628 written, but need to be read to be sure (size is the same
1621 written, but need to be read to be sure (size is the same
1629 but mtime differs)
1622 but mtime differs)
1630 status.modified:
1623 status.modified:
1631 files that have definitely been modified since the dirstate
1624 files that have definitely been modified since the dirstate
1632 was written (different size or mode)
1625 was written (different size or mode)
1633 status.clean:
1626 status.clean:
1634 files that have definitely not been modified since the
1627 files that have definitely not been modified since the
1635 dirstate was written
1628 dirstate was written
1636 """
1629 """
1637 if not self._running_status:
1630 if not self._running_status:
1638 msg = "Calling `status` outside a `running_status` context"
1631 msg = "Calling `status` outside a `running_status` context"
1639 raise error.ProgrammingError(msg)
1632 raise error.ProgrammingError(msg)
1640 listignored, listclean, listunknown = ignored, clean, unknown
1633 listignored, listclean, listunknown = ignored, clean, unknown
1641 lookup, modified, added, unknown, ignored = [], [], [], [], []
1634 lookup, modified, added, unknown, ignored = [], [], [], [], []
1642 removed, deleted, clean = [], [], []
1635 removed, deleted, clean = [], [], []
1643
1636
1644 dmap = self._map
1637 dmap = self._map
1645 dmap.preload()
1638 dmap.preload()
1646
1639
1647 use_rust = True
1640 use_rust = True
1648
1641
1649 allowed_matchers = (
1642 allowed_matchers = (
1650 matchmod.alwaysmatcher,
1643 matchmod.alwaysmatcher,
1651 matchmod.differencematcher,
1644 matchmod.differencematcher,
1652 matchmod.exactmatcher,
1645 matchmod.exactmatcher,
1653 matchmod.includematcher,
1646 matchmod.includematcher,
1654 matchmod.intersectionmatcher,
1647 matchmod.intersectionmatcher,
1655 matchmod.nevermatcher,
1648 matchmod.nevermatcher,
1656 matchmod.unionmatcher,
1649 matchmod.unionmatcher,
1657 )
1650 )
1658
1651
1659 if rustmod is None:
1652 if rustmod is None:
1660 use_rust = False
1653 use_rust = False
1661 elif self._checkcase:
1654 elif self._checkcase:
1662 # Case-insensitive filesystems are not handled yet
1655 # Case-insensitive filesystems are not handled yet
1663 use_rust = False
1656 use_rust = False
1664 elif subrepos:
1657 elif subrepos:
1665 use_rust = False
1658 use_rust = False
1666 elif not isinstance(match, allowed_matchers):
1659 elif not isinstance(match, allowed_matchers):
1667 # Some matchers have yet to be implemented
1660 # Some matchers have yet to be implemented
1668 use_rust = False
1661 use_rust = False
1669
1662
1670 # Get the time from the filesystem so we can disambiguate files that
1663 # Get the time from the filesystem so we can disambiguate files that
1671 # appear modified in the present or future.
1664 # appear modified in the present or future.
1672 try:
1665 try:
1673 mtime_boundary = timestamp.get_fs_now(self._opener)
1666 mtime_boundary = timestamp.get_fs_now(self._opener)
1674 except OSError:
1667 except OSError:
1675 # In largefiles or readonly context
1668 # In largefiles or readonly context
1676 mtime_boundary = None
1669 mtime_boundary = None
1677
1670
1678 if use_rust:
1671 if use_rust:
1679 try:
1672 try:
1680 res = self._rust_status(
1673 res = self._rust_status(
1681 match, listclean, listignored, listunknown
1674 match, listclean, listignored, listunknown
1682 )
1675 )
1683 return res + (mtime_boundary,)
1676 return res + (mtime_boundary,)
1684 except rustmod.FallbackError:
1677 except rustmod.FallbackError:
1685 pass
1678 pass
1686
1679
1687 def noop(f):
1680 def noop(f):
1688 pass
1681 pass
1689
1682
1690 dcontains = dmap.__contains__
1683 dcontains = dmap.__contains__
1691 dget = dmap.__getitem__
1684 dget = dmap.__getitem__
1692 ladd = lookup.append # aka "unsure"
1685 ladd = lookup.append # aka "unsure"
1693 madd = modified.append
1686 madd = modified.append
1694 aadd = added.append
1687 aadd = added.append
1695 uadd = unknown.append if listunknown else noop
1688 uadd = unknown.append if listunknown else noop
1696 iadd = ignored.append if listignored else noop
1689 iadd = ignored.append if listignored else noop
1697 radd = removed.append
1690 radd = removed.append
1698 dadd = deleted.append
1691 dadd = deleted.append
1699 cadd = clean.append if listclean else noop
1692 cadd = clean.append if listclean else noop
1700 mexact = match.exact
1693 mexact = match.exact
1701 dirignore = self._dirignore
1694 dirignore = self._dirignore
1702 checkexec = self._checkexec
1695 checkexec = self._checkexec
1703 checklink = self._checklink
1696 checklink = self._checklink
1704 copymap = self._map.copymap
1697 copymap = self._map.copymap
1705
1698
1706 # We need to do full walks when either
1699 # We need to do full walks when either
1707 # - we're listing all clean files, or
1700 # - we're listing all clean files, or
1708 # - match.traversedir does something, because match.traversedir should
1701 # - match.traversedir does something, because match.traversedir should
1709 # be called for every dir in the working dir
1702 # be called for every dir in the working dir
1710 full = listclean or match.traversedir is not None
1703 full = listclean or match.traversedir is not None
1711 for fn, st in self.walk(
1704 for fn, st in self.walk(
1712 match, subrepos, listunknown, listignored, full=full
1705 match, subrepos, listunknown, listignored, full=full
1713 ).items():
1706 ).items():
1714 if not dcontains(fn):
1707 if not dcontains(fn):
1715 if (listignored or mexact(fn)) and dirignore(fn):
1708 if (listignored or mexact(fn)) and dirignore(fn):
1716 if listignored:
1709 if listignored:
1717 iadd(fn)
1710 iadd(fn)
1718 else:
1711 else:
1719 uadd(fn)
1712 uadd(fn)
1720 continue
1713 continue
1721
1714
1722 t = dget(fn)
1715 t = dget(fn)
1723 mode = t.mode
1716 mode = t.mode
1724 size = t.size
1717 size = t.size
1725
1718
1726 if not st and t.tracked:
1719 if not st and t.tracked:
1727 dadd(fn)
1720 dadd(fn)
1728 elif t.p2_info:
1721 elif t.p2_info:
1729 madd(fn)
1722 madd(fn)
1730 elif t.added:
1723 elif t.added:
1731 aadd(fn)
1724 aadd(fn)
1732 elif t.removed:
1725 elif t.removed:
1733 radd(fn)
1726 radd(fn)
1734 elif t.tracked:
1727 elif t.tracked:
1735 if not checklink and t.has_fallback_symlink:
1728 if not checklink and t.has_fallback_symlink:
1736 # If the file system does not support symlink, the mode
1729 # If the file system does not support symlink, the mode
1737 # might not be correctly stored in the dirstate, so do not
1730 # might not be correctly stored in the dirstate, so do not
1738 # trust it.
1731 # trust it.
1739 ladd(fn)
1732 ladd(fn)
1740 elif not checkexec and t.has_fallback_exec:
1733 elif not checkexec and t.has_fallback_exec:
1741 # If the file system does not support exec bits, the mode
1734 # If the file system does not support exec bits, the mode
1742 # might not be correctly stored in the dirstate, so do not
1735 # might not be correctly stored in the dirstate, so do not
1743 # trust it.
1736 # trust it.
1744 ladd(fn)
1737 ladd(fn)
1745 elif (
1738 elif (
1746 size >= 0
1739 size >= 0
1747 and (
1740 and (
1748 (size != st.st_size and size != st.st_size & _rangemask)
1741 (size != st.st_size and size != st.st_size & _rangemask)
1749 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1742 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1750 )
1743 )
1751 or fn in copymap
1744 or fn in copymap
1752 ):
1745 ):
1753 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1746 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1754 # issue6456: Size returned may be longer due to
1747 # issue6456: Size returned may be longer due to
1755 # encryption on EXT-4 fscrypt, undecided.
1748 # encryption on EXT-4 fscrypt, undecided.
1756 ladd(fn)
1749 ladd(fn)
1757 else:
1750 else:
1758 madd(fn)
1751 madd(fn)
1759 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1752 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1760 # There might be a change in the future if for example the
1753 # There might be a change in the future if for example the
1761 # internal clock is off, but this is a case where the issues
1754 # internal clock is off, but this is a case where the issues
1762 # the user would face would be a lot worse and there is
1755 # the user would face would be a lot worse and there is
1763 # nothing we can really do.
1756 # nothing we can really do.
1764 ladd(fn)
1757 ladd(fn)
1765 elif listclean:
1758 elif listclean:
1766 cadd(fn)
1759 cadd(fn)
1767 status = scmutil.status(
1760 status = scmutil.status(
1768 modified, added, removed, deleted, unknown, ignored, clean
1761 modified, added, removed, deleted, unknown, ignored, clean
1769 )
1762 )
1770 return (lookup, status, mtime_boundary)
1763 return (lookup, status, mtime_boundary)
1771
1764
1772 def matches(self, match):
1765 def matches(self, match):
1773 """
1766 """
1774 return files in the dirstate (in whatever state) filtered by match
1767 return files in the dirstate (in whatever state) filtered by match
1775 """
1768 """
1776 dmap = self._map
1769 dmap = self._map
1777 if rustmod is not None:
1770 if rustmod is not None:
1778 dmap = self._map._map
1771 dmap = self._map._map
1779
1772
1780 if match.always():
1773 if match.always():
1781 return dmap.keys()
1774 return dmap.keys()
1782 files = match.files()
1775 files = match.files()
1783 if match.isexact():
1776 if match.isexact():
1784 # fast path -- filter the other way around, since typically files is
1777 # fast path -- filter the other way around, since typically files is
1785 # much smaller than dmap
1778 # much smaller than dmap
1786 return [f for f in files if f in dmap]
1779 return [f for f in files if f in dmap]
1787 if match.prefix() and all(fn in dmap for fn in files):
1780 if match.prefix() and all(fn in dmap for fn in files):
1788 # fast path -- all the values are known to be files, so just return
1781 # fast path -- all the values are known to be files, so just return
1789 # that
1782 # that
1790 return list(files)
1783 return list(files)
1791 return [f for f in dmap if match(f)]
1784 return [f for f in dmap if match(f)]
1792
1785
1793 def all_file_names(self):
1786 def all_file_names(self):
1794 """list all filename currently used by this dirstate
1787 """list all filename currently used by this dirstate
1795
1788
1796 This is only used to do `hg rollback` related backup in the transaction
1789 This is only used to do `hg rollback` related backup in the transaction
1797 """
1790 """
1798 files = [b'branch']
1791 files = [b'branch']
1799 if self._opener.exists(self._filename):
1792 if self._opener.exists(self._filename):
1800 files.append(self._filename)
1793 files.append(self._filename)
1801 if self._use_dirstate_v2:
1794 if self._use_dirstate_v2:
1802 files.append(self._map.docket.data_filename())
1795 files.append(self._map.docket.data_filename())
1803 return tuple(files)
1796 return tuple(files)
1804
1797
1805 def verify(self, m1, m2, p1, narrow_matcher=None):
1798 def verify(self, m1, m2, p1, narrow_matcher=None):
1806 """
1799 """
1807 check the dirstate contents against the parent manifest and yield errors
1800 check the dirstate contents against the parent manifest and yield errors
1808 """
1801 """
1809 missing_from_p1 = _(
1802 missing_from_p1 = _(
1810 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1803 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1811 )
1804 )
1812 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1805 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1813 missing_from_ps = _(
1806 missing_from_ps = _(
1814 b"%s marked as modified, but not in either manifest\n"
1807 b"%s marked as modified, but not in either manifest\n"
1815 )
1808 )
1816 missing_from_ds = _(
1809 missing_from_ds = _(
1817 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1810 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1818 )
1811 )
1819 for f, entry in self.items():
1812 for f, entry in self.items():
1820 if entry.p1_tracked:
1813 if entry.p1_tracked:
1821 if entry.modified and f not in m1 and f not in m2:
1814 if entry.modified and f not in m1 and f not in m2:
1822 yield missing_from_ps % f
1815 yield missing_from_ps % f
1823 elif f not in m1:
1816 elif f not in m1:
1824 yield missing_from_p1 % (f, node.short(p1))
1817 yield missing_from_p1 % (f, node.short(p1))
1825 if entry.added and f in m1:
1818 if entry.added and f in m1:
1826 yield unexpected_in_p1 % f
1819 yield unexpected_in_p1 % f
1827 for f in m1:
1820 for f in m1:
1828 if narrow_matcher is not None and not narrow_matcher(f):
1821 if narrow_matcher is not None and not narrow_matcher(f):
1829 continue
1822 continue
1830 entry = self.get_entry(f)
1823 entry = self.get_entry(f)
1831 if not entry.p1_tracked:
1824 if not entry.p1_tracked:
1832 yield missing_from_ds % (f, node.short(p1))
1825 yield missing_from_ds % (f, node.short(p1))
@@ -1,219 +1,219 b''
1 import contextlib
1 import contextlib
2
2
3 from . import util as interfaceutil
3 from . import util as interfaceutil
4
4
5
5
6 class idirstate(interfaceutil.Interface):
6 class idirstate(interfaceutil.Interface):
7 def __init__(
7 def __init__(
8 opener,
8 opener,
9 ui,
9 ui,
10 root,
10 root,
11 validate,
11 validate,
12 sparsematchfn,
12 sparsematchfn,
13 nodeconstants,
13 nodeconstants,
14 use_dirstate_v2,
14 use_dirstate_v2,
15 use_tracked_hint=False,
15 use_tracked_hint=False,
16 ):
16 ):
17 """Create a new dirstate object.
17 """Create a new dirstate object.
18
18
19 opener is an open()-like callable that can be used to open the
19 opener is an open()-like callable that can be used to open the
20 dirstate file; root is the root of the directory tracked by
20 dirstate file; root is the root of the directory tracked by
21 the dirstate.
21 the dirstate.
22 """
22 """
23
23
24 # TODO: all these private methods and attributes should be made
24 # TODO: all these private methods and attributes should be made
25 # public or removed from the interface.
25 # public or removed from the interface.
26 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
26 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
27 is_changing_any = interfaceutil.Attribute(
27 is_changing_any = interfaceutil.Attribute(
28 """True if any changes in progress."""
28 """True if any changes in progress."""
29 )
29 )
30 is_changing_parents = interfaceutil.Attribute(
30 is_changing_parents = interfaceutil.Attribute(
31 """True if parents changes in progress."""
31 """True if parents changes in progress."""
32 )
32 )
33 is_changing_files = interfaceutil.Attribute(
33 is_changing_files = interfaceutil.Attribute(
34 """True if file tracking changes in progress."""
34 """True if file tracking changes in progress."""
35 )
35 )
36
36
37 def _ignorefiles():
37 def _ignorefiles():
38 """Return a list of files containing patterns to ignore."""
38 """Return a list of files containing patterns to ignore."""
39
39
40 def _ignorefileandline(f):
40 def _ignorefileandline(f):
41 """Given a file `f`, return the ignore file and line that ignores it."""
41 """Given a file `f`, return the ignore file and line that ignores it."""
42
42
43 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
43 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
44 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
44 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
45
45
46 @contextlib.contextmanager
46 @contextlib.contextmanager
47 def changing_parents(repo):
47 def changing_parents(repo):
48 """Context manager for handling dirstate parents.
48 """Context manager for handling dirstate parents.
49
49
50 If an exception occurs in the scope of the context manager,
50 If an exception occurs in the scope of the context manager,
51 the incoherent dirstate won't be written when wlock is
51 the incoherent dirstate won't be written when wlock is
52 released.
52 released.
53 """
53 """
54
54
55 @contextlib.contextmanager
55 @contextlib.contextmanager
56 def changing_files(repo):
56 def changing_files(repo):
57 """Context manager for handling dirstate files.
57 """Context manager for handling dirstate files.
58
58
59 If an exception occurs in the scope of the context manager,
59 If an exception occurs in the scope of the context manager,
60 the incoherent dirstate won't be written when wlock is
60 the incoherent dirstate won't be written when wlock is
61 released.
61 released.
62 """
62 """
63
63
64 def hasdir(d):
64 def hasdir(d):
65 pass
65 pass
66
66
67 def flagfunc(buildfallback):
67 def flagfunc(buildfallback):
68 """build a callable that returns flags associated with a filename
68 """build a callable that returns flags associated with a filename
69
69
70 The information is extracted from three possible layers:
70 The information is extracted from three possible layers:
71 1. the file system if it supports the information
71 1. the file system if it supports the information
72 2. the "fallback" information stored in the dirstate if any
72 2. the "fallback" information stored in the dirstate if any
73 3. a more expensive mechanism inferring the flags from the parents.
73 3. a more expensive mechanism inferring the flags from the parents.
74 """
74 """
75
75
76 def getcwd():
76 def getcwd():
77 """Return the path from which a canonical path is calculated.
77 """Return the path from which a canonical path is calculated.
78
78
79 This path should be used to resolve file patterns or to convert
79 This path should be used to resolve file patterns or to convert
80 canonical paths back to file paths for display. It shouldn't be
80 canonical paths back to file paths for display. It shouldn't be
81 used to get real file paths. Use vfs functions instead.
81 used to get real file paths. Use vfs functions instead.
82 """
82 """
83
83
84 def pathto(f, cwd=None):
84 def pathto(f, cwd=None):
85 pass
85 pass
86
86
87 def get_entry(path):
87 def get_entry(path):
88 """return a DirstateItem for the associated path"""
88 """return a DirstateItem for the associated path"""
89
89
90 def __contains__(key):
90 def __contains__(key):
91 """Check if bytestring `key` is known to the dirstate."""
91 """Check if bytestring `key` is known to the dirstate."""
92
92
93 def __iter__():
93 def __iter__():
94 """Iterate the dirstate's contained filenames as bytestrings."""
94 """Iterate the dirstate's contained filenames as bytestrings."""
95
95
96 def items():
96 def items():
97 """Iterate the dirstate's entries as (filename, DirstateItem.
97 """Iterate the dirstate's entries as (filename, DirstateItem.
98
98
99 As usual, filename is a bytestring.
99 As usual, filename is a bytestring.
100 """
100 """
101
101
102 iteritems = items
102 iteritems = items
103
103
104 def parents():
104 def parents():
105 pass
105 pass
106
106
107 def p1():
107 def p1():
108 pass
108 pass
109
109
110 def p2():
110 def p2():
111 pass
111 pass
112
112
113 def branch():
113 def branch():
114 pass
114 pass
115
115
116 def setparents(p1, p2=None):
116 def setparents(p1, p2=None):
117 """Set dirstate parents to p1 and p2.
117 """Set dirstate parents to p1 and p2.
118
118
119 When moving from two parents to one, "merged" entries a
119 When moving from two parents to one, "merged" entries a
120 adjusted to normal and previous copy records discarded and
120 adjusted to normal and previous copy records discarded and
121 returned by the call.
121 returned by the call.
122
122
123 See localrepo.setparents()
123 See localrepo.setparents()
124 """
124 """
125
125
126 def setbranch(branch, transaction=None):
126 def setbranch(branch, transaction):
127 pass
127 pass
128
128
129 def invalidate():
129 def invalidate():
130 """Causes the next access to reread the dirstate.
130 """Causes the next access to reread the dirstate.
131
131
132 This is different from localrepo.invalidatedirstate() because it always
132 This is different from localrepo.invalidatedirstate() because it always
133 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
133 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
134 check whether the dirstate has changed before rereading it."""
134 check whether the dirstate has changed before rereading it."""
135
135
136 def copy(source, dest):
136 def copy(source, dest):
137 """Mark dest as a copy of source. Unmark dest if source is None."""
137 """Mark dest as a copy of source. Unmark dest if source is None."""
138
138
139 def copied(file):
139 def copied(file):
140 pass
140 pass
141
141
142 def copies():
142 def copies():
143 pass
143 pass
144
144
145 def normalize(path, isknown=False, ignoremissing=False):
145 def normalize(path, isknown=False, ignoremissing=False):
146 """
146 """
147 normalize the case of a pathname when on a casefolding filesystem
147 normalize the case of a pathname when on a casefolding filesystem
148
148
149 isknown specifies whether the filename came from walking the
149 isknown specifies whether the filename came from walking the
150 disk, to avoid extra filesystem access.
150 disk, to avoid extra filesystem access.
151
151
152 If ignoremissing is True, missing path are returned
152 If ignoremissing is True, missing path are returned
153 unchanged. Otherwise, we try harder to normalize possibly
153 unchanged. Otherwise, we try harder to normalize possibly
154 existing path components.
154 existing path components.
155
155
156 The normalized case is determined based on the following precedence:
156 The normalized case is determined based on the following precedence:
157
157
158 - version of name already stored in the dirstate
158 - version of name already stored in the dirstate
159 - version of name stored on disk
159 - version of name stored on disk
160 - version provided via command arguments
160 - version provided via command arguments
161 """
161 """
162
162
163 def clear():
163 def clear():
164 pass
164 pass
165
165
166 def rebuild(parent, allfiles, changedfiles=None):
166 def rebuild(parent, allfiles, changedfiles=None):
167 pass
167 pass
168
168
169 def write(tr):
169 def write(tr):
170 pass
170 pass
171
171
172 def addparentchangecallback(category, callback):
172 def addparentchangecallback(category, callback):
173 """add a callback to be called when the wd parents are changed
173 """add a callback to be called when the wd parents are changed
174
174
175 Callback will be called with the following arguments:
175 Callback will be called with the following arguments:
176 dirstate, (oldp1, oldp2), (newp1, newp2)
176 dirstate, (oldp1, oldp2), (newp1, newp2)
177
177
178 Category is a unique identifier to allow overwriting an old callback
178 Category is a unique identifier to allow overwriting an old callback
179 with a newer callback.
179 with a newer callback.
180 """
180 """
181
181
182 def walk(match, subrepos, unknown, ignored, full=True):
182 def walk(match, subrepos, unknown, ignored, full=True):
183 """
183 """
184 Walk recursively through the directory tree, finding all files
184 Walk recursively through the directory tree, finding all files
185 matched by match.
185 matched by match.
186
186
187 If full is False, maybe skip some known-clean files.
187 If full is False, maybe skip some known-clean files.
188
188
189 Return a dict mapping filename to stat-like object (either
189 Return a dict mapping filename to stat-like object (either
190 mercurial.osutil.stat instance or return value of os.stat()).
190 mercurial.osutil.stat instance or return value of os.stat()).
191
191
192 """
192 """
193
193
194 def status(match, subrepos, ignored, clean, unknown):
194 def status(match, subrepos, ignored, clean, unknown):
195 """Determine the status of the working copy relative to the
195 """Determine the status of the working copy relative to the
196 dirstate and return a pair of (unsure, status), where status is of type
196 dirstate and return a pair of (unsure, status), where status is of type
197 scmutil.status and:
197 scmutil.status and:
198
198
199 unsure:
199 unsure:
200 files that might have been modified since the dirstate was
200 files that might have been modified since the dirstate was
201 written, but need to be read to be sure (size is the same
201 written, but need to be read to be sure (size is the same
202 but mtime differs)
202 but mtime differs)
203 status.modified:
203 status.modified:
204 files that have definitely been modified since the dirstate
204 files that have definitely been modified since the dirstate
205 was written (different size or mode)
205 was written (different size or mode)
206 status.clean:
206 status.clean:
207 files that have definitely not been modified since the
207 files that have definitely not been modified since the
208 dirstate was written
208 dirstate was written
209 """
209 """
210
210
211 def matches(match):
211 def matches(match):
212 """
212 """
213 return files in the dirstate (in whatever state) filtered by match
213 return files in the dirstate (in whatever state) filtered by match
214 """
214 """
215
215
216 def verify(m1, m2, p1, narrow_matcher=None):
216 def verify(m1, m2, p1, narrow_matcher=None):
217 """
217 """
218 check the dirstate contents against the parent manifest and yield errors
218 check the dirstate contents against the parent manifest and yield errors
219 """
219 """
General Comments 0
You need to be logged in to leave comments. Login now