##// END OF EJS Templates
dirstate: subclass the new dirstate Protocol class...
Matt Harbison -
r53203:3a90a6fd default
parent child Browse files
Show More
@@ -1,399 +1,397
1 from __future__ import annotations
1 from __future__ import annotations
2
2
3 import contextlib
3 import contextlib
4 import os
4 import os
5
5
6 from mercurial.node import sha1nodeconstants
6 from mercurial.node import sha1nodeconstants
7 from mercurial import (
7 from mercurial import (
8 dirstatemap,
8 dirstatemap,
9 error,
9 error,
10 extensions,
10 extensions,
11 match as matchmod,
11 match as matchmod,
12 pycompat,
12 pycompat,
13 scmutil,
13 scmutil,
14 util,
14 util,
15 )
15 )
16 from mercurial.dirstateutils import (
16 from mercurial.dirstateutils import (
17 timestamp,
17 timestamp,
18 )
18 )
19 from mercurial.interfaces import (
19 from mercurial.interfaces import (
20 dirstate as intdirstate,
20 dirstate as intdirstate,
21 util as interfaceutil,
22 )
21 )
23
22
24 from . import gitutil
23 from . import gitutil
25
24
26
25
# Convenience aliases for names used throughout this module.
# NOTE(review): pygit2 is presumably None when the bindings are missing
# (gitutil.get_pygit2 is opaque here) — the `if pygit2:` guard below
# suggests as much.
DirstateItem = dirstatemap.DirstateItem
propertycache = util.propertycache
pygit2 = gitutil.get_pygit2()
30
29
31
30
def readpatternfile(orig, filepath, warn, sourceinfo=False):
    """Wrap ``match.readpatternfile`` to understand git ignore files.

    For ``info/exclude`` and ``*.gitignore`` files, parse the git ignore
    syntax and translate each pattern into a Mercurial ``rootglob:`` /
    ``relglob:`` pattern.  All other files are delegated to ``orig``.

    Returns a ``(patterns, warnings)`` pair like the wrapped function.
    When ``sourceinfo`` is true, each pattern is a
    ``(pattern, lineno, originalline)`` tuple, matching the contract of
    ``match.readpatternfile``.
    """
    if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')):
        # Not a git ignore file: delegate, preserving the caller's
        # sourceinfo request (the previous code forced sourceinfo=False,
        # silently breaking callers that asked for source information).
        return orig(filepath, warn, sourceinfo=sourceinfo)
    result = []
    warnings = []
    with open(filepath, 'rb') as fp:
        for lineno, line in enumerate(fp, start=1):
            line = line.strip()
            if not line or line.startswith(b'#'):
                continue  # blank line or comment
            if line.startswith(b'!'):
                # git's negation patterns have no Mercurial equivalent
                warnings.append(b'unsupported ignore pattern %s' % line)
                continue
            if line.startswith(b'/'):
                # leading slash anchors the pattern at the repo root
                pat = b'rootglob:' + line[1:]
            else:
                pat = b'relglob:' + line
            if sourceinfo:
                result.append((pat, lineno, line))
            else:
                result.append(pat)
    return result, warnings
50
49
51
50
# Install the gitignore-aware pattern reader over Mercurial's own.
extensions.wrapfunction(matchmod, 'readpatternfile', readpatternfile)


# Translation table from pygit2 status flags to single-letter dirstate
# states ('n' normal, 'a' added, 'r' removed, 'm' modified/merge,
# '?' unknown).  Left empty when pygit2 is unavailable.
_STATUS_MAP = {}
if pygit2:
    _STATUS_MAP = {
        pygit2.GIT_STATUS_CONFLICTED: b'm',
        pygit2.GIT_STATUS_CURRENT: b'n',
        pygit2.GIT_STATUS_IGNORED: b'?',
        pygit2.GIT_STATUS_INDEX_DELETED: b'r',
        pygit2.GIT_STATUS_INDEX_MODIFIED: b'n',
        pygit2.GIT_STATUS_INDEX_NEW: b'a',
        pygit2.GIT_STATUS_INDEX_RENAMED: b'a',
        pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n',
        pygit2.GIT_STATUS_WT_DELETED: b'r',
        pygit2.GIT_STATUS_WT_MODIFIED: b'n',
        pygit2.GIT_STATUS_WT_NEW: b'?',
        pygit2.GIT_STATUS_WT_RENAMED: b'a',
        pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
        pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
        pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
    }
74
73
75
74
class gitdirstate(intdirstate.idirstate):
    """A dirstate implementation backed by a git index via pygit2.

    Subclasses the dirstate Protocol class so Mercurial commands can
    operate on a git working copy.  Several operations are TODO stubs.
    """

    def __init__(self, ui, vfs, gitrepo, use_dirstate_v2):
        self._ui = ui
        self._root = os.path.dirname(vfs.base)
        self._opener = vfs
        self.git = gitrepo
        self._plchangecallbacks = {}
        # TODO: context.poststatusfixup is bad and uses this attribute
        self._dirty = False
        self._mapcls = dirstatemap.dirstatemap
        self._use_dirstate_v2 = use_dirstate_v2

    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            sha1nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map

    def p1(self):
        """Return the raw node id of the first parent (git HEAD)."""
        try:
            return self.git.head.peel().id.raw
        except pygit2.GitError:
            # Typically happens when peeling HEAD fails, as in an
            # empty repository.
            return sha1nodeconstants.nullid

    def p2(self):
        # TODO: MERGE_HEAD? something like that, right?
        return sha1nodeconstants.nullid

    def setparents(self, p1, p2=None):
        """Point git HEAD at p1; merges (a real p2) are not supported yet."""
        if p2 is None:
            p2 = sha1nodeconstants.nullid
        assert p2 == sha1nodeconstants.nullid, b'TODO merging support'
        self.git.head.set_target(gitutil.togitnode(p1))

    @util.propertycache
    def identity(self):
        # stat of the git index file; used to detect out-of-band changes
        return util.filestat.frompath(
            os.path.join(self._root, b'.git', b'index')
        )

    def branch(self):
        # git branches are not hg named branches (see setbranch below)
        return b'default'

    def parents(self):
        # TODO how on earth do we find p2 if a merge is in flight?
        return self.p1(), sha1nodeconstants.nullid

    def __iter__(self):
        return (pycompat.fsencode(f.path) for f in self.git.index)

    def items(self):
        for ie in self.git.index:
            yield ie.path, None  # value should be a DirstateItem

    # py2,3 compat forward
    iteritems = items

    def __getitem__(self, filename):
        try:
            gs = self.git.status_file(filename)
        except KeyError:
            return b'?'
        return _STATUS_MAP[gs]

    def __contains__(self, filename):
        try:
            gs = self.git.status_file(filename)
            return _STATUS_MAP[gs] != b'?'
        except KeyError:
            return False

    def status(self, match, subrepos, ignored, clean, unknown):
        """Compute working-copy status by translating git's status output.

        Returns a ``(lookup, scmutil.status, mtime_boundary)`` triple.
        """
        listclean = clean
        # TODO handling of clean files - can we get that from git.status()?
        modified, added, removed, deleted, unknown, ignored, clean = (
            [],
            [],
            [],
            [],
            [],
            [],
            [],
        )

        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        gstatus = self.git.status()
        for path, status in gstatus.items():
            path = pycompat.fsencode(path)
            if not match(path):
                continue
            if status == pygit2.GIT_STATUS_IGNORED:
                if path.endswith(b'/'):
                    continue
                ignored.append(path)
            elif status in (
                pygit2.GIT_STATUS_WT_MODIFIED,
                pygit2.GIT_STATUS_INDEX_MODIFIED,
                pygit2.GIT_STATUS_WT_MODIFIED
                | pygit2.GIT_STATUS_INDEX_MODIFIED,
            ):
                modified.append(path)
            elif status == pygit2.GIT_STATUS_INDEX_NEW:
                added.append(path)
            elif status == pygit2.GIT_STATUS_WT_NEW:
                unknown.append(path)
            elif status == pygit2.GIT_STATUS_WT_DELETED:
                deleted.append(path)
            elif status == pygit2.GIT_STATUS_INDEX_DELETED:
                removed.append(path)
            else:
                raise error.Abort(
                    b'unhandled case: status for %r is %r' % (path, status)
                )

        if listclean:
            observed = set(
                modified + added + removed + deleted + unknown + ignored
            )
            index = self.git.index
            index.read()
            for entry in index:
                path = pycompat.fsencode(entry.path)
                if not match(path):
                    continue
                if path in observed:
                    continue  # already in some other set
                # BUGFIX: was `path[-1] == b'/'`, which compares an int
                # (bytes indexing) against bytes and is always False on
                # Python 3; use endswith, as the ignored branch above does.
                if path.endswith(b'/'):
                    continue  # directory
                clean.append(path)

        # TODO are we really always sure of status here?
        return (
            False,
            scmutil.status(
                modified, added, removed, deleted, unknown, ignored, clean
            ),
            mtime_boundary,
        )

    def flagfunc(self, buildfallback):
        # TODO we can do better
        return buildfallback()

    def getcwd(self):
        # TODO is this a good way to do this?
        return os.path.dirname(
            os.path.dirname(pycompat.fsencode(self.git.path))
        )

    def get_entry(self, path):
        """return a DirstateItem for the associated path"""
        entry = self._map.get(path)
        if entry is None:
            return DirstateItem()
        return entry

    def normalize(self, path, isknown=False, ignoremissing=False):
        normed = util.normcase(path)
        assert normed == path, b"TODO handling of case folding: %s != %s" % (
            normed,
            path,
        )
        return path

    @property
    def _checklink(self):
        return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path)))

    def copies(self):
        # TODO support copies?
        return {}

    # # TODO what the heck is this
    _filecache = set()

    @property
    def is_changing_parents(self):
        # TODO: we need to implement the context manager bits and
        # correctly stage/revert index edits.
        return False

    @property
    def is_changing_any(self):
        # TODO: we need to implement the context manager bits and
        # correctly stage/revert index edits.
        return False

    def write(self, tr):
        """Write the git index, deferring to the transaction if given."""
        # TODO: call parent change callbacks

        if tr:

            def writeinner(category):
                self.git.index.write()

            tr.addpending(b'gitdirstate', writeinner)
        else:
            self.git.index.write()

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        # TODO core dirstate does something about slashes here
        assert isinstance(f, bytes)
        r = util.pathto(self._root, cwd, f)
        return r

    def matches(self, match):
        """Yield the index paths accepted by ``match``."""
        for x in self.git.index:
            p = pycompat.fsencode(x.path)
            if match(p):
                yield p

    def set_clean(self, f, parentfiledata):
        """Mark a file normal and clean."""
        # TODO: for now we just let libgit2 re-stat the file. We can
        # clearly do better.

    def set_possibly_dirty(self, f):
        """Mark a file normal, but possibly dirty."""
        # TODO: for now we just let libgit2 re-stat the file. We can
        # clearly do better.

    def walk(self, match, subrepos, unknown, ignored, full=True):
        # TODO: we need to use .status() and not iterate the index,
        # because the index doesn't force a re-walk and so `hg add` of
        # a new file without an intervening call to status will
        # silently do nothing.
        r = {}
        cwd = self.getcwd()
        for path, status in self.git.status().items():
            if path.startswith('.hg/'):
                continue
            path = pycompat.fsencode(path)
            if not match(path):
                continue
            # TODO construct the stat info from the status object?
            try:
                s = os.stat(os.path.join(cwd, path))
            except FileNotFoundError:
                continue
            r[path] = s
        return r

    def set_tracked(self, f, reset_copy=False):
        """Start tracking f; returns True if it was newly added."""
        # TODO: support copies and reset_copy=True
        uf = pycompat.fsdecode(f)
        if uf in self.git.index:
            return False
        index = self.git.index
        index.read()
        index.add(uf)
        index.write()
        return True

    def add(self, f):
        index = self.git.index
        index.read()
        index.add(pycompat.fsdecode(f))
        index.write()

    def drop(self, f):
        index = self.git.index
        index.read()
        fs = pycompat.fsdecode(f)
        if fs in index:
            index.remove(fs)
            index.write()

    def set_untracked(self, f):
        """Stop tracking f; returns True if it was actually tracked."""
        index = self.git.index
        index.read()
        fs = pycompat.fsdecode(f)
        if fs in index:
            index.remove(fs)
            index.write()
            return True
        return False

    def remove(self, f):
        index = self.git.index
        index.read()
        index.remove(pycompat.fsdecode(f))
        index.write()

    def copied(self, path):
        # TODO: track copies?
        return None

    def prefetch_parents(self):
        # TODO
        pass

    def update_file(self, *args, **kwargs):
        # TODO
        pass

    @contextlib.contextmanager
    def changing_parents(self, repo):
        # TODO: track this maybe?
        yield

    def addparentchangecallback(self, category, callback):
        # TODO: should this be added to the dirstate interface?
        self._plchangecallbacks[category] = callback

    def setbranch(self, branch, transaction):
        raise error.Abort(
            b'git repos do not support branches. try using bookmarks'
        )
@@ -1,1816 +1,1808
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import annotations
8 from __future__ import annotations
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import os
12 import os
13 import stat
13 import stat
14 import typing
15 import uuid
14 import uuid
16
15
17 from .i18n import _
16 from .i18n import _
18
17
19 from hgdemandimport import tracing
18 from hgdemandimport import tracing
20
19
21 from . import (
20 from . import (
22 dirstatemap,
21 dirstatemap,
23 encoding,
22 encoding,
24 error,
23 error,
25 match as matchmod,
24 match as matchmod,
26 node,
25 node,
27 pathutil,
26 pathutil,
28 policy,
27 policy,
29 pycompat,
28 pycompat,
30 scmutil,
29 scmutil,
31 txnutil,
30 txnutil,
32 util,
31 util,
33 )
32 )
34
33
35 from .dirstateutils import (
34 from .dirstateutils import (
36 timestamp,
35 timestamp,
37 )
36 )
38
37
39 from .interfaces import (
38 from .interfaces import (
40 dirstate as intdirstate,
39 dirstate as intdirstate,
41 util as interfaceutil,
42 )
40 )
43
41
# Optional accelerated implementations (C parser, Rust dirstate).
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# True when the Rust dirstate implementation is available.
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# Convenience aliases used throughout this module.
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
54
52
55
53
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname relative to the repository's .hg directory
        return obj._opener.join(fname)
61
59
62
60
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
68
66
69
67
def check_invalidated(func):
    """Decorator: forbid calling *func* on an invalidated dirstate.

    After an error occurs while modifying the dirstate, the object stays
    in an "invalidated state" until the top-level scope that framed the
    change is exited; any call in that window is a programming error.
    """

    def wrap(self, *args, **kwargs):
        if not self._invalidated_context:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` after the dirstate was invalidated' % func.__name__
        )

    return wrap
86
84
87
85
def requires_changing_parents(func):
    """Decorator: *func* may only run inside a ``changing_parents`` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_parents:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing_parents context'
            % func.__name__
        )

    return check_invalidated(wrap)
97
95
98
96
def requires_changing_files(func):
    """Decorator: *func* may only run inside a ``changing_files`` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_files:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a `changing_files`' % func.__name__
        )

    return check_invalidated(wrap)
108
106
109
107
def requires_changing_any(func):
    """Decorator: *func* may only run inside some ``changing_*`` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_any:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing context' % func.__name__
        )

    return check_invalidated(wrap)
119
117
120
118
def requires_changing_files_or_status(func):
    """Decorator: *func* requires a ``changing_files`` or ``running_status``
    context to be active."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_files or self._running_status > 0:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing_files '
            'or running_status context' % func.__name__
        )

    return check_invalidated(wrap)
133
131
134
132
# Symbolic names for the kind of dirstate change currently underway
# (recorded in `_change_type`; see the `changing_*` contexts).
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
137
135
138
136
139 class DirState:
137 class dirstate(intdirstate.idirstate):
140 # used by largefile to avoid overwritting transaction callback
138 # used by largefile to avoid overwritting transaction callback
141 _tr_key_suffix = b''
139 _tr_key_suffix = b''
142
140
143 def __init__(
141 def __init__(
144 self,
142 self,
145 opener,
143 opener,
146 ui,
144 ui,
147 root,
145 root,
148 validate,
146 validate,
149 sparsematchfn,
147 sparsematchfn,
150 nodeconstants,
148 nodeconstants,
151 use_dirstate_v2,
149 use_dirstate_v2,
152 use_tracked_hint=False,
150 use_tracked_hint=False,
153 ):
151 ):
154 """Create a new dirstate object.
152 """Create a new dirstate object.
155
153
156 opener is an open()-like callable that can be used to open the
154 opener is an open()-like callable that can be used to open the
157 dirstate file; root is the root of the directory tracked by
155 dirstate file; root is the root of the directory tracked by
158 the dirstate.
156 the dirstate.
159 """
157 """
160 self._use_dirstate_v2 = use_dirstate_v2
158 self._use_dirstate_v2 = use_dirstate_v2
161 self._use_tracked_hint = use_tracked_hint
159 self._use_tracked_hint = use_tracked_hint
162 self._nodeconstants = nodeconstants
160 self._nodeconstants = nodeconstants
163 self._opener = opener
161 self._opener = opener
164 self._validate = validate
162 self._validate = validate
165 self._root = root
163 self._root = root
166 # Either build a sparse-matcher or None if sparse is disabled
164 # Either build a sparse-matcher or None if sparse is disabled
167 self._sparsematchfn = sparsematchfn
165 self._sparsematchfn = sparsematchfn
168 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
166 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
169 # UNC path pointing to root share (issue4557)
167 # UNC path pointing to root share (issue4557)
170 self._rootdir = pathutil.normasprefix(root)
168 self._rootdir = pathutil.normasprefix(root)
171 # True is any internal state may be different
169 # True is any internal state may be different
172 self._dirty = False
170 self._dirty = False
173 # True if the set of tracked file may be different
171 # True if the set of tracked file may be different
174 self._dirty_tracked_set = False
172 self._dirty_tracked_set = False
175 self._ui = ui
173 self._ui = ui
176 self._filecache = {}
174 self._filecache = {}
177 # nesting level of `changing_parents` context
175 # nesting level of `changing_parents` context
178 self._changing_level = 0
176 self._changing_level = 0
179 # the change currently underway
177 # the change currently underway
180 self._change_type = None
178 self._change_type = None
181 # number of open _running_status context
179 # number of open _running_status context
182 self._running_status = 0
180 self._running_status = 0
183 # True if the current dirstate changing operations have been
181 # True if the current dirstate changing operations have been
184 # invalidated (used to make sure all nested contexts have been exited)
182 # invalidated (used to make sure all nested contexts have been exited)
185 self._invalidated_context = False
183 self._invalidated_context = False
186 self._attached_to_a_transaction = False
184 self._attached_to_a_transaction = False
187 self._filename = b'dirstate'
185 self._filename = b'dirstate'
188 self._filename_th = b'dirstate-tracked-hint'
186 self._filename_th = b'dirstate-tracked-hint'
189 self._pendingfilename = b'%s.pending' % self._filename
187 self._pendingfilename = b'%s.pending' % self._filename
190 self._plchangecallbacks = {}
188 self._plchangecallbacks = {}
191 self._origpl = None
189 self._origpl = None
192 self._mapcls = dirstatemap.dirstatemap
190 self._mapcls = dirstatemap.dirstatemap
193 # Access and cache cwd early, so we don't access it for the first time
191 # Access and cache cwd early, so we don't access it for the first time
194 # after a working-copy update caused it to not exist (accessing it then
192 # after a working-copy update caused it to not exist (accessing it then
195 # raises an exception).
193 # raises an exception).
196 self._cwd
194 self._cwd
197
195
198 def refresh(self):
196 def refresh(self):
199 # XXX if this happens, you likely did not enter the `changing_xxx`
197 # XXX if this happens, you likely did not enter the `changing_xxx`
200 # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
198 # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
201 # `refresh`.
199 # `refresh`.
202 if self.is_changing_any:
200 if self.is_changing_any:
203 msg = "refreshing the dirstate in the middle of a change"
201 msg = "refreshing the dirstate in the middle of a change"
204 raise error.ProgrammingError(msg)
202 raise error.ProgrammingError(msg)
205 if '_branch' in vars(self):
203 if '_branch' in vars(self):
206 del self._branch
204 del self._branch
207 if '_map' in vars(self) and self._map.may_need_refresh():
205 if '_map' in vars(self) and self._map.may_need_refresh():
208 self.invalidate()
206 self.invalidate()
209
207
210 def prefetch_parents(self):
208 def prefetch_parents(self):
211 """make sure the parents are loaded
209 """make sure the parents are loaded
212
210
213 Used to avoid a race condition.
211 Used to avoid a race condition.
214 """
212 """
215 self._pl
213 self._pl
216
214
217 @contextlib.contextmanager
215 @contextlib.contextmanager
218 @check_invalidated
216 @check_invalidated
219 def running_status(self, repo):
217 def running_status(self, repo):
220 """Wrap a status operation
218 """Wrap a status operation
221
219
222 This context is not mutally exclusive with the `changing_*` context. It
220 This context is not mutally exclusive with the `changing_*` context. It
223 also do not warrant for the `wlock` to be taken.
221 also do not warrant for the `wlock` to be taken.
224
222
225 If the wlock is taken, this context will behave in a simple way, and
223 If the wlock is taken, this context will behave in a simple way, and
226 ensure the data are scheduled for write when leaving the top level
224 ensure the data are scheduled for write when leaving the top level
227 context.
225 context.
228
226
229 If the lock is not taken, it will only warrant that the data are either
227 If the lock is not taken, it will only warrant that the data are either
230 committed (written) and rolled back (invalidated) when exiting the top
228 committed (written) and rolled back (invalidated) when exiting the top
231 level context. The write/invalidate action must be performed by the
229 level context. The write/invalidate action must be performed by the
232 wrapped code.
230 wrapped code.
233
231
234
232
235 The expected logic is:
233 The expected logic is:
236
234
237 A: read the dirstate
235 A: read the dirstate
238 B: run status
236 B: run status
239 This might make the dirstate dirty by updating cache,
237 This might make the dirstate dirty by updating cache,
240 especially in Rust.
238 especially in Rust.
241 C: do more "post status fixup if relevant
239 C: do more "post status fixup if relevant
242 D: try to take the w-lock (this will invalidate the changes if they were raced)
240 D: try to take the w-lock (this will invalidate the changes if they were raced)
243 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
241 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
244 E1: elif lock was acquired β†’ write the changes
242 E1: elif lock was acquired β†’ write the changes
245 E2: else β†’ discard the changes
243 E2: else β†’ discard the changes
246 """
244 """
247 has_lock = repo.currentwlock() is not None
245 has_lock = repo.currentwlock() is not None
248 is_changing = self.is_changing_any
246 is_changing = self.is_changing_any
249 tr = repo.currenttransaction()
247 tr = repo.currenttransaction()
250 has_tr = tr is not None
248 has_tr = tr is not None
251 nested = bool(self._running_status)
249 nested = bool(self._running_status)
252
250
253 first_and_alone = not (is_changing or has_tr or nested)
251 first_and_alone = not (is_changing or has_tr or nested)
254
252
255 # enforce no change happened outside of a proper context.
253 # enforce no change happened outside of a proper context.
256 if first_and_alone and self._dirty:
254 if first_and_alone and self._dirty:
257 has_tr = repo.currenttransaction() is not None
255 has_tr = repo.currenttransaction() is not None
258 if not has_tr and self._changing_level == 0 and self._dirty:
256 if not has_tr and self._changing_level == 0 and self._dirty:
259 msg = "entering a status context, but dirstate is already dirty"
257 msg = "entering a status context, but dirstate is already dirty"
260 raise error.ProgrammingError(msg)
258 raise error.ProgrammingError(msg)
261
259
262 should_write = has_lock and not (nested or is_changing)
260 should_write = has_lock and not (nested or is_changing)
263
261
264 self._running_status += 1
262 self._running_status += 1
265 try:
263 try:
266 yield
264 yield
267 except Exception:
265 except Exception:
268 self.invalidate()
266 self.invalidate()
269 raise
267 raise
270 finally:
268 finally:
271 self._running_status -= 1
269 self._running_status -= 1
272 if self._invalidated_context:
270 if self._invalidated_context:
273 should_write = False
271 should_write = False
274 self.invalidate()
272 self.invalidate()
275
273
276 if should_write:
274 if should_write:
277 assert repo.currenttransaction() is tr
275 assert repo.currenttransaction() is tr
278 self.write(tr)
276 self.write(tr)
279 elif not has_lock:
277 elif not has_lock:
280 if self._dirty:
278 if self._dirty:
281 msg = b'dirstate dirty while exiting an isolated status context'
279 msg = b'dirstate dirty while exiting an isolated status context'
282 repo.ui.develwarn(msg)
280 repo.ui.develwarn(msg)
283 self.invalidate()
281 self.invalidate()
284
282
285 @contextlib.contextmanager
283 @contextlib.contextmanager
286 @check_invalidated
284 @check_invalidated
287 def _changing(self, repo, change_type):
285 def _changing(self, repo, change_type):
288 if repo.currentwlock() is None:
286 if repo.currentwlock() is None:
289 msg = b"trying to change the dirstate without holding the wlock"
287 msg = b"trying to change the dirstate without holding the wlock"
290 raise error.ProgrammingError(msg)
288 raise error.ProgrammingError(msg)
291
289
292 has_tr = repo.currenttransaction() is not None
290 has_tr = repo.currenttransaction() is not None
293 if not has_tr and self._changing_level == 0 and self._dirty:
291 if not has_tr and self._changing_level == 0 and self._dirty:
294 msg = b"entering a changing context, but dirstate is already dirty"
292 msg = b"entering a changing context, but dirstate is already dirty"
295 repo.ui.develwarn(msg)
293 repo.ui.develwarn(msg)
296
294
297 assert self._changing_level >= 0
295 assert self._changing_level >= 0
298 # different type of change are mutually exclusive
296 # different type of change are mutually exclusive
299 if self._change_type is None:
297 if self._change_type is None:
300 assert self._changing_level == 0
298 assert self._changing_level == 0
301 self._change_type = change_type
299 self._change_type = change_type
302 elif self._change_type != change_type:
300 elif self._change_type != change_type:
303 msg = (
301 msg = (
304 'trying to open "%s" dirstate-changing context while a "%s" is'
302 'trying to open "%s" dirstate-changing context while a "%s" is'
305 ' already open'
303 ' already open'
306 )
304 )
307 msg %= (change_type, self._change_type)
305 msg %= (change_type, self._change_type)
308 raise error.ProgrammingError(msg)
306 raise error.ProgrammingError(msg)
309 should_write = False
307 should_write = False
310 self._changing_level += 1
308 self._changing_level += 1
311 try:
309 try:
312 yield
310 yield
313 except: # re-raises
311 except: # re-raises
314 self.invalidate() # this will set `_invalidated_context`
312 self.invalidate() # this will set `_invalidated_context`
315 raise
313 raise
316 finally:
314 finally:
317 assert self._changing_level > 0
315 assert self._changing_level > 0
318 self._changing_level -= 1
316 self._changing_level -= 1
319 # If the dirstate is being invalidated, call invalidate again.
317 # If the dirstate is being invalidated, call invalidate again.
320 # This will throw away anything added by a upper context and
318 # This will throw away anything added by a upper context and
321 # reset the `_invalidated_context` flag when relevant
319 # reset the `_invalidated_context` flag when relevant
322 if self._changing_level <= 0:
320 if self._changing_level <= 0:
323 self._change_type = None
321 self._change_type = None
324 assert self._changing_level == 0
322 assert self._changing_level == 0
325 if self._invalidated_context:
323 if self._invalidated_context:
326 # make sure we invalidate anything an upper context might
324 # make sure we invalidate anything an upper context might
327 # have changed.
325 # have changed.
328 self.invalidate()
326 self.invalidate()
329 else:
327 else:
330 should_write = self._changing_level <= 0
328 should_write = self._changing_level <= 0
331 tr = repo.currenttransaction()
329 tr = repo.currenttransaction()
332 if has_tr != (tr is not None):
330 if has_tr != (tr is not None):
333 if has_tr:
331 if has_tr:
334 m = "transaction vanished while changing dirstate"
332 m = "transaction vanished while changing dirstate"
335 else:
333 else:
336 m = "transaction appeared while changing dirstate"
334 m = "transaction appeared while changing dirstate"
337 raise error.ProgrammingError(m)
335 raise error.ProgrammingError(m)
338 if should_write:
336 if should_write:
339 self.write(tr)
337 self.write(tr)
340
338
341 @contextlib.contextmanager
339 @contextlib.contextmanager
342 def changing_parents(self, repo):
340 def changing_parents(self, repo):
343 """Wrap a dirstate change related to a change of working copy parents
341 """Wrap a dirstate change related to a change of working copy parents
344
342
345 This context scopes a series of dirstate modifications that match an
343 This context scopes a series of dirstate modifications that match an
346 update of the working copy parents (typically `hg update`, `hg merge`
344 update of the working copy parents (typically `hg update`, `hg merge`
347 etc).
345 etc).
348
346
349 The dirstate's methods that perform this kind of modifications require
347 The dirstate's methods that perform this kind of modifications require
350 this context to be present before being called.
348 this context to be present before being called.
351 Such methods are decorated with `@requires_changing_parents`.
349 Such methods are decorated with `@requires_changing_parents`.
352
350
353 The new dirstate contents will be written to disk when the top-most
351 The new dirstate contents will be written to disk when the top-most
354 `changing_parents` context exits successfully. If an exception is
352 `changing_parents` context exits successfully. If an exception is
355 raised during a `changing_parents` context of any level, all changes
353 raised during a `changing_parents` context of any level, all changes
356 are invalidated. If this context is open within an open transaction,
354 are invalidated. If this context is open within an open transaction,
357 the dirstate writing is delayed until that transaction is successfully
355 the dirstate writing is delayed until that transaction is successfully
358 committed (and the dirstate is invalidated on transaction abort).
356 committed (and the dirstate is invalidated on transaction abort).
359
357
360 The `changing_parents` operation is mutually exclusive with the
358 The `changing_parents` operation is mutually exclusive with the
361 `changing_files` one.
359 `changing_files` one.
362 """
360 """
363 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
361 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
364 yield c
362 yield c
365
363
366 @contextlib.contextmanager
364 @contextlib.contextmanager
367 def changing_files(self, repo):
365 def changing_files(self, repo):
368 """Wrap a dirstate change related to the set of tracked files
366 """Wrap a dirstate change related to the set of tracked files
369
367
370 This context scopes a series of dirstate modifications that change the
368 This context scopes a series of dirstate modifications that change the
371 set of tracked files. (typically `hg add`, `hg remove` etc) or some
369 set of tracked files. (typically `hg add`, `hg remove` etc) or some
372 dirstate stored information (like `hg rename --after`) but preserve
370 dirstate stored information (like `hg rename --after`) but preserve
373 the working copy parents.
371 the working copy parents.
374
372
375 The dirstate's methods that perform this kind of modifications require
373 The dirstate's methods that perform this kind of modifications require
376 this context to be present before being called.
374 this context to be present before being called.
377 Such methods are decorated with `@requires_changing_files`.
375 Such methods are decorated with `@requires_changing_files`.
378
376
379 The new dirstate contents will be written to disk when the top-most
377 The new dirstate contents will be written to disk when the top-most
380 `changing_files` context exits successfully. If an exception is raised
378 `changing_files` context exits successfully. If an exception is raised
381 during a `changing_files` context of any level, all changes are
379 during a `changing_files` context of any level, all changes are
382 invalidated. If this context is open within an open transaction, the
380 invalidated. If this context is open within an open transaction, the
383 dirstate writing is delayed until that transaction is successfully
381 dirstate writing is delayed until that transaction is successfully
384 committed (and the dirstate is invalidated on transaction abort).
382 committed (and the dirstate is invalidated on transaction abort).
385
383
386 The `changing_files` operation is mutually exclusive with the
384 The `changing_files` operation is mutually exclusive with the
387 `changing_parents` one.
385 `changing_parents` one.
388 """
386 """
389 with self._changing(repo, CHANGE_TYPE_FILES) as c:
387 with self._changing(repo, CHANGE_TYPE_FILES) as c:
390 yield c
388 yield c
391
389
392 # here to help migration to the new code
390 # here to help migration to the new code
393 def parentchange(self):
391 def parentchange(self):
394 msg = (
392 msg = (
395 "Mercurial 6.4 and later requires call to "
393 "Mercurial 6.4 and later requires call to "
396 "`dirstate.changing_parents(repo)`"
394 "`dirstate.changing_parents(repo)`"
397 )
395 )
398 raise error.ProgrammingError(msg)
396 raise error.ProgrammingError(msg)
399
397
400 @property
398 @property
401 def is_changing_any(self):
399 def is_changing_any(self):
402 """Returns true if the dirstate is in the middle of a set of changes.
400 """Returns true if the dirstate is in the middle of a set of changes.
403
401
404 This returns True for any kind of change.
402 This returns True for any kind of change.
405 """
403 """
406 return self._changing_level > 0
404 return self._changing_level > 0
407
405
408 @property
406 @property
409 def is_changing_parents(self):
407 def is_changing_parents(self):
410 """Returns true if the dirstate is in the middle of a set of changes
408 """Returns true if the dirstate is in the middle of a set of changes
411 that modify the dirstate parent.
409 that modify the dirstate parent.
412 """
410 """
413 if self._changing_level <= 0:
411 if self._changing_level <= 0:
414 return False
412 return False
415 return self._change_type == CHANGE_TYPE_PARENTS
413 return self._change_type == CHANGE_TYPE_PARENTS
416
414
417 @property
415 @property
418 def is_changing_files(self):
416 def is_changing_files(self):
419 """Returns true if the dirstate is in the middle of a set of changes
417 """Returns true if the dirstate is in the middle of a set of changes
420 that modify the files tracked or their sources.
418 that modify the files tracked or their sources.
421 """
419 """
422 if self._changing_level <= 0:
420 if self._changing_level <= 0:
423 return False
421 return False
424 return self._change_type == CHANGE_TYPE_FILES
422 return self._change_type == CHANGE_TYPE_FILES
425
423
426 @propertycache
424 @propertycache
427 def _map(self):
425 def _map(self):
428 """Return the dirstate contents (see documentation for dirstatemap)."""
426 """Return the dirstate contents (see documentation for dirstatemap)."""
429 return self._mapcls(
427 return self._mapcls(
430 self._ui,
428 self._ui,
431 self._opener,
429 self._opener,
432 self._root,
430 self._root,
433 self._nodeconstants,
431 self._nodeconstants,
434 self._use_dirstate_v2,
432 self._use_dirstate_v2,
435 )
433 )
436
434
437 @property
435 @property
438 def _sparsematcher(self):
436 def _sparsematcher(self):
439 """The matcher for the sparse checkout.
437 """The matcher for the sparse checkout.
440
438
441 The working directory may not include every file from a manifest. The
439 The working directory may not include every file from a manifest. The
442 matcher obtained by this property will match a path if it is to be
440 matcher obtained by this property will match a path if it is to be
443 included in the working directory.
441 included in the working directory.
444
442
445 When sparse if disabled, return None.
443 When sparse if disabled, return None.
446 """
444 """
447 if self._sparsematchfn is None:
445 if self._sparsematchfn is None:
448 return None
446 return None
449 # TODO there is potential to cache this property. For now, the matcher
447 # TODO there is potential to cache this property. For now, the matcher
450 # is resolved on every access. (But the called function does use a
448 # is resolved on every access. (But the called function does use a
451 # cache to keep the lookup fast.)
449 # cache to keep the lookup fast.)
452 return self._sparsematchfn()
450 return self._sparsematchfn()
453
451
454 @repocache(b'branch')
452 @repocache(b'branch')
455 def _branch(self):
453 def _branch(self):
456 f = None
454 f = None
457 data = b''
455 data = b''
458 try:
456 try:
459 f, mode = txnutil.trypending(self._root, self._opener, b'branch')
457 f, mode = txnutil.trypending(self._root, self._opener, b'branch')
460 data = f.read().strip()
458 data = f.read().strip()
461 except FileNotFoundError:
459 except FileNotFoundError:
462 pass
460 pass
463 finally:
461 finally:
464 if f is not None:
462 if f is not None:
465 f.close()
463 f.close()
466 if not data:
464 if not data:
467 return b"default"
465 return b"default"
468 return data
466 return data
469
467
470 @property
468 @property
471 def _pl(self):
469 def _pl(self):
472 return self._map.parents()
470 return self._map.parents()
473
471
474 def hasdir(self, d):
472 def hasdir(self, d):
475 return self._map.hastrackeddir(d)
473 return self._map.hastrackeddir(d)
476
474
477 @rootcache(b'.hgignore')
475 @rootcache(b'.hgignore')
478 def _ignore(self):
476 def _ignore(self):
479 files = self._ignorefiles()
477 files = self._ignorefiles()
480 if not files:
478 if not files:
481 return matchmod.never()
479 return matchmod.never()
482
480
483 pats = [b'include:%s' % f for f in files]
481 pats = [b'include:%s' % f for f in files]
484 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
482 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
485
483
486 @propertycache
484 @propertycache
487 def _slash(self):
485 def _slash(self):
488 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
486 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
489
487
490 @propertycache
488 @propertycache
491 def _checklink(self):
489 def _checklink(self):
492 return util.checklink(self._root)
490 return util.checklink(self._root)
493
491
494 @propertycache
492 @propertycache
495 def _checkexec(self):
493 def _checkexec(self):
496 return bool(util.checkexec(self._root))
494 return bool(util.checkexec(self._root))
497
495
498 @propertycache
496 @propertycache
499 def _checkcase(self):
497 def _checkcase(self):
500 return not util.fscasesensitive(self._join(b'.hg'))
498 return not util.fscasesensitive(self._join(b'.hg'))
501
499
502 def _join(self, f):
500 def _join(self, f):
503 # much faster than os.path.join()
501 # much faster than os.path.join()
504 # it's safe because f is always a relative path
502 # it's safe because f is always a relative path
505 return self._rootdir + f
503 return self._rootdir + f
506
504
507 def flagfunc(self, buildfallback):
505 def flagfunc(self, buildfallback):
508 """build a callable that returns flags associated with a filename
506 """build a callable that returns flags associated with a filename
509
507
510 The information is extracted from three possible layers:
508 The information is extracted from three possible layers:
511 1. the file system if it supports the information
509 1. the file system if it supports the information
512 2. the "fallback" information stored in the dirstate if any
510 2. the "fallback" information stored in the dirstate if any
513 3. a more expensive mechanism inferring the flags from the parents.
511 3. a more expensive mechanism inferring the flags from the parents.
514 """
512 """
515
513
516 # small hack to cache the result of buildfallback()
514 # small hack to cache the result of buildfallback()
517 fallback_func = []
515 fallback_func = []
518
516
519 def get_flags(x):
517 def get_flags(x):
520 entry = None
518 entry = None
521 fallback_value = None
519 fallback_value = None
522 try:
520 try:
523 st = os.lstat(self._join(x))
521 st = os.lstat(self._join(x))
524 except OSError:
522 except OSError:
525 return b''
523 return b''
526
524
527 if self._checklink:
525 if self._checklink:
528 if util.statislink(st):
526 if util.statislink(st):
529 return b'l'
527 return b'l'
530 else:
528 else:
531 entry = self.get_entry(x)
529 entry = self.get_entry(x)
532 if entry.has_fallback_symlink:
530 if entry.has_fallback_symlink:
533 if entry.fallback_symlink:
531 if entry.fallback_symlink:
534 return b'l'
532 return b'l'
535 else:
533 else:
536 if not fallback_func:
534 if not fallback_func:
537 fallback_func.append(buildfallback())
535 fallback_func.append(buildfallback())
538 fallback_value = fallback_func[0](x)
536 fallback_value = fallback_func[0](x)
539 if b'l' in fallback_value:
537 if b'l' in fallback_value:
540 return b'l'
538 return b'l'
541
539
542 if self._checkexec:
540 if self._checkexec:
543 if util.statisexec(st):
541 if util.statisexec(st):
544 return b'x'
542 return b'x'
545 else:
543 else:
546 if entry is None:
544 if entry is None:
547 entry = self.get_entry(x)
545 entry = self.get_entry(x)
548 if entry.has_fallback_exec:
546 if entry.has_fallback_exec:
549 if entry.fallback_exec:
547 if entry.fallback_exec:
550 return b'x'
548 return b'x'
551 else:
549 else:
552 if fallback_value is None:
550 if fallback_value is None:
553 if not fallback_func:
551 if not fallback_func:
554 fallback_func.append(buildfallback())
552 fallback_func.append(buildfallback())
555 fallback_value = fallback_func[0](x)
553 fallback_value = fallback_func[0](x)
556 if b'x' in fallback_value:
554 if b'x' in fallback_value:
557 return b'x'
555 return b'x'
558 return b''
556 return b''
559
557
560 return get_flags
558 return get_flags
561
559
562 @propertycache
560 @propertycache
563 def _cwd(self):
561 def _cwd(self):
564 # internal config: ui.forcecwd
562 # internal config: ui.forcecwd
565 forcecwd = self._ui.config(b'ui', b'forcecwd')
563 forcecwd = self._ui.config(b'ui', b'forcecwd')
566 if forcecwd:
564 if forcecwd:
567 return forcecwd
565 return forcecwd
568 return encoding.getcwd()
566 return encoding.getcwd()
569
567
570 def getcwd(self):
568 def getcwd(self):
571 """Return the path from which a canonical path is calculated.
569 """Return the path from which a canonical path is calculated.
572
570
573 This path should be used to resolve file patterns or to convert
571 This path should be used to resolve file patterns or to convert
574 canonical paths back to file paths for display. It shouldn't be
572 canonical paths back to file paths for display. It shouldn't be
575 used to get real file paths. Use vfs functions instead.
573 used to get real file paths. Use vfs functions instead.
576 """
574 """
577 cwd = self._cwd
575 cwd = self._cwd
578 if cwd == self._root:
576 if cwd == self._root:
579 return b''
577 return b''
580 # self._root ends with a path separator if self._root is '/' or 'C:\'
578 # self._root ends with a path separator if self._root is '/' or 'C:\'
581 rootsep = self._root
579 rootsep = self._root
582 if not util.endswithsep(rootsep):
580 if not util.endswithsep(rootsep):
583 rootsep += pycompat.ossep
581 rootsep += pycompat.ossep
584 if cwd.startswith(rootsep):
582 if cwd.startswith(rootsep):
585 return cwd[len(rootsep) :]
583 return cwd[len(rootsep) :]
586 else:
584 else:
587 # we're outside the repo. return an absolute path.
585 # we're outside the repo. return an absolute path.
588 return cwd
586 return cwd
589
587
590 def pathto(self, f, cwd=None):
588 def pathto(self, f, cwd=None):
591 if cwd is None:
589 if cwd is None:
592 cwd = self.getcwd()
590 cwd = self.getcwd()
593 path = util.pathto(self._root, cwd, f)
591 path = util.pathto(self._root, cwd, f)
594 if self._slash:
592 if self._slash:
595 return util.pconvert(path)
593 return util.pconvert(path)
596 return path
594 return path
597
595
598 def get_entry(self, path):
596 def get_entry(self, path):
599 """return a DirstateItem for the associated path"""
597 """return a DirstateItem for the associated path"""
600 entry = self._map.get(path)
598 entry = self._map.get(path)
601 if entry is None:
599 if entry is None:
602 return DirstateItem()
600 return DirstateItem()
603 return entry
601 return entry
604
602
605 def __contains__(self, key):
603 def __contains__(self, key):
606 return key in self._map
604 return key in self._map
607
605
608 def __iter__(self):
606 def __iter__(self):
609 return iter(sorted(self._map))
607 return iter(sorted(self._map))
610
608
611 def items(self):
609 def items(self):
612 return self._map.items()
610 return self._map.items()
613
611
614 iteritems = items
612 iteritems = items
615
613
616 def parents(self):
614 def parents(self):
617 return [self._validate(p) for p in self._pl]
615 return [self._validate(p) for p in self._pl]
618
616
619 def p1(self):
617 def p1(self):
620 return self._validate(self._pl[0])
618 return self._validate(self._pl[0])
621
619
622 def p2(self):
620 def p2(self):
623 return self._validate(self._pl[1])
621 return self._validate(self._pl[1])
624
622
625 @property
623 @property
626 def in_merge(self):
624 def in_merge(self):
627 """True if a merge is in progress"""
625 """True if a merge is in progress"""
628 return self._pl[1] != self._nodeconstants.nullid
626 return self._pl[1] != self._nodeconstants.nullid
629
627
630 def branch(self):
628 def branch(self):
631 return encoding.tolocal(self._branch)
629 return encoding.tolocal(self._branch)
632
630
633 @requires_changing_parents
631 @requires_changing_parents
634 def setparents(self, p1, p2=None):
632 def setparents(self, p1, p2=None):
635 """Set dirstate parents to p1 and p2.
633 """Set dirstate parents to p1 and p2.
636
634
637 When moving from two parents to one, "merged" entries a
635 When moving from two parents to one, "merged" entries a
638 adjusted to normal and previous copy records discarded and
636 adjusted to normal and previous copy records discarded and
639 returned by the call.
637 returned by the call.
640
638
641 See localrepo.setparents()
639 See localrepo.setparents()
642 """
640 """
643 if p2 is None:
641 if p2 is None:
644 p2 = self._nodeconstants.nullid
642 p2 = self._nodeconstants.nullid
645 if self._changing_level == 0:
643 if self._changing_level == 0:
646 raise ValueError(
644 raise ValueError(
647 "cannot set dirstate parent outside of "
645 "cannot set dirstate parent outside of "
648 "dirstate.changing_parents context manager"
646 "dirstate.changing_parents context manager"
649 )
647 )
650
648
651 self._dirty = True
649 self._dirty = True
652 oldp2 = self._pl[1]
650 oldp2 = self._pl[1]
653 if self._origpl is None:
651 if self._origpl is None:
654 self._origpl = self._pl
652 self._origpl = self._pl
655 nullid = self._nodeconstants.nullid
653 nullid = self._nodeconstants.nullid
656 # True if we need to fold p2 related state back to a linear case
654 # True if we need to fold p2 related state back to a linear case
657 fold_p2 = oldp2 != nullid and p2 == nullid
655 fold_p2 = oldp2 != nullid and p2 == nullid
658 return self._map.setparents(p1, p2, fold_p2=fold_p2)
656 return self._map.setparents(p1, p2, fold_p2=fold_p2)
659
657
660 def setbranch(self, branch, transaction):
658 def setbranch(self, branch, transaction):
661 self.__class__._branch.set(self, encoding.fromlocal(branch))
659 self.__class__._branch.set(self, encoding.fromlocal(branch))
662 if transaction is not None:
660 if transaction is not None:
663 self._setup_tr_abort(transaction)
661 self._setup_tr_abort(transaction)
664 transaction.addfilegenerator(
662 transaction.addfilegenerator(
665 b'dirstate-3-branch%s' % self._tr_key_suffix,
663 b'dirstate-3-branch%s' % self._tr_key_suffix,
666 (b'branch',),
664 (b'branch',),
667 self._write_branch,
665 self._write_branch,
668 location=b'plain',
666 location=b'plain',
669 post_finalize=True,
667 post_finalize=True,
670 )
668 )
671 return
669 return
672
670
673 vfs = self._opener
671 vfs = self._opener
674 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
672 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
675 self._write_branch(f)
673 self._write_branch(f)
676 # make sure filecache has the correct stat info for _branch after
674 # make sure filecache has the correct stat info for _branch after
677 # replacing the underlying file
675 # replacing the underlying file
678 #
676 #
679 # XXX do we actually need this,
677 # XXX do we actually need this,
680 # refreshing the attribute is quite cheap
678 # refreshing the attribute is quite cheap
681 ce = self._filecache[b'_branch']
679 ce = self._filecache[b'_branch']
682 if ce:
680 if ce:
683 ce.refresh()
681 ce.refresh()
684
682
685 def _write_branch(self, file_obj):
683 def _write_branch(self, file_obj):
686 file_obj.write(self._branch + b'\n')
684 file_obj.write(self._branch + b'\n')
687
685
688 def invalidate(self):
686 def invalidate(self):
689 """Causes the next access to reread the dirstate.
687 """Causes the next access to reread the dirstate.
690
688
691 This is different from localrepo.invalidatedirstate() because it always
689 This is different from localrepo.invalidatedirstate() because it always
692 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
690 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
693 check whether the dirstate has changed before rereading it."""
691 check whether the dirstate has changed before rereading it."""
694
692
695 for a in ("_map", "_branch", "_ignore"):
693 for a in ("_map", "_branch", "_ignore"):
696 if a in self.__dict__:
694 if a in self.__dict__:
697 delattr(self, a)
695 delattr(self, a)
698 self._dirty = False
696 self._dirty = False
699 self._dirty_tracked_set = False
697 self._dirty_tracked_set = False
700 self._invalidated_context = bool(
698 self._invalidated_context = bool(
701 self._changing_level > 0
699 self._changing_level > 0
702 or self._attached_to_a_transaction
700 or self._attached_to_a_transaction
703 or self._running_status
701 or self._running_status
704 )
702 )
705 self._origpl = None
703 self._origpl = None
706
704
707 @requires_changing_any
705 @requires_changing_any
708 def copy(self, source, dest):
706 def copy(self, source, dest):
709 """Mark dest as a copy of source. Unmark dest if source is None."""
707 """Mark dest as a copy of source. Unmark dest if source is None."""
710 if source == dest:
708 if source == dest:
711 return
709 return
712 self._dirty = True
710 self._dirty = True
713 if source is not None:
711 if source is not None:
714 self._check_sparse(source)
712 self._check_sparse(source)
715 self._map.copymap[dest] = source
713 self._map.copymap[dest] = source
716 else:
714 else:
717 self._map.copymap.pop(dest, None)
715 self._map.copymap.pop(dest, None)
718
716
719 def copied(self, file):
717 def copied(self, file):
720 return self._map.copymap.get(file, None)
718 return self._map.copymap.get(file, None)
721
719
722 def copies(self):
720 def copies(self):
723 return self._map.copymap
721 return self._map.copymap
724
722
    @requires_changing_files
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # validate the name and check for directory/file clashes before
            # introducing a brand-new tracked entry
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            # the tracked set changed, so the tracked-hint file needs rewriting
            self._dirty_tracked_set = True
        return pre_tracked
746
744
747 @requires_changing_files
745 @requires_changing_files
748 def set_untracked(self, filename):
746 def set_untracked(self, filename):
749 """a "public" method for generic code to mark a file as untracked
747 """a "public" method for generic code to mark a file as untracked
750
748
751 This function is to be called outside of "update/merge" case. For
749 This function is to be called outside of "update/merge" case. For
752 example by a command like `hg remove X`.
750 example by a command like `hg remove X`.
753
751
754 return True the file was previously tracked, False otherwise.
752 return True the file was previously tracked, False otherwise.
755 """
753 """
756 ret = self._map.set_untracked(filename)
754 ret = self._map.set_untracked(filename)
757 if ret:
755 if ret:
758 self._dirty = True
756 self._dirty = True
759 self._dirty_tracked_set = True
757 self._dirty_tracked_set = True
760 return ret
758 return ret
761
759
    @requires_changing_files_or_status
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        # parentfiledata is the (mode, size, mtime) triple observed on disk
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)
770
768
    @requires_changing_files_or_status
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        # flag the dirstate dirty so the dropped validity gets persisted
        self._dirty = True
        self._map.set_possibly_dirty(filename)
776
774
    @requires_changing_parents
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjust the dirstate to a new parent after an history
        rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.changing_parents(repo):` context.
        """
        if self.in_merge:
            msg = 'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
816
814
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.changing_parents` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # the actual work is shared with hacky_extension_update_file
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
846
844
    def hacky_extension_update_file(self, *args, **kwargs):
        """NEVER USE THIS, YOU DO NOT NEED IT

        This function is a variant of "update_file" to be called by a small set
        of extensions, it also adjust the internal state of file, but can be
        called outside an `changing_parents` context.

        A very small number of extension meddle with the working copy content
        in a way that requires to adjust the dirstate accordingly. At the time
        this command is written they are :
        - keyword,
        - largefile,
        PLEASE DO NOT GROW THIS LIST ANY FURTHER.

        This function could probably be replaced by more semantic one (like
        "adjust expected size" or "always revalidate file content", etc)
        however at the time where this is writen, this is too much of a detour
        to be considered.
        """
        # unlike update_file, any "changing" context is acceptable here,
        # not only changing_parents
        if not (self._changing_level > 0 or self._running_status > 0):
            msg = "requires a changes context"
            raise error.ProgrammingError(msg)
        self._update_file(
            *args,
            **kwargs,
        )
873
871
    def _update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """shared implementation behind the public update_file variants"""
        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # tracked set changed: the tracked-hint file must be rewritten too
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
904
902
    def _check_new_tracked_filename(self, filename):
        """validate ``filename`` before it becomes a newly tracked entry

        Aborts if the name is invalid, if a tracked directory of the same
        name exists, or if the name clashes with a tracked file along its
        directory components.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
921
919
922 def _check_sparse(self, filename):
920 def _check_sparse(self, filename):
923 """Check that a filename is inside the sparse profile"""
921 """Check that a filename is inside the sparse profile"""
924 sparsematch = self._sparsematcher
922 sparsematch = self._sparsematcher
925 if sparsematch is not None and not sparsematch.always():
923 if sparsematch is not None and not sparsematch.always():
926 if not sparsematch(filename):
924 if not sparsematch(filename):
927 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
925 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
928 hint = _(
926 hint = _(
929 b'include file with `hg debugsparse --include <pattern>` or use '
927 b'include file with `hg debugsparse --include <pattern>` or use '
930 b'`hg add -s <file>` to include file directory while adding'
928 b'`hg add -s <file>` to include file directory while adding'
931 )
929 )
932 raise error.Abort(msg % filename, hint=hint)
930 raise error.Abort(msg % filename, hint=hint)
933
931
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Resolve the on-disk case of ``path`` and cache it in ``storemap``.

        ``normed`` is the case-normalized form of ``path``; ``exists`` may be
        passed to skip the filesystem existence check.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only existing paths are cached, so missing ones get re-probed
            storemap[normed] = folded

        return folded
959
957
960 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
958 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
961 normed = util.normcase(path)
959 normed = util.normcase(path)
962 folded = self._map.filefoldmap.get(normed, None)
960 folded = self._map.filefoldmap.get(normed, None)
963 if folded is None:
961 if folded is None:
964 if isknown:
962 if isknown:
965 folded = path
963 folded = path
966 else:
964 else:
967 folded = self._discoverpath(
965 folded = self._discoverpath(
968 path, normed, ignoremissing, exists, self._map.filefoldmap
966 path, normed, ignoremissing, exists, self._map.filefoldmap
969 )
967 )
970 return folded
968 return folded
971
969
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Return the dirstate-cased spelling of ``path``, consulting first
        the file fold map and then the directory fold map."""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded
987
985
988 def normalize(self, path, isknown=False, ignoremissing=False):
986 def normalize(self, path, isknown=False, ignoremissing=False):
989 """
987 """
990 normalize the case of a pathname when on a casefolding filesystem
988 normalize the case of a pathname when on a casefolding filesystem
991
989
992 isknown specifies whether the filename came from walking the
990 isknown specifies whether the filename came from walking the
993 disk, to avoid extra filesystem access.
991 disk, to avoid extra filesystem access.
994
992
995 If ignoremissing is True, missing path are returned
993 If ignoremissing is True, missing path are returned
996 unchanged. Otherwise, we try harder to normalize possibly
994 unchanged. Otherwise, we try harder to normalize possibly
997 existing path components.
995 existing path components.
998
996
999 The normalized case is determined based on the following precedence:
997 The normalized case is determined based on the following precedence:
1000
998
1001 - version of name already stored in the dirstate
999 - version of name already stored in the dirstate
1002 - version of name stored on disk
1000 - version of name stored on disk
1003 - version provided via command arguments
1001 - version provided via command arguments
1004 """
1002 """
1005
1003
1006 if self._checkcase:
1004 if self._checkcase:
1007 return self._normalize(path, isknown, ignoremissing)
1005 return self._normalize(path, isknown, ignoremissing)
1008 return path
1006 return path
1009
1007
    # XXX this method is barely used, as a result:
    # - its semantic is unclear
    # - do we really needs it ?
    @requires_changing_parents
    def clear(self):
        """drop every entry from the dirstate map and flag it for writing"""
        self._map.clear()
        self._dirty = True
1017
1015
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """rebuild the dirstate against ``parent``

        ``allfiles`` lists the manifest files; when ``changedfiles`` is
        given, only those entries are refreshed or dropped.
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the previous parents for the parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
1070
1068
1071 def _setup_tr_abort(self, tr):
1069 def _setup_tr_abort(self, tr):
1072 """make sure we invalidate the current change on abort"""
1070 """make sure we invalidate the current change on abort"""
1073 if tr is None:
1071 if tr is None:
1074 return
1072 return
1075
1073
1076 def on_abort(tr):
1074 def on_abort(tr):
1077 self._attached_to_a_transaction = False
1075 self._attached_to_a_transaction = False
1078 self.invalidate()
1076 self.invalidate()
1079
1077
1080 tr.addabort(
1078 tr.addabort(
1081 b'dirstate-invalidate%s' % self._tr_key_suffix,
1079 b'dirstate-invalidate%s' % self._tr_key_suffix,
1082 on_abort,
1080 on_abort,
1083 )
1081 )
1084
1082
1085 def write(self, tr):
1083 def write(self, tr):
1086 if not self._dirty:
1084 if not self._dirty:
1087 return
1085 return
1088 # make sure we don't request a write of invalidated content
1086 # make sure we don't request a write of invalidated content
1089 # XXX move before the dirty check once `unlock` stop calling `write`
1087 # XXX move before the dirty check once `unlock` stop calling `write`
1090 assert not self._invalidated_context
1088 assert not self._invalidated_context
1091
1089
1092 write_key = self._use_tracked_hint and self._dirty_tracked_set
1090 write_key = self._use_tracked_hint and self._dirty_tracked_set
1093 if tr:
1091 if tr:
1094 self._setup_tr_abort(tr)
1092 self._setup_tr_abort(tr)
1095 self._attached_to_a_transaction = True
1093 self._attached_to_a_transaction = True
1096
1094
1097 def on_success(f):
1095 def on_success(f):
1098 self._attached_to_a_transaction = False
1096 self._attached_to_a_transaction = False
1099 self._writedirstate(tr, f),
1097 self._writedirstate(tr, f),
1100
1098
1101 # delay writing in-memory changes out
1099 # delay writing in-memory changes out
1102 tr.addfilegenerator(
1100 tr.addfilegenerator(
1103 b'dirstate-1-main%s' % self._tr_key_suffix,
1101 b'dirstate-1-main%s' % self._tr_key_suffix,
1104 (self._filename,),
1102 (self._filename,),
1105 on_success,
1103 on_success,
1106 location=b'plain',
1104 location=b'plain',
1107 post_finalize=True,
1105 post_finalize=True,
1108 )
1106 )
1109 if write_key:
1107 if write_key:
1110 tr.addfilegenerator(
1108 tr.addfilegenerator(
1111 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1109 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1112 (self._filename_th,),
1110 (self._filename_th,),
1113 lambda f: self._write_tracked_hint(tr, f),
1111 lambda f: self._write_tracked_hint(tr, f),
1114 location=b'plain',
1112 location=b'plain',
1115 post_finalize=True,
1113 post_finalize=True,
1116 )
1114 )
1117 return
1115 return
1118
1116
1119 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1117 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1120 with file(self._filename) as f:
1118 with file(self._filename) as f:
1121 self._writedirstate(tr, f)
1119 self._writedirstate(tr, f)
1122 if write_key:
1120 if write_key:
1123 # we update the key-file after writing to make sure reader have a
1121 # we update the key-file after writing to make sure reader have a
1124 # key that match the newly written content
1122 # key that match the newly written content
1125 with file(self._filename_th) as f:
1123 with file(self._filename_th) as f:
1126 self._write_tracked_hint(tr, f)
1124 self._write_tracked_hint(tr, f)
1127
1125
    def delete_tracked_hint(self):
        """remove the tracked_hint file

        To be used by format downgrades operation"""
        self._opener.unlink(self._filename_th)
        # stop maintaining the hint from now on
        self._use_tracked_hint = False
1134
1132
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # later registrations under the same category replace earlier ones
        self._plchangecallbacks[category] = callback
1145
1143
    def _writedirstate(self, tr, st):
        """serialize the dirstate map into stream ``st`` and reset dirtiness"""
        # make sure we don't write invalidated content
        assert not self._invalidated_context
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() keeps the callback invocation order deterministic
            for c, callback in sorted(self._plchangecallbacks.items()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        self._map.write(tr, st)
        self._dirty = False
        self._dirty_tracked_set = False
1157
1155
    def _write_tracked_hint(self, tr, f):
        """write a fresh, unique tracked-hint key to ``f``"""
        key = node.hex(uuid.uuid4().bytes)
        f.write(b"1\n%s\n" % key)  # 1 is the format version
1161
1159
1162 def _dirignore(self, f):
1160 def _dirignore(self, f):
1163 if self._ignore(f):
1161 if self._ignore(f):
1164 return True
1162 return True
1165 for p in pathutil.finddirs(f):
1163 for p in pathutil.finddirs(f):
1166 if self._ignore(p):
1164 if self._ignore(p):
1167 return True
1165 return True
1168 return False
1166 return False
1169
1167
1170 def _ignorefiles(self):
1168 def _ignorefiles(self):
1171 files = []
1169 files = []
1172 if os.path.exists(self._join(b'.hgignore')):
1170 if os.path.exists(self._join(b'.hgignore')):
1173 files.append(self._join(b'.hgignore'))
1171 files.append(self._join(b'.hgignore'))
1174 for name, path in self._ui.configitems(b"ui"):
1172 for name, path in self._ui.configitems(b"ui"):
1175 if name == b'ignore' or name.startswith(b'ignore.'):
1173 if name == b'ignore' or name.startswith(b'ignore.'):
1176 # we need to use os.path.join here rather than self._join
1174 # we need to use os.path.join here rather than self._join
1177 # because path is arbitrary and user-specified
1175 # because path is arbitrary and user-specified
1178 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1176 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1179 return files
1177 return files
1180
1178
    def _ignorefileandline(self, f):
        """Return (file, lineno, line) of the first ignore pattern matching
        ``f``, following subincludes; (None, -1, b"") when nothing matches."""
        # breadth-first walk over the ignore files, so subincluded files are
        # consulted after the file that pulled them in
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced file once; it carries no match itself
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1202
1200
1203 def _walkexplicit(self, match, subrepos):
1201 def _walkexplicit(self, match, subrepos):
1204 """Get stat data about the files explicitly specified by match.
1202 """Get stat data about the files explicitly specified by match.
1205
1203
1206 Return a triple (results, dirsfound, dirsnotfound).
1204 Return a triple (results, dirsfound, dirsnotfound).
1207 - results is a mapping from filename to stat result. It also contains
1205 - results is a mapping from filename to stat result. It also contains
1208 listings mapping subrepos and .hg to None.
1206 listings mapping subrepos and .hg to None.
1209 - dirsfound is a list of files found to be directories.
1207 - dirsfound is a list of files found to be directories.
1210 - dirsnotfound is a list of files that the dirstate thinks are
1208 - dirsnotfound is a list of files that the dirstate thinks are
1211 directories and that were not found."""
1209 directories and that were not found."""
1212
1210
1213 def badtype(mode):
1211 def badtype(mode):
1214 kind = _(b'unknown')
1212 kind = _(b'unknown')
1215 if stat.S_ISCHR(mode):
1213 if stat.S_ISCHR(mode):
1216 kind = _(b'character device')
1214 kind = _(b'character device')
1217 elif stat.S_ISBLK(mode):
1215 elif stat.S_ISBLK(mode):
1218 kind = _(b'block device')
1216 kind = _(b'block device')
1219 elif stat.S_ISFIFO(mode):
1217 elif stat.S_ISFIFO(mode):
1220 kind = _(b'fifo')
1218 kind = _(b'fifo')
1221 elif stat.S_ISSOCK(mode):
1219 elif stat.S_ISSOCK(mode):
1222 kind = _(b'socket')
1220 kind = _(b'socket')
1223 elif stat.S_ISDIR(mode):
1221 elif stat.S_ISDIR(mode):
1224 kind = _(b'directory')
1222 kind = _(b'directory')
1225 return _(b'unsupported file type (type is %s)') % kind
1223 return _(b'unsupported file type (type is %s)') % kind
1226
1224
1227 badfn = match.bad
1225 badfn = match.bad
1228 dmap = self._map
1226 dmap = self._map
1229 lstat = os.lstat
1227 lstat = os.lstat
1230 getkind = stat.S_IFMT
1228 getkind = stat.S_IFMT
1231 dirkind = stat.S_IFDIR
1229 dirkind = stat.S_IFDIR
1232 regkind = stat.S_IFREG
1230 regkind = stat.S_IFREG
1233 lnkkind = stat.S_IFLNK
1231 lnkkind = stat.S_IFLNK
1234 join = self._join
1232 join = self._join
1235 dirsfound = []
1233 dirsfound = []
1236 foundadd = dirsfound.append
1234 foundadd = dirsfound.append
1237 dirsnotfound = []
1235 dirsnotfound = []
1238 notfoundadd = dirsnotfound.append
1236 notfoundadd = dirsnotfound.append
1239
1237
1240 if not match.isexact() and self._checkcase:
1238 if not match.isexact() and self._checkcase:
1241 normalize = self._normalize
1239 normalize = self._normalize
1242 else:
1240 else:
1243 normalize = None
1241 normalize = None
1244
1242
1245 files = sorted(match.files())
1243 files = sorted(match.files())
1246 subrepos.sort()
1244 subrepos.sort()
1247 i, j = 0, 0
1245 i, j = 0, 0
1248 while i < len(files) and j < len(subrepos):
1246 while i < len(files) and j < len(subrepos):
1249 subpath = subrepos[j] + b"/"
1247 subpath = subrepos[j] + b"/"
1250 if files[i] < subpath:
1248 if files[i] < subpath:
1251 i += 1
1249 i += 1
1252 continue
1250 continue
1253 while i < len(files) and files[i].startswith(subpath):
1251 while i < len(files) and files[i].startswith(subpath):
1254 del files[i]
1252 del files[i]
1255 j += 1
1253 j += 1
1256
1254
1257 if not files or b'' in files:
1255 if not files or b'' in files:
1258 files = [b'']
1256 files = [b'']
1259 # constructing the foldmap is expensive, so don't do it for the
1257 # constructing the foldmap is expensive, so don't do it for the
1260 # common case where files is ['']
1258 # common case where files is ['']
1261 normalize = None
1259 normalize = None
1262 results = dict.fromkeys(subrepos)
1260 results = dict.fromkeys(subrepos)
1263 results[b'.hg'] = None
1261 results[b'.hg'] = None
1264
1262
1265 for ff in files:
1263 for ff in files:
1266 if normalize:
1264 if normalize:
1267 nf = normalize(ff, False, True)
1265 nf = normalize(ff, False, True)
1268 else:
1266 else:
1269 nf = ff
1267 nf = ff
1270 if nf in results:
1268 if nf in results:
1271 continue
1269 continue
1272
1270
1273 try:
1271 try:
1274 st = lstat(join(nf))
1272 st = lstat(join(nf))
1275 kind = getkind(st.st_mode)
1273 kind = getkind(st.st_mode)
1276 if kind == dirkind:
1274 if kind == dirkind:
1277 if nf in dmap:
1275 if nf in dmap:
1278 # file replaced by dir on disk but still in dirstate
1276 # file replaced by dir on disk but still in dirstate
1279 results[nf] = None
1277 results[nf] = None
1280 foundadd((nf, ff))
1278 foundadd((nf, ff))
1281 elif kind == regkind or kind == lnkkind:
1279 elif kind == regkind or kind == lnkkind:
1282 results[nf] = st
1280 results[nf] = st
1283 else:
1281 else:
1284 badfn(ff, badtype(kind))
1282 badfn(ff, badtype(kind))
1285 if nf in dmap:
1283 if nf in dmap:
1286 results[nf] = None
1284 results[nf] = None
1287 except OSError as inst:
1285 except OSError as inst:
1288 # nf not found on disk - it is dirstate only
1286 # nf not found on disk - it is dirstate only
1289 if nf in dmap: # does it exactly match a missing file?
1287 if nf in dmap: # does it exactly match a missing file?
1290 results[nf] = None
1288 results[nf] = None
1291 else: # does it match a missing directory?
1289 else: # does it match a missing directory?
1292 if self._map.hasdir(nf):
1290 if self._map.hasdir(nf):
1293 notfoundadd(nf)
1291 notfoundadd(nf)
1294 else:
1292 else:
1295 badfn(ff, encoding.strtolocal(inst.strerror))
1293 badfn(ff, encoding.strtolocal(inst.strerror))
1296
1294
1297 # match.files() may contain explicitly-specified paths that shouldn't
1295 # match.files() may contain explicitly-specified paths that shouldn't
1298 # be taken; drop them from the list of files found. dirsfound/notfound
1296 # be taken; drop them from the list of files found. dirsfound/notfound
1299 # aren't filtered here because they will be tested later.
1297 # aren't filtered here because they will be tested later.
1300 if match.anypats():
1298 if match.anypats():
1301 for f in list(results):
1299 for f in list(results):
1302 if f == b'.hg' or f in subrepos:
1300 if f == b'.hg' or f in subrepos:
1303 # keep sentinel to disable further out-of-repo walks
1301 # keep sentinel to disable further out-of-repo walks
1304 continue
1302 continue
1305 if not match(f):
1303 if not match(f):
1306 del results[f]
1304 del results[f]
1307
1305
1308 # Case insensitive filesystems cannot rely on lstat() failing to detect
1306 # Case insensitive filesystems cannot rely on lstat() failing to detect
1309 # a case-only rename. Prune the stat object for any file that does not
1307 # a case-only rename. Prune the stat object for any file that does not
1310 # match the case in the filesystem, if there are multiple files that
1308 # match the case in the filesystem, if there are multiple files that
1311 # normalize to the same path.
1309 # normalize to the same path.
1312 if match.isexact() and self._checkcase:
1310 if match.isexact() and self._checkcase:
1313 normed = {}
1311 normed = {}
1314
1312
1315 for f, st in results.items():
1313 for f, st in results.items():
1316 if st is None:
1314 if st is None:
1317 continue
1315 continue
1318
1316
1319 nc = util.normcase(f)
1317 nc = util.normcase(f)
1320 paths = normed.get(nc)
1318 paths = normed.get(nc)
1321
1319
1322 if paths is None:
1320 if paths is None:
1323 paths = set()
1321 paths = set()
1324 normed[nc] = paths
1322 normed[nc] = paths
1325
1323
1326 paths.add(f)
1324 paths.add(f)
1327
1325
1328 for norm, paths in normed.items():
1326 for norm, paths in normed.items():
1329 if len(paths) > 1:
1327 if len(paths) > 1:
1330 for path in paths:
1328 for path in paths:
1331 folded = self._discoverpath(
1329 folded = self._discoverpath(
1332 path, norm, True, None, self._map.dirfoldmap
1330 path, norm, True, None, self._map.dirfoldmap
1333 )
1331 )
1334 if path != folded:
1332 if path != folded:
1335 results[path] = None
1333 results[path] = None
1336
1334
1337 return results, dirsfound, dirsnotfound
1335 return results, dirsfound, dirsnotfound
1338
1336
1339 def walk(self, match, subrepos, unknown, ignored, full=True):
1337 def walk(self, match, subrepos, unknown, ignored, full=True):
1340 """
1338 """
1341 Walk recursively through the directory tree, finding all files
1339 Walk recursively through the directory tree, finding all files
1342 matched by match.
1340 matched by match.
1343
1341
1344 If full is False, maybe skip some known-clean files.
1342 If full is False, maybe skip some known-clean files.
1345
1343
1346 Return a dict mapping filename to stat-like object (either
1344 Return a dict mapping filename to stat-like object (either
1347 mercurial.osutil.stat instance or return value of os.stat()).
1345 mercurial.osutil.stat instance or return value of os.stat()).
1348
1346
1349 """
1347 """
1350 # full is a flag that extensions that hook into walk can use -- this
1348 # full is a flag that extensions that hook into walk can use -- this
1351 # implementation doesn't use it at all. This satisfies the contract
1349 # implementation doesn't use it at all. This satisfies the contract
1352 # because we only guarantee a "maybe".
1350 # because we only guarantee a "maybe".
1353
1351
1354 if ignored:
1352 if ignored:
1355 ignore = util.never
1353 ignore = util.never
1356 dirignore = util.never
1354 dirignore = util.never
1357 elif unknown:
1355 elif unknown:
1358 ignore = self._ignore
1356 ignore = self._ignore
1359 dirignore = self._dirignore
1357 dirignore = self._dirignore
1360 else:
1358 else:
1361 # if not unknown and not ignored, drop dir recursion and step 2
1359 # if not unknown and not ignored, drop dir recursion and step 2
1362 ignore = util.always
1360 ignore = util.always
1363 dirignore = util.always
1361 dirignore = util.always
1364
1362
1365 if self._sparsematchfn is not None:
1363 if self._sparsematchfn is not None:
1366 em = matchmod.exact(match.files())
1364 em = matchmod.exact(match.files())
1367 sm = matchmod.unionmatcher([self._sparsematcher, em])
1365 sm = matchmod.unionmatcher([self._sparsematcher, em])
1368 match = matchmod.intersectmatchers(match, sm)
1366 match = matchmod.intersectmatchers(match, sm)
1369
1367
1370 matchfn = match.matchfn
1368 matchfn = match.matchfn
1371 matchalways = match.always()
1369 matchalways = match.always()
1372 matchtdir = match.traversedir
1370 matchtdir = match.traversedir
1373 dmap = self._map
1371 dmap = self._map
1374 listdir = util.listdir
1372 listdir = util.listdir
1375 lstat = os.lstat
1373 lstat = os.lstat
1376 dirkind = stat.S_IFDIR
1374 dirkind = stat.S_IFDIR
1377 regkind = stat.S_IFREG
1375 regkind = stat.S_IFREG
1378 lnkkind = stat.S_IFLNK
1376 lnkkind = stat.S_IFLNK
1379 join = self._join
1377 join = self._join
1380
1378
1381 exact = skipstep3 = False
1379 exact = skipstep3 = False
1382 if match.isexact(): # match.exact
1380 if match.isexact(): # match.exact
1383 exact = True
1381 exact = True
1384 dirignore = util.always # skip step 2
1382 dirignore = util.always # skip step 2
1385 elif match.prefix(): # match.match, no patterns
1383 elif match.prefix(): # match.match, no patterns
1386 skipstep3 = True
1384 skipstep3 = True
1387
1385
1388 if not exact and self._checkcase:
1386 if not exact and self._checkcase:
1389 normalize = self._normalize
1387 normalize = self._normalize
1390 normalizefile = self._normalizefile
1388 normalizefile = self._normalizefile
1391 skipstep3 = False
1389 skipstep3 = False
1392 else:
1390 else:
1393 normalize = self._normalize
1391 normalize = self._normalize
1394 normalizefile = None
1392 normalizefile = None
1395
1393
1396 # step 1: find all explicit files
1394 # step 1: find all explicit files
1397 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1395 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1398 if matchtdir:
1396 if matchtdir:
1399 for d in work:
1397 for d in work:
1400 matchtdir(d[0])
1398 matchtdir(d[0])
1401 for d in dirsnotfound:
1399 for d in dirsnotfound:
1402 matchtdir(d)
1400 matchtdir(d)
1403
1401
1404 skipstep3 = skipstep3 and not (work or dirsnotfound)
1402 skipstep3 = skipstep3 and not (work or dirsnotfound)
1405 work = [d for d in work if not dirignore(d[0])]
1403 work = [d for d in work if not dirignore(d[0])]
1406
1404
1407 # step 2: visit subdirectories
1405 # step 2: visit subdirectories
1408 def traverse(work, alreadynormed):
1406 def traverse(work, alreadynormed):
1409 wadd = work.append
1407 wadd = work.append
1410 while work:
1408 while work:
1411 tracing.counter('dirstate.walk work', len(work))
1409 tracing.counter('dirstate.walk work', len(work))
1412 nd = work.pop()
1410 nd = work.pop()
1413 visitentries = match.visitchildrenset(nd)
1411 visitentries = match.visitchildrenset(nd)
1414 if not visitentries:
1412 if not visitentries:
1415 continue
1413 continue
1416 if visitentries == b'this' or visitentries == b'all':
1414 if visitentries == b'this' or visitentries == b'all':
1417 visitentries = None
1415 visitentries = None
1418 skip = None
1416 skip = None
1419 if nd != b'':
1417 if nd != b'':
1420 skip = b'.hg'
1418 skip = b'.hg'
1421 try:
1419 try:
1422 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1420 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1423 entries = listdir(join(nd), stat=True, skip=skip)
1421 entries = listdir(join(nd), stat=True, skip=skip)
1424 except (PermissionError, FileNotFoundError) as inst:
1422 except (PermissionError, FileNotFoundError) as inst:
1425 match.bad(
1423 match.bad(
1426 self.pathto(nd), encoding.strtolocal(inst.strerror)
1424 self.pathto(nd), encoding.strtolocal(inst.strerror)
1427 )
1425 )
1428 continue
1426 continue
1429 for f, kind, st in entries:
1427 for f, kind, st in entries:
1430 # Some matchers may return files in the visitentries set,
1428 # Some matchers may return files in the visitentries set,
1431 # instead of 'this', if the matcher explicitly mentions them
1429 # instead of 'this', if the matcher explicitly mentions them
1432 # and is not an exactmatcher. This is acceptable; we do not
1430 # and is not an exactmatcher. This is acceptable; we do not
1433 # make any hard assumptions about file-or-directory below
1431 # make any hard assumptions about file-or-directory below
1434 # based on the presence of `f` in visitentries. If
1432 # based on the presence of `f` in visitentries. If
1435 # visitchildrenset returned a set, we can always skip the
1433 # visitchildrenset returned a set, we can always skip the
1436 # entries *not* in the set it provided regardless of whether
1434 # entries *not* in the set it provided regardless of whether
1437 # they're actually a file or a directory.
1435 # they're actually a file or a directory.
1438 if visitentries and f not in visitentries:
1436 if visitentries and f not in visitentries:
1439 continue
1437 continue
1440 if normalizefile:
1438 if normalizefile:
1441 # even though f might be a directory, we're only
1439 # even though f might be a directory, we're only
1442 # interested in comparing it to files currently in the
1440 # interested in comparing it to files currently in the
1443 # dmap -- therefore normalizefile is enough
1441 # dmap -- therefore normalizefile is enough
1444 nf = normalizefile(
1442 nf = normalizefile(
1445 nd and (nd + b"/" + f) or f, True, True
1443 nd and (nd + b"/" + f) or f, True, True
1446 )
1444 )
1447 else:
1445 else:
1448 nf = nd and (nd + b"/" + f) or f
1446 nf = nd and (nd + b"/" + f) or f
1449 if nf not in results:
1447 if nf not in results:
1450 if kind == dirkind:
1448 if kind == dirkind:
1451 if not ignore(nf):
1449 if not ignore(nf):
1452 if matchtdir:
1450 if matchtdir:
1453 matchtdir(nf)
1451 matchtdir(nf)
1454 wadd(nf)
1452 wadd(nf)
1455 if nf in dmap and (matchalways or matchfn(nf)):
1453 if nf in dmap and (matchalways or matchfn(nf)):
1456 results[nf] = None
1454 results[nf] = None
1457 elif kind == regkind or kind == lnkkind:
1455 elif kind == regkind or kind == lnkkind:
1458 if nf in dmap:
1456 if nf in dmap:
1459 if matchalways or matchfn(nf):
1457 if matchalways or matchfn(nf):
1460 results[nf] = st
1458 results[nf] = st
1461 elif (matchalways or matchfn(nf)) and not ignore(
1459 elif (matchalways or matchfn(nf)) and not ignore(
1462 nf
1460 nf
1463 ):
1461 ):
1464 # unknown file -- normalize if necessary
1462 # unknown file -- normalize if necessary
1465 if not alreadynormed:
1463 if not alreadynormed:
1466 nf = normalize(nf, False, True)
1464 nf = normalize(nf, False, True)
1467 results[nf] = st
1465 results[nf] = st
1468 elif nf in dmap and (matchalways or matchfn(nf)):
1466 elif nf in dmap and (matchalways or matchfn(nf)):
1469 results[nf] = None
1467 results[nf] = None
1470
1468
1471 for nd, d in work:
1469 for nd, d in work:
1472 # alreadynormed means that processwork doesn't have to do any
1470 # alreadynormed means that processwork doesn't have to do any
1473 # expensive directory normalization
1471 # expensive directory normalization
1474 alreadynormed = not normalize or nd == d
1472 alreadynormed = not normalize or nd == d
1475 traverse([d], alreadynormed)
1473 traverse([d], alreadynormed)
1476
1474
1477 for s in subrepos:
1475 for s in subrepos:
1478 del results[s]
1476 del results[s]
1479 del results[b'.hg']
1477 del results[b'.hg']
1480
1478
1481 # step 3: visit remaining files from dmap
1479 # step 3: visit remaining files from dmap
1482 if not skipstep3 and not exact:
1480 if not skipstep3 and not exact:
1483 # If a dmap file is not in results yet, it was either
1481 # If a dmap file is not in results yet, it was either
1484 # a) not matching matchfn b) ignored, c) missing, or d) under a
1482 # a) not matching matchfn b) ignored, c) missing, or d) under a
1485 # symlink directory.
1483 # symlink directory.
1486 if not results and matchalways:
1484 if not results and matchalways:
1487 visit = [f for f in dmap]
1485 visit = [f for f in dmap]
1488 else:
1486 else:
1489 visit = [f for f in dmap if f not in results and matchfn(f)]
1487 visit = [f for f in dmap if f not in results and matchfn(f)]
1490 visit.sort()
1488 visit.sort()
1491
1489
1492 if unknown:
1490 if unknown:
1493 # unknown == True means we walked all dirs under the roots
1491 # unknown == True means we walked all dirs under the roots
1494 # that wasn't ignored, and everything that matched was stat'ed
1492 # that wasn't ignored, and everything that matched was stat'ed
1495 # and is already in results.
1493 # and is already in results.
1496 # The rest must thus be ignored or under a symlink.
1494 # The rest must thus be ignored or under a symlink.
1497 audit_path = pathutil.pathauditor(self._root, cached=True)
1495 audit_path = pathutil.pathauditor(self._root, cached=True)
1498
1496
1499 for nf in iter(visit):
1497 for nf in iter(visit):
1500 # If a stat for the same file was already added with a
1498 # If a stat for the same file was already added with a
1501 # different case, don't add one for this, since that would
1499 # different case, don't add one for this, since that would
1502 # make it appear as if the file exists under both names
1500 # make it appear as if the file exists under both names
1503 # on disk.
1501 # on disk.
1504 if (
1502 if (
1505 normalizefile
1503 normalizefile
1506 and normalizefile(nf, True, True) in results
1504 and normalizefile(nf, True, True) in results
1507 ):
1505 ):
1508 results[nf] = None
1506 results[nf] = None
1509 # Report ignored items in the dmap as long as they are not
1507 # Report ignored items in the dmap as long as they are not
1510 # under a symlink directory.
1508 # under a symlink directory.
1511 elif audit_path.check(nf):
1509 elif audit_path.check(nf):
1512 try:
1510 try:
1513 results[nf] = lstat(join(nf))
1511 results[nf] = lstat(join(nf))
1514 # file was just ignored, no links, and exists
1512 # file was just ignored, no links, and exists
1515 except OSError:
1513 except OSError:
1516 # file doesn't exist
1514 # file doesn't exist
1517 results[nf] = None
1515 results[nf] = None
1518 else:
1516 else:
1519 # It's either missing or under a symlink directory
1517 # It's either missing or under a symlink directory
1520 # which we in this case report as missing
1518 # which we in this case report as missing
1521 results[nf] = None
1519 results[nf] = None
1522 else:
1520 else:
1523 # We may not have walked the full directory tree above,
1521 # We may not have walked the full directory tree above,
1524 # so stat and check everything we missed.
1522 # so stat and check everything we missed.
1525 iv = iter(visit)
1523 iv = iter(visit)
1526 for st in util.statfiles([join(i) for i in visit]):
1524 for st in util.statfiles([join(i) for i in visit]):
1527 results[next(iv)] = st
1525 results[next(iv)] = st
1528 return results
1526 return results
1529
1527
1530 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1528 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1531 if self._sparsematchfn is not None:
1529 if self._sparsematchfn is not None:
1532 em = matchmod.exact(matcher.files())
1530 em = matchmod.exact(matcher.files())
1533 sm = matchmod.unionmatcher([self._sparsematcher, em])
1531 sm = matchmod.unionmatcher([self._sparsematcher, em])
1534 matcher = matchmod.intersectmatchers(matcher, sm)
1532 matcher = matchmod.intersectmatchers(matcher, sm)
1535 # Force Rayon (Rust parallelism library) to respect the number of
1533 # Force Rayon (Rust parallelism library) to respect the number of
1536 # workers. This is a temporary workaround until Rust code knows
1534 # workers. This is a temporary workaround until Rust code knows
1537 # how to read the config file.
1535 # how to read the config file.
1538 numcpus = self._ui.configint(b"worker", b"numcpus")
1536 numcpus = self._ui.configint(b"worker", b"numcpus")
1539 if numcpus is not None:
1537 if numcpus is not None:
1540 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1538 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1541
1539
1542 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1540 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1543 if not workers_enabled:
1541 if not workers_enabled:
1544 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1542 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1545
1543
1546 (
1544 (
1547 lookup,
1545 lookup,
1548 modified,
1546 modified,
1549 added,
1547 added,
1550 removed,
1548 removed,
1551 deleted,
1549 deleted,
1552 clean,
1550 clean,
1553 ignored,
1551 ignored,
1554 unknown,
1552 unknown,
1555 warnings,
1553 warnings,
1556 bad,
1554 bad,
1557 traversed,
1555 traversed,
1558 dirty,
1556 dirty,
1559 ) = rustmod.status(
1557 ) = rustmod.status(
1560 self._map._map,
1558 self._map._map,
1561 matcher,
1559 matcher,
1562 self._rootdir,
1560 self._rootdir,
1563 self._ignorefiles(),
1561 self._ignorefiles(),
1564 self._checkexec,
1562 self._checkexec,
1565 bool(list_clean),
1563 bool(list_clean),
1566 bool(list_ignored),
1564 bool(list_ignored),
1567 bool(list_unknown),
1565 bool(list_unknown),
1568 bool(matcher.traversedir),
1566 bool(matcher.traversedir),
1569 )
1567 )
1570
1568
1571 self._dirty |= dirty
1569 self._dirty |= dirty
1572
1570
1573 if matcher.traversedir:
1571 if matcher.traversedir:
1574 for dir in traversed:
1572 for dir in traversed:
1575 matcher.traversedir(dir)
1573 matcher.traversedir(dir)
1576
1574
1577 if self._ui.warn:
1575 if self._ui.warn:
1578 for item in warnings:
1576 for item in warnings:
1579 if isinstance(item, tuple):
1577 if isinstance(item, tuple):
1580 file_path, syntax = item
1578 file_path, syntax = item
1581 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1579 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1582 file_path,
1580 file_path,
1583 syntax,
1581 syntax,
1584 )
1582 )
1585 self._ui.warn(msg)
1583 self._ui.warn(msg)
1586 else:
1584 else:
1587 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1585 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1588 self._ui.warn(
1586 self._ui.warn(
1589 msg
1587 msg
1590 % (
1588 % (
1591 pathutil.canonpath(
1589 pathutil.canonpath(
1592 self._rootdir, self._rootdir, item
1590 self._rootdir, self._rootdir, item
1593 ),
1591 ),
1594 b"No such file or directory",
1592 b"No such file or directory",
1595 )
1593 )
1596 )
1594 )
1597
1595
1598 for fn, message in sorted(bad):
1596 for fn, message in sorted(bad):
1599 matcher.bad(fn, encoding.strtolocal(message))
1597 matcher.bad(fn, encoding.strtolocal(message))
1600
1598
1601 status = scmutil.status(
1599 status = scmutil.status(
1602 modified=modified,
1600 modified=modified,
1603 added=added,
1601 added=added,
1604 removed=removed,
1602 removed=removed,
1605 deleted=deleted,
1603 deleted=deleted,
1606 unknown=unknown,
1604 unknown=unknown,
1607 ignored=ignored,
1605 ignored=ignored,
1608 clean=clean,
1606 clean=clean,
1609 )
1607 )
1610 return (lookup, status)
1608 return (lookup, status)
1611
1609
1612 def status(self, match, subrepos, ignored, clean, unknown):
1610 def status(self, match, subrepos, ignored, clean, unknown):
1613 """Determine the status of the working copy relative to the
1611 """Determine the status of the working copy relative to the
1614 dirstate and return a pair of (unsure, status), where status is of type
1612 dirstate and return a pair of (unsure, status), where status is of type
1615 scmutil.status and:
1613 scmutil.status and:
1616
1614
1617 unsure:
1615 unsure:
1618 files that might have been modified since the dirstate was
1616 files that might have been modified since the dirstate was
1619 written, but need to be read to be sure (size is the same
1617 written, but need to be read to be sure (size is the same
1620 but mtime differs)
1618 but mtime differs)
1621 status.modified:
1619 status.modified:
1622 files that have definitely been modified since the dirstate
1620 files that have definitely been modified since the dirstate
1623 was written (different size or mode)
1621 was written (different size or mode)
1624 status.clean:
1622 status.clean:
1625 files that have definitely not been modified since the
1623 files that have definitely not been modified since the
1626 dirstate was written
1624 dirstate was written
1627 """
1625 """
1628 if not self._running_status:
1626 if not self._running_status:
1629 msg = "Calling `status` outside a `running_status` context"
1627 msg = "Calling `status` outside a `running_status` context"
1630 raise error.ProgrammingError(msg)
1628 raise error.ProgrammingError(msg)
1631 listignored, listclean, listunknown = ignored, clean, unknown
1629 listignored, listclean, listunknown = ignored, clean, unknown
1632 lookup, modified, added, unknown, ignored = [], [], [], [], []
1630 lookup, modified, added, unknown, ignored = [], [], [], [], []
1633 removed, deleted, clean = [], [], []
1631 removed, deleted, clean = [], [], []
1634
1632
1635 dmap = self._map
1633 dmap = self._map
1636 dmap.preload()
1634 dmap.preload()
1637
1635
1638 use_rust = True
1636 use_rust = True
1639
1637
1640 if rustmod is None:
1638 if rustmod is None:
1641 use_rust = False
1639 use_rust = False
1642 elif self._checkcase:
1640 elif self._checkcase:
1643 # Case-insensitive filesystems are not handled yet
1641 # Case-insensitive filesystems are not handled yet
1644 use_rust = False
1642 use_rust = False
1645 elif subrepos:
1643 elif subrepos:
1646 use_rust = False
1644 use_rust = False
1647
1645
1648 # Get the time from the filesystem so we can disambiguate files that
1646 # Get the time from the filesystem so we can disambiguate files that
1649 # appear modified in the present or future.
1647 # appear modified in the present or future.
1650 try:
1648 try:
1651 mtime_boundary = timestamp.get_fs_now(self._opener)
1649 mtime_boundary = timestamp.get_fs_now(self._opener)
1652 except OSError:
1650 except OSError:
1653 # In largefiles or readonly context
1651 # In largefiles or readonly context
1654 mtime_boundary = None
1652 mtime_boundary = None
1655
1653
1656 if use_rust:
1654 if use_rust:
1657 try:
1655 try:
1658 res = self._rust_status(
1656 res = self._rust_status(
1659 match, listclean, listignored, listunknown
1657 match, listclean, listignored, listunknown
1660 )
1658 )
1661 return res + (mtime_boundary,)
1659 return res + (mtime_boundary,)
1662 except rustmod.FallbackError:
1660 except rustmod.FallbackError:
1663 pass
1661 pass
1664
1662
1665 def noop(f):
1663 def noop(f):
1666 pass
1664 pass
1667
1665
1668 dcontains = dmap.__contains__
1666 dcontains = dmap.__contains__
1669 dget = dmap.__getitem__
1667 dget = dmap.__getitem__
1670 ladd = lookup.append # aka "unsure"
1668 ladd = lookup.append # aka "unsure"
1671 madd = modified.append
1669 madd = modified.append
1672 aadd = added.append
1670 aadd = added.append
1673 uadd = unknown.append if listunknown else noop
1671 uadd = unknown.append if listunknown else noop
1674 iadd = ignored.append if listignored else noop
1672 iadd = ignored.append if listignored else noop
1675 radd = removed.append
1673 radd = removed.append
1676 dadd = deleted.append
1674 dadd = deleted.append
1677 cadd = clean.append if listclean else noop
1675 cadd = clean.append if listclean else noop
1678 mexact = match.exact
1676 mexact = match.exact
1679 dirignore = self._dirignore
1677 dirignore = self._dirignore
1680 checkexec = self._checkexec
1678 checkexec = self._checkexec
1681 checklink = self._checklink
1679 checklink = self._checklink
1682 copymap = self._map.copymap
1680 copymap = self._map.copymap
1683
1681
1684 # We need to do full walks when either
1682 # We need to do full walks when either
1685 # - we're listing all clean files, or
1683 # - we're listing all clean files, or
1686 # - match.traversedir does something, because match.traversedir should
1684 # - match.traversedir does something, because match.traversedir should
1687 # be called for every dir in the working dir
1685 # be called for every dir in the working dir
1688 full = listclean or match.traversedir is not None
1686 full = listclean or match.traversedir is not None
1689 for fn, st in self.walk(
1687 for fn, st in self.walk(
1690 match, subrepos, listunknown, listignored, full=full
1688 match, subrepos, listunknown, listignored, full=full
1691 ).items():
1689 ).items():
1692 if not dcontains(fn):
1690 if not dcontains(fn):
1693 if (listignored or mexact(fn)) and dirignore(fn):
1691 if (listignored or mexact(fn)) and dirignore(fn):
1694 if listignored:
1692 if listignored:
1695 iadd(fn)
1693 iadd(fn)
1696 else:
1694 else:
1697 uadd(fn)
1695 uadd(fn)
1698 continue
1696 continue
1699
1697
1700 t = dget(fn)
1698 t = dget(fn)
1701 mode = t.mode
1699 mode = t.mode
1702 size = t.size
1700 size = t.size
1703
1701
1704 if not st and t.tracked:
1702 if not st and t.tracked:
1705 dadd(fn)
1703 dadd(fn)
1706 elif t.p2_info:
1704 elif t.p2_info:
1707 madd(fn)
1705 madd(fn)
1708 elif t.added:
1706 elif t.added:
1709 aadd(fn)
1707 aadd(fn)
1710 elif t.removed:
1708 elif t.removed:
1711 radd(fn)
1709 radd(fn)
1712 elif t.tracked:
1710 elif t.tracked:
1713 if not checklink and t.has_fallback_symlink:
1711 if not checklink and t.has_fallback_symlink:
1714 # If the file system does not support symlink, the mode
1712 # If the file system does not support symlink, the mode
1715 # might not be correctly stored in the dirstate, so do not
1713 # might not be correctly stored in the dirstate, so do not
1716 # trust it.
1714 # trust it.
1717 ladd(fn)
1715 ladd(fn)
1718 elif not checkexec and t.has_fallback_exec:
1716 elif not checkexec and t.has_fallback_exec:
1719 # If the file system does not support exec bits, the mode
1717 # If the file system does not support exec bits, the mode
1720 # might not be correctly stored in the dirstate, so do not
1718 # might not be correctly stored in the dirstate, so do not
1721 # trust it.
1719 # trust it.
1722 ladd(fn)
1720 ladd(fn)
1723 elif (
1721 elif (
1724 size >= 0
1722 size >= 0
1725 and (
1723 and (
1726 (size != st.st_size and size != st.st_size & _rangemask)
1724 (size != st.st_size and size != st.st_size & _rangemask)
1727 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1725 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1728 )
1726 )
1729 or fn in copymap
1727 or fn in copymap
1730 ):
1728 ):
1731 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1729 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1732 # issue6456: Size returned may be longer due to
1730 # issue6456: Size returned may be longer due to
1733 # encryption on EXT-4 fscrypt, undecided.
1731 # encryption on EXT-4 fscrypt, undecided.
1734 ladd(fn)
1732 ladd(fn)
1735 else:
1733 else:
1736 madd(fn)
1734 madd(fn)
1737 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1735 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1738 # There might be a change in the future if for example the
1736 # There might be a change in the future if for example the
1739 # internal clock is off, but this is a case where the issues
1737 # internal clock is off, but this is a case where the issues
1740 # the user would face would be a lot worse and there is
1738 # the user would face would be a lot worse and there is
1741 # nothing we can really do.
1739 # nothing we can really do.
1742 ladd(fn)
1740 ladd(fn)
1743 elif listclean:
1741 elif listclean:
1744 cadd(fn)
1742 cadd(fn)
1745 status = scmutil.status(
1743 status = scmutil.status(
1746 modified, added, removed, deleted, unknown, ignored, clean
1744 modified, added, removed, deleted, unknown, ignored, clean
1747 )
1745 )
1748 return (lookup, status, mtime_boundary)
1746 return (lookup, status, mtime_boundary)
1749
1747
def matches(self, match):
    """Return all filenames tracked in the dirstate (in whatever state)
    that *match* accepts."""
    # When the Rust extensions are active the Python-level map is a thin
    # wrapper; drop down to the underlying mapping for direct lookups.
    entries = self._map._map if rustmod is not None else self._map

    if match.always():
        # Everything matches -- hand back the key view directly.
        return entries.keys()

    wanted = match.files()
    if match.isexact():
        # Fast path: intersect the (typically much smaller) explicit file
        # list with the map instead of scanning the whole map.
        return [name for name in wanted if name in entries]
    if match.prefix() and all(name in entries for name in wanted):
        # Fast path: every pattern is a literal filename already present,
        # so the answer is exactly the requested file list.
        return list(wanted)
    # Slow path: run every tracked name through the matcher.
    return [name for name in entries if match(name)]
1768
def all_file_names(self):
    """Return every filename currently used by this dirstate, as a tuple.

    This is only used for the `hg rollback` related backup done inside
    the transaction.
    """
    # The branch file always belongs to the dirstate's on-disk footprint.
    extra = ()
    if self._opener.exists(self._filename):
        if self._use_dirstate_v2:
            # dirstate-v2 keeps the bulk of the data in a separate file
            # named by the docket.
            extra = (self._filename, self._map.docket.data_filename())
        else:
            extra = (self._filename,)
    return (b'branch',) + extra
1782
1780
def verify(self, m1, m2, p1, narrow_matcher=None):
    """
    check the dirstate contents against the parent manifest and yield errors

    ``m1``/``m2`` are the first and second parent manifests, ``p1`` is the
    first parent node (used only to render its short hash in messages).
    ``narrow_matcher``, when given, restricts the manifest-side sweep to
    files it accepts.  Each inconsistency is yielded as a formatted bytes
    message; an empty iteration means the dirstate and manifests agree.
    """
    # Pre-translate the message templates once, outside the loops.
    missing_from_p1 = _(
        b"%s marked as tracked in p1 (%s) but not in manifest1\n"
    )
    unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
    missing_from_ps = _(
        b"%s marked as modified, but not in either manifest\n"
    )
    missing_from_ds = _(
        b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
    )
    # Pass 1: walk the dirstate and flag entries that claim parent
    # tracking the manifests do not back up.
    for f, entry in self.items():
        if entry.p1_tracked:
            if entry.modified and f not in m1 and f not in m2:
                # Modified implies it existed in some parent.
                yield missing_from_ps % f
            elif f not in m1:
                yield missing_from_p1 % (f, node.short(p1))
        if entry.added and f in m1:
            # "Added" means new relative to p1, so p1's manifest must
            # not already contain it.
            yield unexpected_in_p1 % f
    # Pass 2: walk manifest1 and flag files the dirstate does not track
    # as coming from p1 (skipping files outside the narrow spec).
    for f in m1:
        if narrow_matcher is not None and not narrow_matcher(f):
            continue
        entry = self.get_entry(f)
        if not entry.p1_tracked:
            yield missing_from_ds % (f, node.short(p1))
# NOTE(review): per the surrounding change, the zope-style
# ``interfaceutil.implementer(intdirstate.idirstate)`` wrapper was removed
# when the dirstate became a subclass of the new Protocol class; the alias
# below exists only so static type checkers resolve the module-level
# ``dirstate`` name to the concrete implementation.  Confirm the runtime
# binding of ``dirstate`` happens elsewhere in the module.
if typing.TYPE_CHECKING:
    dirstate = DirState
General Comments 0
You need to be logged in to leave comments. Login now