##// END OF EJS Templates
dirstate: add an explicit `from_p2` parameter to `_addpath`...
marmoute -
r48281:1f571077 default
parent child Browse files
Show More
@@ -1,1995 +1,2008 b''
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import os
14 14 import stat
15 15
16 16 from .i18n import _
17 17 from .pycompat import delattr
18 18
19 19 from hgdemandimport import tracing
20 20
21 21 from . import (
22 22 encoding,
23 23 error,
24 24 match as matchmod,
25 25 pathutil,
26 26 policy,
27 27 pycompat,
28 28 scmutil,
29 29 sparse,
30 30 txnutil,
31 31 util,
32 32 )
33 33
34 34 from .interfaces import (
35 35 dirstate as intdirstate,
36 36 util as interfaceutil,
37 37 )
38 38
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# dirstate-v2 is only available through the Rust extensions
SUPPORTS_DIRSTATE_V2 = rustmod is not None

propertycache = util.propertycache
filecache = scmutil.filecache
# 31-bit mask applied to size/mtime values before they are stored
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple


# a special value used internally for `size` if the file comes from the other parent
FROM_P2 = -2

# a special value used internally for `size` if the file is modified/merged/added
NONNORMAL = -1

# a special value used internally for `time` if the time is ambiguous
AMBIGUOUS_TIME = -1
59 59
60 60
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname through the repository's .hg opener
        return obj._opener.join(fname)
66 66
67 67
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the repository root
        return obj._join(fname)
73 73
74 74
75 75 def _getfsnow(vfs):
76 76 '''Get "now" timestamp on filesystem'''
77 77 tmpfd, tmpname = vfs.mkstemp()
78 78 try:
79 79 return os.fstat(tmpfd)[stat.ST_MTIME]
80 80 finally:
81 81 os.close(tmpfd)
82 82 vfs.unlink(tmpname)
83 83
84 84
85 85 @interfaceutil.implementer(intdirstate.idirstate)
86 86 class dirstate(object):
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True when in-memory state differs from what is on disk
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # nesting level of active parentchange() context managers
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        # files touched since the last write; used when clearing ambiguous
        # timestamps at write time
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
127 127
128 128 def prefetch_parents(self):
129 129 """make sure the parents are loaded
130 130
131 131 Used to avoid a race condition.
132 132 """
133 133 self._pl
134 134
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
151 151
152 152 def pendingparentchange(self):
153 153 """Returns true if the dirstate is in the middle of a set of changes
154 154 that modify the dirstate parent.
155 155 """
156 156 return self._parentwriters > 0
157 157
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # the explicit assignment fills the propertycache slot, so the map
        # is only constructed once per dirstate instance
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
169 169
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
182 182
    @repocache(b'branch')
    def _branch(self):
        # the branch name is stored in plain text in .hg/branch; fall back
        # to b"default" when the file is missing or empty
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return b"default"
191 191
    @property
    def _pl(self):
        # the (p1, p2) parent pair as recorded in the dirstate map
        return self._map.parents()
195 195
    def hasdir(self, d):
        # delegated to the dirstate map's tracked-directory index
        return self._map.hastrackeddir(d)
198 198
199 199 @rootcache(b'.hgignore')
200 200 def _ignore(self):
201 201 files = self._ignorefiles()
202 202 if not files:
203 203 return matchmod.never()
204 204
205 205 pats = [b'include:%s' % f for f in files]
206 206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
207 207
    @propertycache
    def _slash(self):
        # True when paths should be shown with '/' although the native
        # separator differs (ui.slash option)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
211 211
    @propertycache
    def _checklink(self):
        # whether the filesystem at the repository root supports symlinks
        return util.checklink(self._root)
215 215
    @propertycache
    def _checkexec(self):
        # whether the filesystem at the repository root honors the exec bit
        return bool(util.checkexec(self._root))
219 219
    @propertycache
    def _checkcase(self):
        # True when the filesystem is case-insensitive (probed against '.hg')
        return not util.fscasesensitive(self._join(b'.hg'))
223 223
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
228 228
    def flagfunc(self, buildfallback):
        """Return a function mapping a file name to its b'l'/b'x'/b'' flags.

        When the filesystem cannot express symlinks or the exec bit, the
        missing flag is looked up via the fallback function returned by
        buildfallback().
        """
        if self._checklink and self._checkexec:
            # filesystem supports both symlinks and the exec bit:
            # a single lstat answers everything
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks are supported but not the exec bit
            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:
            # the exec bit is supported but not symlinks
            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            # neither is supported: rely entirely on the fallback
            return fallback
268 268
269 269 @propertycache
270 270 def _cwd(self):
271 271 # internal config: ui.forcecwd
272 272 forcecwd = self._ui.config(b'ui', b'forcecwd')
273 273 if forcecwd:
274 274 return forcecwd
275 275 return encoding.getcwd()
276 276
    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # inside the repo: return the repo-relative portion
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
296 296
297 297 def pathto(self, f, cwd=None):
298 298 if cwd is None:
299 299 cwd = self.getcwd()
300 300 path = util.pathto(self._root, cwd, f)
301 301 if self._slash:
302 302 return util.pconvert(path)
303 303 return path
304 304
    def __getitem__(self, key):
        """Return the current state of key (a filename) in the dirstate.

        States are:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked
        """
        # untracked files fall back to the b'?' state
        return self._map.get(key, (b"?",))[0]
316 316
    def __contains__(self, key):
        # membership test is delegated to the dirstate map
        return key in self._map
319 319
    def __iter__(self):
        # iterate tracked file names in sorted order
        return iter(sorted(self._map))
322 322
    def items(self):
        # yield (filename, dirstate entry) pairs
        return pycompat.iteritems(self._map)

    # alias kept for callers using the historical iteritems name
    iteritems = items
327 327
    def directories(self):
        # delegated to the dirstate map
        return self._map.directories()
330 330
    def parents(self):
        # both parents, run through the validate callback
        return [self._validate(p) for p in self._pl]
333 333
    def p1(self):
        # validated first parent
        return self._validate(self._pl[0])
336 336
    def p2(self):
        # validated second parent
        return self._validate(self._pl[1])
339 339
    def branch(self):
        # branch name converted to the local encoding
        return encoding.tolocal(self._branch)
342 342
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        # leaving a merge: merge/otherparent entries must be rewritten as
        # plain entries, collecting their copy sources along the way
        if (
            oldp2 != self._nodeconstants.nullid
            and p2 == self._nodeconstants.nullid
        ):
            candidatefiles = self._map.non_normal_or_other_parent_paths()

            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == b'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == b'n' and s[2] == FROM_P2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
390 390
    def setbranch(self, branch):
        """Persist `branch` (given in local encoding) to .hg/branch."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise
406 406
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop cached properties so they are recomputed on next access
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None
422 422
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            # remember both ends of the copy as updated files
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            self._updatedfiles.add(dest)
434 434
    def copied(self, file):
        # copy source of `file`, or None if it is not a copy destination
        return self._map.copymap.get(file, None)
437 437
    def copies(self):
        # the full {destination: source} copy map
        return self._map.copymap
440 440
441 def _addpath(self, f, state, mode, size=NONNORMAL, mtime=AMBIGUOUS_TIME):
441 def _addpath(
442 self,
443 f,
444 state,
445 mode,
446 size=NONNORMAL,
447 mtime=AMBIGUOUS_TIME,
448 from_p2=False,
449 ):
442 450 oldstate = self[f]
443 451 if state == b'a' or oldstate == b'r':
444 452 scmutil.checkfilename(f)
445 453 if self._map.hastrackeddir(f):
446 454 msg = _(b'directory %r already in dirstate')
447 455 msg %= pycompat.bytestr(f)
448 456 raise error.Abort(msg)
449 457 # shadows
450 458 for d in pathutil.finddirs(f):
451 459 if self._map.hastrackeddir(d):
452 460 break
453 461 entry = self._map.get(d)
454 462 if entry is not None and entry[0] != b'r':
455 463 msg = _(b'file %r in dirstate clashes with %r')
456 464 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
457 465 raise error.Abort(msg)
458 if size != NONNORMAL and size != FROM_P2:
459 size = size & _rangemask
460 if mtime != AMBIGUOUS_TIME:
461 mtime = mtime & _rangemask
466 if from_p2:
467 size = FROM_P2
468 mtime = AMBIGUOUS_TIME
469 else:
470 assert size != FROM_P2
471 if size != NONNORMAL:
472 size = size & _rangemask
473 if mtime != AMBIGUOUS_TIME:
474 mtime = mtime & _rangemask
462 475 self._dirty = True
463 476 self._updatedfiles.add(f)
464 477 self._map.addfile(f, oldstate, state, mode, size, mtime)
465 478
    def normal(self, f, parentfiledata=None):
        """Mark a file normal and clean.

        parentfiledata: (mode, size, mtime) of the clean file

        parentfiledata should be computed from memory (for mode,
        size), as or close as possible from the point where we
        determined the file was clean, to limit the risk of the
        file having been changed by an external process between the
        moment where the file was determined to be clean and now."""
        if parentfiledata:
            (mode, size, mtime) = parentfiledata
        else:
            # no cached stat data: look at the file ourselves
            s = os.lstat(self._join(f))
            mode = s.st_mode
            size = s.st_size
            mtime = s[stat.ST_MTIME]
        self._addpath(f, b'n', mode, size, mtime)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
492 505
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != self._nodeconstants.nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
                    source = self._map.copymap.get(f)
                    if entry[2] == NONNORMAL:
                        self.merge(f)
                    elif entry[2] == FROM_P2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                # merge and otherparent entries survive unchanged during
                # a merge
                if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
                    return
        self._addpath(f, b'n', 0)
        self._map.copymap.pop(f, None)
514 527
515 528 def otherparent(self, f):
516 529 '''Mark as coming from the other parent, always dirty.'''
517 530 if self._pl[1] == self._nodeconstants.nullid:
518 531 msg = _(b"setting %r to other parent only allowed in merges") % f
519 532 raise error.Abort(msg)
520 533 if f in self and self[f] == b'n':
521 534 # merge-like
522 self._addpath(f, b'm', 0, FROM_P2)
535 self._addpath(f, b'm', 0, from_p2=True)
523 536 else:
524 537 # add-like
525 self._addpath(f, b'n', 0, FROM_P2)
538 self._addpath(f, b'n', 0, from_p2=True)
526 539 self._map.copymap.pop(f, None)
527 540
    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, b'a', 0)
        # drop any stale copy record for this path
        self._map.copymap.pop(f, None)
532 545
    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != self._nodeconstants.nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == b'm':  # merge
                    size = NONNORMAL
                elif entry[0] == b'n' and entry[2] == FROM_P2:  # other parent
                    size = FROM_P2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            # plain removal: any copy record is no longer meaningful
            self._map.copymap.pop(f, None)
551 564
552 565 def merge(self, f):
553 566 '''Mark a file merged.'''
554 567 if self._pl[1] == self._nodeconstants.nullid:
555 568 return self.normallookup(f)
556 569 return self.otherparent(f)
557 570
558 571 def drop(self, f):
559 572 '''Drop a file from the dirstate'''
560 573 oldstate = self[f]
561 574 if self._map.dropfile(f, oldstate):
562 575 self._dirty = True
563 576 self._updatedfiles.add(f)
564 577 self._map.copymap.pop(f, None)
565 578
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Find the on-disk casing of `path` and cache it in `storemap`."""
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
        storemap[normed] = folded

        return folded
591 604
592 605 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
593 606 normed = util.normcase(path)
594 607 folded = self._map.filefoldmap.get(normed, None)
595 608 if folded is None:
596 609 if isknown:
597 610 folded = path
598 611 else:
599 612 folded = self._discoverpath(
600 613 path, normed, ignoremissing, exists, self._map.filefoldmap
601 614 )
602 615 return folded
603 616
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Case-normalize `path`, consulting files and directories."""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded
619 632
    def normalize(self, path, isknown=False, ignoremissing=False):
        """
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        """

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        # case-sensitive filesystem: the path is already canonical
        return path
641 654
    def clear(self):
        """Forget all tracked files, leaving an empty (and dirty) dirstate."""
        self._map.clear()
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True
647 660
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Reset the dirstate to `parent`, refreshing `changedfiles` entries.

        With no `changedfiles`, the whole dirstate is rebuilt from `allfiles`.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            self.normallookup(f)
        for f in to_drop:
            self.drop(f)

        self._dirty = True
681 694
    def identity(self):
        """Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        return self._map.identity
689 702
    def write(self, tr):
        """Write pending changes out, or schedule them on transaction `tr`."""
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
721 734
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
        dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback
732 745
    def _writedirstate(self, st):
        """Serialize the dirstate map into the open file-like object `st`."""
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e[0] == b'n' and e[3] == now:
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
766 779
767 780 def _dirignore(self, f):
768 781 if self._ignore(f):
769 782 return True
770 783 for p in pathutil.finddirs(f):
771 784 if self._ignore(p):
772 785 return True
773 786 return False
774 787
775 788 def _ignorefiles(self):
776 789 files = []
777 790 if os.path.exists(self._join(b'.hgignore')):
778 791 files.append(self._join(b'.hgignore'))
779 792 for name, path in self._ui.configitems(b"ui"):
780 793 if name == b'ignore' or name.startswith(b'ignore.'):
781 794 # we need to use os.path.join here rather than self._join
782 795 # because path is arbitrary and user-specified
783 796 files.append(os.path.join(self._rootdir, util.expandpath(path)))
784 797 return files
785 798
    def _ignorefileandline(self, f):
        """Return (file, lineno, line) of the first ignore pattern matching f.

        Returns (None, -1, b"") when no pattern matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue included pattern files, guarding against cycles
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
807 820
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for an unsupported file type
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        badfn = match.bad
        dmap = self._map
        # hoist frequently used callables/constants into locals for the loop
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a listed subrepo
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
942 955
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Select the ignore predicates based on what the caller wants listed:
        # listing ignored files disables ignore filtering entirely, while
        # listing neither unknown nor ignored lets us treat everything as
        # ignored and skip directory recursion (step 2) below.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # Bind frequently used attributes/functions to locals; these are hit
        # once per directory entry in the hot loop inside traverse().
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        # On case-insensitive filesystems we must normalize paths found on
        # disk before comparing them to the dirstate, and cannot skip step 3.
        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # `work` is a stack of directories still to list; `alreadynormed`
            # tells us whether path normalization can be skipped for entries
            # found under these directories.
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            # tracked file of some other kind (e.g. fifo):
                            # report it without a stat result
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1130 1143
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status through the Rust implementation.

        Returns the same ``(lookup, status)`` pair as ``status()``.  The
        caller is expected to catch ``rustmod.FallbackError`` and fall back
        to the pure-Python path when the Rust code cannot handle the request.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._rustmap,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust side may have mutated its map (e.g. mtime fixups); make
        # sure the dirstate gets written back out
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        # NOTE(review): `self._ui.warn` is a bound method and therefore always
        # truthy, so this condition never skips the loop -- confirm whether a
        # quiet/verbosity check was intended here.
        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1209 1222
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # rename the boolean flags; the original names are reused below for
        # the result lists
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # Try the fast Rust path unless some unsupported feature is in play.
        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                pass

        def noop(f):
            pass

        # Bind bound methods to locals: these run once per walked file.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                # file on disk but absent from the dirstate: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in b"nma":
                # tracked but missing from disk
                dadd(fn)
            elif state == b'n':
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or size == FROM_P2  # other parent
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == b'm':
                madd(fn)
            elif state == b'a':
                aadd(fn)
            elif state == b'r':
                radd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)
1351 1364
1352 1365 def matches(self, match):
1353 1366 """
1354 1367 return files in the dirstate (in whatever state) filtered by match
1355 1368 """
1356 1369 dmap = self._map
1357 1370 if rustmod is not None:
1358 1371 dmap = self._map._rustmap
1359 1372
1360 1373 if match.always():
1361 1374 return dmap.keys()
1362 1375 files = match.files()
1363 1376 if match.isexact():
1364 1377 # fast path -- filter the other way around, since typically files is
1365 1378 # much smaller than dmap
1366 1379 return [f for f in files if f in dmap]
1367 1380 if match.prefix() and all(fn in dmap for fn in files):
1368 1381 # fast path -- all the values are known to be files, so just return
1369 1382 # that
1370 1383 return list(files)
1371 1384 return [f for f in dmap if match(f)]
1372 1385
1373 1386 def _actualfilename(self, tr):
1374 1387 if tr:
1375 1388 return self._pendingfilename
1376 1389 else:
1377 1390 return self._filename
1378 1391
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        ``tr`` is the active transaction (or None); ``backupname`` is the
        name, relative to ``self._opener``, to copy the dirstate to.  The
        backup is made via hardlink, which is safe because the source file
        is always rewritten atomically (see below).
        '''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                self._opener(filename, b"w", atomictemp=True, checkambig=True)
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1416 1429
1417 1430 def restorebackup(self, tr, backupname):
1418 1431 '''Restore dirstate by backup file'''
1419 1432 # this "invalidate()" prevents "wlock.release()" from writing
1420 1433 # changes of dirstate out after restoring from backup file
1421 1434 self.invalidate()
1422 1435 filename = self._actualfilename(tr)
1423 1436 o = self._opener
1424 1437 if util.samefile(o.join(backupname), o.join(filename)):
1425 1438 o.unlink(backupname)
1426 1439 else:
1427 1440 o.rename(backupname, filename, checkambig=True)
1428 1441
    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        # `tr` is unused here; it is accepted for interface symmetry with
        # savebackup()/restorebackup().
        self._opener.unlink(backupname)
1432 1445
1433 1446
class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict. File state is updated by calling the `addfile`, `removefile` and
      `dropfile` methods.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.
    """

    def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = b'dirstate'
        self._nodelen = 20
        self._nodeconstants = nodeconstants
        # this pure-Python class only understands dirstate-v1; the v2 format
        # requires the Rust implementation further down in this module
        assert (
            not use_dirstate_v2
        ), "should have detected unsupported requirement"

        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        # assign before read() so that read() can fill this very dict; the
        # assignment also replaces this propertycache on the instance
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        # loading the main map (via read()) also populates self.copymap
        self.copymap = {}
        self._map
        return self.copymap

    def directories(self):
        # Rust / dirstate-v2 only
        return []

    def clear(self):
        """Reset the map to an empty dirstate with null parents."""
        self._map.clear()
        self.copymap.clear()
        self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
        util.clearcachedproperty(self, b"_dirs")
        util.clearcachedproperty(self, b"_alldirs")
        util.clearcachedproperty(self, b"filefoldmap")
        util.clearcachedproperty(self, b"dirfoldmap")
        util.clearcachedproperty(self, b"nonnormalset")
        util.clearcachedproperty(self, b"otherparentset")

    def items(self):
        return pycompat.iteritems(self._map)

    # forward for python2,3 compat
    iteritems = items

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
        # keep the lazily-computed directory caches in sync with the change
        if oldstate in b"?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != b'n' or mtime == AMBIGUOUS_TIME:
            self.nonnormalset.add(f)
        if size == FROM_P2:
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
        if oldstate not in b"?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            # a previously-untracked file now has an entry (state 'r'), so
            # its directories join the "all entries" cache
            self._alldirs.addpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self._map[f] = dirstatetuple(b'r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate.  Returns True if the file was
        previously recorded.
        """
        exists = self._map.pop(f, None) is not None
        if exists:
            if oldstate != b"r" and "_dirs" in self.__dict__:
                self._dirs.delpath(f)
            if "_alldirs" in self.__dict__:
                self._alldirs.delpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self.nonnormalset.discard(f)
        return exists

    def clearambiguoustimes(self, files, now):
        """Force a later lookup for each normal file whose mtime equals
        ``now``, since it may still change within the same second."""
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == b'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            # fast path: C extension computes both sets in one pass
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in pycompat.iteritems(self._map):
                if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
                    nonnorm.add(fname)
                if e[0] == b'n' and e[2] == FROM_P2:
                    otherparent.add(fname)
            return nonnorm, otherparent

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(
                self._map, util.normcasespec, util.normcasefallback
            )

        f = {}
        normcase = util.normcase
        for name, s in pycompat.iteritems(self._map):
            if s[0] != b'r':
                f[normcase(name)] = name
        f[b'.'] = b'.' # prevents useless util.fspath() invocation
        return f

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        # directories containing tracked files only ('r' entries skipped)
        return pathutil.dirs(self._map, b'r')

    @propertycache
    def _alldirs(self):
        # directories containing any entry, including removed ('r') ones
        return pathutil.dirs(self._map)

    def _opendirstatefile(self):
        """Open the dirstate file, honoring any pending (in-transaction)
        version, and abort if the pending mode changed between calls."""
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(
                _(b'working directory state may be changed parallelly')
            )
        self._pendingmode = mode
        return fp

    def parents(self):
        """Return (p1, p2) of the working copy, reading only the parent
        header from disk on first access."""
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(2 * self._nodelen)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == self._nodelen * 2:
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        """Parse the on-disk dirstate into self._map and self.copymap."""
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, b'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. This trades wasting some memory for avoiding costly
            # resizes. Each entry have a prefix of 17 bytes followed by one or
            # two path names. Studies on various large-scale real-world repositories
            # found 54 bytes a reasonable upper limit for the average path names.
            # Copy entries are ignored for the sake of this estimate.
            self._map = parsers.dict_new_presized(len(st) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, st, now):
        """Serialize the dirstate to the open file ``st`` and close it;
        ``now`` is the wall-clock time used to detect ambiguous mtimes."""
        st.write(
            parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
        )
        st.close()
        self._dirtyparents = False
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        # computing one set yields both; cache the sibling as a side effect
        nonnorm, otherparents = self.nonnormalentries()
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        # computing one set yields both; cache the sibling as a side effect
        nonnorm, otherparents = self.nonnormalentries()
        self.nonnormalset = nonnorm
        return otherparents

    def non_normal_or_other_parent_paths(self):
        return self.nonnormalset.union(self.otherparentset)

    @propertycache
    def identity(self):
        # reading the map records the file identity as a side effect
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        # normalized-case -> on-disk-case map for tracked directories
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f
1781 1794
1782 1795
1783 1796 if rustmod is not None:
1784 1797
1785 1798 class dirstatemap(object):
1786 1799 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1787 1800 self._use_dirstate_v2 = use_dirstate_v2
1788 1801 self._nodeconstants = nodeconstants
1789 1802 self._ui = ui
1790 1803 self._opener = opener
1791 1804 self._root = root
1792 1805 self._filename = b'dirstate'
1793 1806 self._nodelen = 20 # Also update Rust code when changing this!
1794 1807 self._parents = None
1795 1808 self._dirtyparents = False
1796 1809
1797 1810 # for consistent view between _pl() and _read() invocations
1798 1811 self._pendingmode = None
1799 1812
1800 1813 self._use_dirstate_tree = self._ui.configbool(
1801 1814 b"experimental",
1802 1815 b"dirstate-tree.in-memory",
1803 1816 False,
1804 1817 )
1805 1818
1806 1819 def addfile(self, *args, **kwargs):
1807 1820 return self._rustmap.addfile(*args, **kwargs)
1808 1821
1809 1822 def removefile(self, *args, **kwargs):
1810 1823 return self._rustmap.removefile(*args, **kwargs)
1811 1824
1812 1825 def dropfile(self, *args, **kwargs):
1813 1826 return self._rustmap.dropfile(*args, **kwargs)
1814 1827
1815 1828 def clearambiguoustimes(self, *args, **kwargs):
1816 1829 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1817 1830
1818 1831 def nonnormalentries(self):
1819 1832 return self._rustmap.nonnormalentries()
1820 1833
1821 1834 def get(self, *args, **kwargs):
1822 1835 return self._rustmap.get(*args, **kwargs)
1823 1836
1824 1837 @property
1825 1838 def copymap(self):
1826 1839 return self._rustmap.copymap()
1827 1840
1828 1841 def directories(self):
1829 1842 return self._rustmap.directories()
1830 1843
1831 1844 def preload(self):
1832 1845 self._rustmap
1833 1846
1834 1847 def clear(self):
1835 1848 self._rustmap.clear()
1836 1849 self.setparents(
1837 1850 self._nodeconstants.nullid, self._nodeconstants.nullid
1838 1851 )
1839 1852 util.clearcachedproperty(self, b"_dirs")
1840 1853 util.clearcachedproperty(self, b"_alldirs")
1841 1854 util.clearcachedproperty(self, b"dirfoldmap")
1842 1855
1843 1856 def items(self):
1844 1857 return self._rustmap.items()
1845 1858
1846 1859 def keys(self):
1847 1860 return iter(self._rustmap)
1848 1861
1849 1862 def __contains__(self, key):
1850 1863 return key in self._rustmap
1851 1864
1852 1865 def __getitem__(self, item):
1853 1866 return self._rustmap[item]
1854 1867
1855 1868 def __len__(self):
1856 1869 return len(self._rustmap)
1857 1870
1858 1871 def __iter__(self):
1859 1872 return iter(self._rustmap)
1860 1873
    # ``iteritems`` is kept as an alias of ``items`` for Python 2/3
    # compatibility (callers may still use the Python 2 spelling).
    iteritems = items
1863 1876
    def _opendirstatefile(self):
        """Open the dirstate file, preferring a pending (transaction) copy.

        ``txnutil.trypending`` returns the file object plus a flag saying
        whether the pending copy was used.  All opens made through this
        method must agree on that flag: seeing a mix of pending and
        non-pending reads would mean another process changed the state
        under us, so we abort instead of returning inconsistent data.
        """
        fp, mode = txnutil.trypending(
            self._root, self._opener, self._filename
        )
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(
                _(b'working directory state may be changed parallelly')
            )
        # Remember which variant we served so later opens can be checked.
        self._pendingmode = mode
        return fp
1875 1888
1876 1889 def setparents(self, p1, p2):
1877 1890 self._parents = (p1, p2)
1878 1891 self._dirtyparents = True
1879 1892
    def parents(self):
        """Return the working-directory parents as a pair of binary nodes.

        The value is computed lazily: on first call only the fixed-size
        header of the dirstate file is read (the whole map is not
        parsed).  A missing file means an empty dirstate, i.e. both
        parents are the null node.  Any other short read indicates a
        corrupt file and aborts.
        """
        if not self._parents:
            if self._use_dirstate_v2:
                # dirstate-v2 files start with a format marker that
                # precedes the two parent nodes.
                offset = len(rustmod.V2_FORMAT_MARKER)
            else:
                offset = 0
            read_len = offset + self._nodelen * 2
            try:
                fp = self._opendirstatefile()
                st = fp.read(read_len)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == read_len:
                # Full header present: slice out the two parent nodes.
                st = st[offset:]
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                # Empty (or absent) dirstate: both parents are null.
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                # Partial header: the file is truncated or corrupt.
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents
1915 1928
    @propertycache
    def _rustmap(self):
        """
        Fills the Dirstatemap when called.

        Reads the whole dirstate file and hands the raw bytes to the
        Rust parser.  Also snapshots the file's identity (for later
        write-race detection) and adopts the parents stored in the file
        unless they have already been changed in memory.
        """
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # A missing dirstate file simply means an empty dirstate.
            st = b''

        # The Rust parser also extracts the parents stored in the same
        # file, returning them alongside the map.
        self._rustmap, parents = rustmod.DirstateMap.new(
            self._use_dirstate_tree, self._use_dirstate_v2, st
        )

        if parents and not self._dirtyparents:
            self.setparents(*parents)

        # Shadow the Python forwarding methods with the Rust map's own
        # bound methods, skipping one level of indirection on hot paths.
        self.__contains__ = self._rustmap.__contains__
        self.__getitem__ = self._rustmap.__getitem__
        self.get = self._rustmap.get
        return self._rustmap
1948 1961
1949 1962 def write(self, st, now):
1950 1963 parents = self.parents()
1951 1964 packed = self._rustmap.write(
1952 1965 self._use_dirstate_v2, parents[0], parents[1], now
1953 1966 )
1954 1967 st.write(packed)
1955 1968 st.close()
1956 1969 self._dirtyparents = False
1957 1970
1958 1971 @propertycache
1959 1972 def filefoldmap(self):
1960 1973 """Returns a dictionary mapping normalized case paths to their
1961 1974 non-normalized versions.
1962 1975 """
1963 1976 return self._rustmap.filefoldmapasdict()
1964 1977
1965 1978 def hastrackeddir(self, d):
1966 1979 return self._rustmap.hastrackeddir(d)
1967 1980
1968 1981 def hasdir(self, d):
1969 1982 return self._rustmap.hasdir(d)
1970 1983
    @propertycache
    def identity(self):
        # Touching ``_rustmap`` parses the dirstate file and, as a side
        # effect, assigns ``self.identity`` to a filestat snapshot taken
        # just before the read (see ``_rustmap``).  Returning that value
        # here caches it for subsequent accesses.
        self._rustmap
        return self.identity
1975 1988
1976 1989 @property
1977 1990 def nonnormalset(self):
1978 1991 nonnorm = self._rustmap.non_normal_entries()
1979 1992 return nonnorm
1980 1993
1981 1994 @propertycache
1982 1995 def otherparentset(self):
1983 1996 otherparents = self._rustmap.other_parent_entries()
1984 1997 return otherparents
1985 1998
1986 1999 def non_normal_or_other_parent_paths(self):
1987 2000 return self._rustmap.non_normal_or_other_parent_paths()
1988 2001
1989 2002 @propertycache
1990 2003 def dirfoldmap(self):
1991 2004 f = {}
1992 2005 normcase = util.normcase
1993 2006 for name, _pseudo_entry in self.directories():
1994 2007 f[normcase(name)] = name
1995 2008 return f
General Comments 0
You need to be logged in to leave comments. Login now