dirstate: add default value to _addpath...
marmoute
r48280:523c0038 default
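This changeset gives `_addpath` default values for its `size` and `mtime` parameters (`NONNORMAL` and `AMBIGUOUS_TIME`), so callers recording a file whose on-disk state still needs to be looked up no longer have to spell out the sentinels. A minimal before/after sketch of a caller, based directly on the hunks below:

    # before this change: sentinel values passed explicitly at every call site
    self._addpath(f, b'a', 0, NONNORMAL, AMBIGUOUS_TIME)

    # after this change: size defaults to NONNORMAL, mtime to AMBIGUOUS_TIME
    self._addpath(f, b'a', 0)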
@@ -1,1995 +1,1995 b''
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import os
14 14 import stat
15 15
16 16 from .i18n import _
17 17 from .pycompat import delattr
18 18
19 19 from hgdemandimport import tracing
20 20
21 21 from . import (
22 22 encoding,
23 23 error,
24 24 match as matchmod,
25 25 pathutil,
26 26 policy,
27 27 pycompat,
28 28 scmutil,
29 29 sparse,
30 30 txnutil,
31 31 util,
32 32 )
33 33
34 34 from .interfaces import (
35 35 dirstate as intdirstate,
36 36 util as interfaceutil,
37 37 )
38 38
39 39 parsers = policy.importmod('parsers')
40 40 rustmod = policy.importrust('dirstate')
41 41
42 42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43 43
44 44 propertycache = util.propertycache
45 45 filecache = scmutil.filecache
46 46 _rangemask = 0x7FFFFFFF
47 47
48 48 dirstatetuple = parsers.dirstatetuple
49 49
50 50
51 51 # a special value used internally for `size` if the file comes from the other parent
52 52 FROM_P2 = -2
53 53
54 54 # a special value used internally for `size` if the file is modified/merged/added
55 55 NONNORMAL = -1
56 56
57 57 # a special value used internally for `time` if the time is ambiguous
58 58 AMBIGUOUS_TIME = -1
59 59
60 60
61 61 class repocache(filecache):
62 62 """filecache for files in .hg/"""
63 63
64 64 def join(self, obj, fname):
65 65 return obj._opener.join(fname)
66 66
67 67
68 68 class rootcache(filecache):
69 69 """filecache for files in the repository root"""
70 70
71 71 def join(self, obj, fname):
72 72 return obj._join(fname)
73 73
74 74
75 75 def _getfsnow(vfs):
76 76 '''Get "now" timestamp on filesystem'''
77 77 tmpfd, tmpname = vfs.mkstemp()
78 78 try:
79 79 return os.fstat(tmpfd)[stat.ST_MTIME]
80 80 finally:
81 81 os.close(tmpfd)
82 82 vfs.unlink(tmpname)
83 83
84 84
85 85 @interfaceutil.implementer(intdirstate.idirstate)
86 86 class dirstate(object):
87 87 def __init__(
88 88 self,
89 89 opener,
90 90 ui,
91 91 root,
92 92 validate,
93 93 sparsematchfn,
94 94 nodeconstants,
95 95 use_dirstate_v2,
96 96 ):
97 97 """Create a new dirstate object.
98 98
99 99 opener is an open()-like callable that can be used to open the
100 100 dirstate file; root is the root of the directory tracked by
101 101 the dirstate.
102 102 """
103 103 self._use_dirstate_v2 = use_dirstate_v2
104 104 self._nodeconstants = nodeconstants
105 105 self._opener = opener
106 106 self._validate = validate
107 107 self._root = root
108 108 self._sparsematchfn = sparsematchfn
109 109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
110 110 # UNC path pointing to root share (issue4557)
111 111 self._rootdir = pathutil.normasprefix(root)
112 112 self._dirty = False
113 113 self._lastnormaltime = 0
114 114 self._ui = ui
115 115 self._filecache = {}
116 116 self._parentwriters = 0
117 117 self._filename = b'dirstate'
118 118 self._pendingfilename = b'%s.pending' % self._filename
119 119 self._plchangecallbacks = {}
120 120 self._origpl = None
121 121 self._updatedfiles = set()
122 122 self._mapcls = dirstatemap
123 123 # Access and cache cwd early, so we don't access it for the first time
124 124 # after a working-copy update caused it to not exist (accessing it then
125 125 # raises an exception).
126 126 self._cwd
127 127
128 128 def prefetch_parents(self):
129 129 """make sure the parents are loaded
130 130
131 131 Used to avoid a race condition.
132 132 """
133 133 self._pl
134 134
135 135 @contextlib.contextmanager
136 136 def parentchange(self):
137 137 """Context manager for handling dirstate parents.
138 138
139 139 If an exception occurs in the scope of the context manager,
140 140 the incoherent dirstate won't be written when wlock is
141 141 released.
142 142 """
143 143 self._parentwriters += 1
144 144 yield
145 145 # Typically we want the "undo" step of a context manager in a
146 146 # finally block so it happens even when an exception
147 147 # occurs. In this case, however, we only want to decrement
148 148 # parentwriters if the code in the with statement exits
149 149 # normally, so we don't have a try/finally here on purpose.
150 150 self._parentwriters -= 1
151 151
152 152 def pendingparentchange(self):
153 153 """Returns true if the dirstate is in the middle of a set of changes
154 154 that modify the dirstate parent.
155 155 """
156 156 return self._parentwriters > 0
157 157
158 158 @propertycache
159 159 def _map(self):
160 160 """Return the dirstate contents (see documentation for dirstatemap)."""
161 161 self._map = self._mapcls(
162 162 self._ui,
163 163 self._opener,
164 164 self._root,
165 165 self._nodeconstants,
166 166 self._use_dirstate_v2,
167 167 )
168 168 return self._map
169 169
170 170 @property
171 171 def _sparsematcher(self):
172 172 """The matcher for the sparse checkout.
173 173
174 174 The working directory may not include every file from a manifest. The
175 175 matcher obtained by this property will match a path if it is to be
176 176 included in the working directory.
177 177 """
178 178 # TODO there is potential to cache this property. For now, the matcher
179 179 # is resolved on every access. (But the called function does use a
180 180 # cache to keep the lookup fast.)
181 181 return self._sparsematchfn()
182 182
183 183 @repocache(b'branch')
184 184 def _branch(self):
185 185 try:
186 186 return self._opener.read(b"branch").strip() or b"default"
187 187 except IOError as inst:
188 188 if inst.errno != errno.ENOENT:
189 189 raise
190 190 return b"default"
191 191
192 192 @property
193 193 def _pl(self):
194 194 return self._map.parents()
195 195
196 196 def hasdir(self, d):
197 197 return self._map.hastrackeddir(d)
198 198
199 199 @rootcache(b'.hgignore')
200 200 def _ignore(self):
201 201 files = self._ignorefiles()
202 202 if not files:
203 203 return matchmod.never()
204 204
205 205 pats = [b'include:%s' % f for f in files]
206 206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
207 207
208 208 @propertycache
209 209 def _slash(self):
210 210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
211 211
212 212 @propertycache
213 213 def _checklink(self):
214 214 return util.checklink(self._root)
215 215
216 216 @propertycache
217 217 def _checkexec(self):
218 218 return bool(util.checkexec(self._root))
219 219
220 220 @propertycache
221 221 def _checkcase(self):
222 222 return not util.fscasesensitive(self._join(b'.hg'))
223 223
224 224 def _join(self, f):
225 225 # much faster than os.path.join()
226 226 # it's safe because f is always a relative path
227 227 return self._rootdir + f
228 228
229 229 def flagfunc(self, buildfallback):
230 230 if self._checklink and self._checkexec:
231 231
232 232 def f(x):
233 233 try:
234 234 st = os.lstat(self._join(x))
235 235 if util.statislink(st):
236 236 return b'l'
237 237 if util.statisexec(st):
238 238 return b'x'
239 239 except OSError:
240 240 pass
241 241 return b''
242 242
243 243 return f
244 244
245 245 fallback = buildfallback()
246 246 if self._checklink:
247 247
248 248 def f(x):
249 249 if os.path.islink(self._join(x)):
250 250 return b'l'
251 251 if b'x' in fallback(x):
252 252 return b'x'
253 253 return b''
254 254
255 255 return f
256 256 if self._checkexec:
257 257
258 258 def f(x):
259 259 if b'l' in fallback(x):
260 260 return b'l'
261 261 if util.isexec(self._join(x)):
262 262 return b'x'
263 263 return b''
264 264
265 265 return f
266 266 else:
267 267 return fallback
268 268
269 269 @propertycache
270 270 def _cwd(self):
271 271 # internal config: ui.forcecwd
272 272 forcecwd = self._ui.config(b'ui', b'forcecwd')
273 273 if forcecwd:
274 274 return forcecwd
275 275 return encoding.getcwd()
276 276
277 277 def getcwd(self):
278 278 """Return the path from which a canonical path is calculated.
279 279
280 280 This path should be used to resolve file patterns or to convert
281 281 canonical paths back to file paths for display. It shouldn't be
282 282 used to get real file paths. Use vfs functions instead.
283 283 """
284 284 cwd = self._cwd
285 285 if cwd == self._root:
286 286 return b''
287 287 # self._root ends with a path separator if self._root is '/' or 'C:\'
288 288 rootsep = self._root
289 289 if not util.endswithsep(rootsep):
290 290 rootsep += pycompat.ossep
291 291 if cwd.startswith(rootsep):
292 292 return cwd[len(rootsep) :]
293 293 else:
294 294 # we're outside the repo. return an absolute path.
295 295 return cwd
296 296
297 297 def pathto(self, f, cwd=None):
298 298 if cwd is None:
299 299 cwd = self.getcwd()
300 300 path = util.pathto(self._root, cwd, f)
301 301 if self._slash:
302 302 return util.pconvert(path)
303 303 return path
304 304
305 305 def __getitem__(self, key):
306 306 """Return the current state of key (a filename) in the dirstate.
307 307
308 308 States are:
309 309 n normal
310 310 m needs merging
311 311 r marked for removal
312 312 a marked for addition
313 313 ? not tracked
314 314 """
315 315 return self._map.get(key, (b"?",))[0]
316 316
317 317 def __contains__(self, key):
318 318 return key in self._map
319 319
320 320 def __iter__(self):
321 321 return iter(sorted(self._map))
322 322
323 323 def items(self):
324 324 return pycompat.iteritems(self._map)
325 325
326 326 iteritems = items
327 327
328 328 def directories(self):
329 329 return self._map.directories()
330 330
331 331 def parents(self):
332 332 return [self._validate(p) for p in self._pl]
333 333
334 334 def p1(self):
335 335 return self._validate(self._pl[0])
336 336
337 337 def p2(self):
338 338 return self._validate(self._pl[1])
339 339
340 340 def branch(self):
341 341 return encoding.tolocal(self._branch)
342 342
343 343 def setparents(self, p1, p2=None):
344 344 """Set dirstate parents to p1 and p2.
345 345
346 346 When moving from two parents to one, 'm' merged entries are
347 347 adjusted to normal and previous copy records are discarded and
348 348 returned by the call.
349 349
350 350 See localrepo.setparents()
351 351 """
352 352 if p2 is None:
353 353 p2 = self._nodeconstants.nullid
354 354 if self._parentwriters == 0:
355 355 raise ValueError(
356 356 b"cannot set dirstate parent outside of "
357 357 b"dirstate.parentchange context manager"
358 358 )
359 359
360 360 self._dirty = True
361 361 oldp2 = self._pl[1]
362 362 if self._origpl is None:
363 363 self._origpl = self._pl
364 364 self._map.setparents(p1, p2)
365 365 copies = {}
366 366 if (
367 367 oldp2 != self._nodeconstants.nullid
368 368 and p2 == self._nodeconstants.nullid
369 369 ):
370 370 candidatefiles = self._map.non_normal_or_other_parent_paths()
371 371
372 372 for f in candidatefiles:
373 373 s = self._map.get(f)
374 374 if s is None:
375 375 continue
376 376
377 377 # Discard 'm' markers when moving away from a merge state
378 378 if s[0] == b'm':
379 379 source = self._map.copymap.get(f)
380 380 if source:
381 381 copies[f] = source
382 382 self.normallookup(f)
383 383 # Also fix up otherparent markers
384 384 elif s[0] == b'n' and s[2] == FROM_P2:
385 385 source = self._map.copymap.get(f)
386 386 if source:
387 387 copies[f] = source
388 388 self.add(f)
389 389 return copies
390 390
391 391 def setbranch(self, branch):
392 392 self.__class__._branch.set(self, encoding.fromlocal(branch))
393 393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
394 394 try:
395 395 f.write(self._branch + b'\n')
396 396 f.close()
397 397
398 398 # make sure filecache has the correct stat info for _branch after
399 399 # replacing the underlying file
400 400 ce = self._filecache[b'_branch']
401 401 if ce:
402 402 ce.refresh()
403 403 except: # re-raises
404 404 f.discard()
405 405 raise
406 406
407 407 def invalidate(self):
408 408 """Causes the next access to reread the dirstate.
409 409
410 410 This is different from localrepo.invalidatedirstate() because it always
411 411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
412 412 check whether the dirstate has changed before rereading it."""
413 413
414 414 for a in ("_map", "_branch", "_ignore"):
415 415 if a in self.__dict__:
416 416 delattr(self, a)
417 417 self._lastnormaltime = 0
418 418 self._dirty = False
419 419 self._updatedfiles.clear()
420 420 self._parentwriters = 0
421 421 self._origpl = None
422 422
423 423 def copy(self, source, dest):
424 424 """Mark dest as a copy of source. Unmark dest if source is None."""
425 425 if source == dest:
426 426 return
427 427 self._dirty = True
428 428 if source is not None:
429 429 self._map.copymap[dest] = source
430 430 self._updatedfiles.add(source)
431 431 self._updatedfiles.add(dest)
432 432 elif self._map.copymap.pop(dest, None):
433 433 self._updatedfiles.add(dest)
434 434
435 435 def copied(self, file):
436 436 return self._map.copymap.get(file, None)
437 437
438 438 def copies(self):
439 439 return self._map.copymap
440 440
441 def _addpath(self, f, state, mode, size, mtime):
441 def _addpath(self, f, state, mode, size=NONNORMAL, mtime=AMBIGUOUS_TIME):
442 442 oldstate = self[f]
443 443 if state == b'a' or oldstate == b'r':
444 444 scmutil.checkfilename(f)
445 445 if self._map.hastrackeddir(f):
446 446 msg = _(b'directory %r already in dirstate')
447 447 msg %= pycompat.bytestr(f)
448 448 raise error.Abort(msg)
449 449 # shadows
450 450 for d in pathutil.finddirs(f):
451 451 if self._map.hastrackeddir(d):
452 452 break
453 453 entry = self._map.get(d)
454 454 if entry is not None and entry[0] != b'r':
455 455 msg = _(b'file %r in dirstate clashes with %r')
456 456 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
457 457 raise error.Abort(msg)
458 458 if size != NONNORMAL and size != FROM_P2:
459 459 size = size & _rangemask
460 460 if mtime != AMBIGUOUS_TIME:
461 461 mtime = mtime & _rangemask
462 462 self._dirty = True
463 463 self._updatedfiles.add(f)
464 464 self._map.addfile(f, oldstate, state, mode, size, mtime)
465 465
466 466 def normal(self, f, parentfiledata=None):
467 467 """Mark a file normal and clean.
468 468
469 469 parentfiledata: (mode, size, mtime) of the clean file
470 470
471 471 parentfiledata should be computed from memory (for mode,
472 472 size), at or as close as possible to the point where we
473 473 determined the file was clean, to limit the risk of the
474 474 file having been changed by an external process between the
475 475 moment where the file was determined to be clean and now."""
476 476 if parentfiledata:
477 477 (mode, size, mtime) = parentfiledata
478 478 else:
479 479 s = os.lstat(self._join(f))
480 480 mode = s.st_mode
481 481 size = s.st_size
482 482 mtime = s[stat.ST_MTIME]
483 483 self._addpath(f, b'n', mode, size, mtime)
484 484 self._map.copymap.pop(f, None)
485 485 if f in self._map.nonnormalset:
486 486 self._map.nonnormalset.remove(f)
487 487 if mtime > self._lastnormaltime:
488 488 # Remember the most recent modification timeslot for status(),
489 489 # to make sure we won't miss future size-preserving file content
490 490 # modifications that happen within the same timeslot.
491 491 self._lastnormaltime = mtime
492 492
493 493 def normallookup(self, f):
494 494 '''Mark a file normal, but possibly dirty.'''
495 495 if self._pl[1] != self._nodeconstants.nullid:
496 496 # if there is a merge going on and the file was either
497 497 # in state 'm' (-1) or coming from other parent (-2) before
498 498 # being removed, restore that state.
499 499 entry = self._map.get(f)
500 500 if entry is not None:
501 501 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
502 502 source = self._map.copymap.get(f)
503 503 if entry[2] == NONNORMAL:
504 504 self.merge(f)
505 505 elif entry[2] == FROM_P2:
506 506 self.otherparent(f)
507 507 if source:
508 508 self.copy(source, f)
509 509 return
510 510 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
511 511 return
512 self._addpath(f, b'n', 0, NONNORMAL, AMBIGUOUS_TIME)
512 self._addpath(f, b'n', 0)
513 513 self._map.copymap.pop(f, None)
514 514
515 515 def otherparent(self, f):
516 516 '''Mark as coming from the other parent, always dirty.'''
517 517 if self._pl[1] == self._nodeconstants.nullid:
518 518 msg = _(b"setting %r to other parent only allowed in merges") % f
519 519 raise error.Abort(msg)
520 520 if f in self and self[f] == b'n':
521 521 # merge-like
522 self._addpath(f, b'm', 0, FROM_P2, AMBIGUOUS_TIME)
522 self._addpath(f, b'm', 0, FROM_P2)
523 523 else:
524 524 # add-like
525 self._addpath(f, b'n', 0, FROM_P2, AMBIGUOUS_TIME)
525 self._addpath(f, b'n', 0, FROM_P2)
526 526 self._map.copymap.pop(f, None)
527 527
528 528 def add(self, f):
529 529 '''Mark a file added.'''
530 self._addpath(f, b'a', 0, NONNORMAL, AMBIGUOUS_TIME)
530 self._addpath(f, b'a', 0)
531 531 self._map.copymap.pop(f, None)
532 532
533 533 def remove(self, f):
534 534 '''Mark a file removed.'''
535 535 self._dirty = True
536 536 oldstate = self[f]
537 537 size = 0
538 538 if self._pl[1] != self._nodeconstants.nullid:
539 539 entry = self._map.get(f)
540 540 if entry is not None:
541 541 # backup the previous state
542 542 if entry[0] == b'm': # merge
543 543 size = NONNORMAL
544 544 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
545 545 size = FROM_P2
546 546 self._map.otherparentset.add(f)
547 547 self._updatedfiles.add(f)
548 548 self._map.removefile(f, oldstate, size)
549 549 if size == 0:
550 550 self._map.copymap.pop(f, None)
551 551
552 552 def merge(self, f):
553 553 '''Mark a file merged.'''
554 554 if self._pl[1] == self._nodeconstants.nullid:
555 555 return self.normallookup(f)
556 556 return self.otherparent(f)
557 557
558 558 def drop(self, f):
559 559 '''Drop a file from the dirstate'''
560 560 oldstate = self[f]
561 561 if self._map.dropfile(f, oldstate):
562 562 self._dirty = True
563 563 self._updatedfiles.add(f)
564 564 self._map.copymap.pop(f, None)
565 565
566 566 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
567 567 if exists is None:
568 568 exists = os.path.lexists(os.path.join(self._root, path))
569 569 if not exists:
570 570 # Maybe a path component exists
571 571 if not ignoremissing and b'/' in path:
572 572 d, f = path.rsplit(b'/', 1)
573 573 d = self._normalize(d, False, ignoremissing, None)
574 574 folded = d + b"/" + f
575 575 else:
576 576 # No path components, preserve original case
577 577 folded = path
578 578 else:
579 579 # recursively normalize leading directory components
580 580 # against dirstate
581 581 if b'/' in normed:
582 582 d, f = normed.rsplit(b'/', 1)
583 583 d = self._normalize(d, False, ignoremissing, True)
584 584 r = self._root + b"/" + d
585 585 folded = d + b"/" + util.fspath(f, r)
586 586 else:
587 587 folded = util.fspath(normed, self._root)
588 588 storemap[normed] = folded
589 589
590 590 return folded
591 591
592 592 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
593 593 normed = util.normcase(path)
594 594 folded = self._map.filefoldmap.get(normed, None)
595 595 if folded is None:
596 596 if isknown:
597 597 folded = path
598 598 else:
599 599 folded = self._discoverpath(
600 600 path, normed, ignoremissing, exists, self._map.filefoldmap
601 601 )
602 602 return folded
603 603
604 604 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
605 605 normed = util.normcase(path)
606 606 folded = self._map.filefoldmap.get(normed, None)
607 607 if folded is None:
608 608 folded = self._map.dirfoldmap.get(normed, None)
609 609 if folded is None:
610 610 if isknown:
611 611 folded = path
612 612 else:
613 613 # store discovered result in dirfoldmap so that future
614 614 # normalizefile calls don't start matching directories
615 615 folded = self._discoverpath(
616 616 path, normed, ignoremissing, exists, self._map.dirfoldmap
617 617 )
618 618 return folded
619 619
620 620 def normalize(self, path, isknown=False, ignoremissing=False):
621 621 """
622 622 normalize the case of a pathname when on a casefolding filesystem
623 623
624 624 isknown specifies whether the filename came from walking the
625 625 disk, to avoid extra filesystem access.
626 626
627 627 If ignoremissing is True, missing paths are returned
628 628 unchanged. Otherwise, we try harder to normalize possibly
629 629 existing path components.
630 630
631 631 The normalized case is determined based on the following precedence:
632 632
633 633 - version of name already stored in the dirstate
634 634 - version of name stored on disk
635 635 - version provided via command arguments
636 636 """
637 637
638 638 if self._checkcase:
639 639 return self._normalize(path, isknown, ignoremissing)
640 640 return path
641 641
642 642 def clear(self):
643 643 self._map.clear()
644 644 self._lastnormaltime = 0
645 645 self._updatedfiles.clear()
646 646 self._dirty = True
647 647
648 648 def rebuild(self, parent, allfiles, changedfiles=None):
649 649 if changedfiles is None:
650 650 # Rebuild entire dirstate
651 651 to_lookup = allfiles
652 652 to_drop = []
653 653 lastnormaltime = self._lastnormaltime
654 654 self.clear()
655 655 self._lastnormaltime = lastnormaltime
656 656 elif len(changedfiles) < 10:
657 657 # Avoid turning allfiles into a set, which can be expensive if it's
658 658 # large.
659 659 to_lookup = []
660 660 to_drop = []
661 661 for f in changedfiles:
662 662 if f in allfiles:
663 663 to_lookup.append(f)
664 664 else:
665 665 to_drop.append(f)
666 666 else:
667 667 changedfilesset = set(changedfiles)
668 668 to_lookup = changedfilesset & set(allfiles)
669 669 to_drop = changedfilesset - to_lookup
670 670
671 671 if self._origpl is None:
672 672 self._origpl = self._pl
673 673 self._map.setparents(parent, self._nodeconstants.nullid)
674 674
675 675 for f in to_lookup:
676 676 self.normallookup(f)
677 677 for f in to_drop:
678 678 self.drop(f)
679 679
680 680 self._dirty = True
681 681
682 682 def identity(self):
683 683 """Return identity of dirstate itself to detect changing in storage
684 684
685 685 If identity of previous dirstate is equal to this, writing
686 686 changes based on the former dirstate out can keep consistency.
687 687 """
688 688 return self._map.identity
689 689
690 690 def write(self, tr):
691 691 if not self._dirty:
692 692 return
693 693
694 694 filename = self._filename
695 695 if tr:
696 696 # 'dirstate.write()' is not only for writing in-memory
697 697 # changes out, but also for dropping ambiguous timestamps.
698 698 # delayed writing re-raises the "ambiguous timestamp issue".
699 699 # See also the wiki page below for detail:
700 700 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
701 701
702 702 # emulate dropping timestamp in 'parsers.pack_dirstate'
703 703 now = _getfsnow(self._opener)
704 704 self._map.clearambiguoustimes(self._updatedfiles, now)
705 705
706 706 # emulate that all 'dirstate.normal' results are written out
707 707 self._lastnormaltime = 0
708 708 self._updatedfiles.clear()
709 709
710 710 # delay writing in-memory changes out
711 711 tr.addfilegenerator(
712 712 b'dirstate',
713 713 (self._filename,),
714 714 self._writedirstate,
715 715 location=b'plain',
716 716 )
717 717 return
718 718
719 719 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
720 720 self._writedirstate(st)
721 721
722 722 def addparentchangecallback(self, category, callback):
723 723 """add a callback to be called when the wd parents are changed
724 724
725 725 Callback will be called with the following arguments:
726 726 dirstate, (oldp1, oldp2), (newp1, newp2)
727 727
728 728 Category is a unique identifier to allow overwriting an old callback
729 729 with a newer callback.
730 730 """
731 731 self._plchangecallbacks[category] = callback
732 732
733 733 def _writedirstate(self, st):
734 734 # notify callbacks about parents change
735 735 if self._origpl is not None and self._origpl != self._pl:
736 736 for c, callback in sorted(
737 737 pycompat.iteritems(self._plchangecallbacks)
738 738 ):
739 739 callback(self, self._origpl, self._pl)
740 740 self._origpl = None
741 741 # use the modification time of the newly created temporary file as the
742 742 # filesystem's notion of 'now'
743 743 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
744 744
745 745 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
746 746 # timestamp of each entry in dirstate, because of 'now > mtime'
747 747 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
748 748 if delaywrite > 0:
749 749 # do we have any files to delay for?
750 750 for f, e in pycompat.iteritems(self._map):
751 751 if e[0] == b'n' and e[3] == now:
752 752 import time # to avoid useless import
753 753
754 754 # rather than sleep n seconds, sleep until the next
755 755 # multiple of n seconds
756 756 clock = time.time()
757 757 start = int(clock) - (int(clock) % delaywrite)
758 758 end = start + delaywrite
759 759 time.sleep(end - clock)
760 760 now = end # trust our estimate that the end is near now
761 761 break
762 762
763 763 self._map.write(st, now)
764 764 self._lastnormaltime = 0
765 765 self._dirty = False
766 766
767 767 def _dirignore(self, f):
768 768 if self._ignore(f):
769 769 return True
770 770 for p in pathutil.finddirs(f):
771 771 if self._ignore(p):
772 772 return True
773 773 return False
774 774
775 775 def _ignorefiles(self):
776 776 files = []
777 777 if os.path.exists(self._join(b'.hgignore')):
778 778 files.append(self._join(b'.hgignore'))
779 779 for name, path in self._ui.configitems(b"ui"):
780 780 if name == b'ignore' or name.startswith(b'ignore.'):
781 781 # we need to use os.path.join here rather than self._join
782 782 # because path is arbitrary and user-specified
783 783 files.append(os.path.join(self._rootdir, util.expandpath(path)))
784 784 return files
785 785
786 786 def _ignorefileandline(self, f):
787 787 files = collections.deque(self._ignorefiles())
788 788 visited = set()
789 789 while files:
790 790 i = files.popleft()
791 791 patterns = matchmod.readpatternfile(
792 792 i, self._ui.warn, sourceinfo=True
793 793 )
794 794 for pattern, lineno, line in patterns:
795 795 kind, p = matchmod._patsplit(pattern, b'glob')
796 796 if kind == b"subinclude":
797 797 if p not in visited:
798 798 files.append(p)
799 799 continue
800 800 m = matchmod.match(
801 801 self._root, b'', [], [pattern], warn=self._ui.warn
802 802 )
803 803 if m(f):
804 804 return (i, lineno, line)
805 805 visited.add(i)
806 806 return (None, -1, b"")
807 807
808 808 def _walkexplicit(self, match, subrepos):
809 809 """Get stat data about the files explicitly specified by match.
810 810
811 811 Return a triple (results, dirsfound, dirsnotfound).
812 812 - results is a mapping from filename to stat result. It also contains
813 813 listings mapping subrepos and .hg to None.
814 814 - dirsfound is a list of files found to be directories.
815 815 - dirsnotfound is a list of files that the dirstate thinks are
816 816 directories and that were not found."""
817 817
818 818 def badtype(mode):
819 819 kind = _(b'unknown')
820 820 if stat.S_ISCHR(mode):
821 821 kind = _(b'character device')
822 822 elif stat.S_ISBLK(mode):
823 823 kind = _(b'block device')
824 824 elif stat.S_ISFIFO(mode):
825 825 kind = _(b'fifo')
826 826 elif stat.S_ISSOCK(mode):
827 827 kind = _(b'socket')
828 828 elif stat.S_ISDIR(mode):
829 829 kind = _(b'directory')
830 830 return _(b'unsupported file type (type is %s)') % kind
831 831
832 832 badfn = match.bad
833 833 dmap = self._map
834 834 lstat = os.lstat
835 835 getkind = stat.S_IFMT
836 836 dirkind = stat.S_IFDIR
837 837 regkind = stat.S_IFREG
838 838 lnkkind = stat.S_IFLNK
839 839 join = self._join
840 840 dirsfound = []
841 841 foundadd = dirsfound.append
842 842 dirsnotfound = []
843 843 notfoundadd = dirsnotfound.append
844 844
845 845 if not match.isexact() and self._checkcase:
846 846 normalize = self._normalize
847 847 else:
848 848 normalize = None
849 849
850 850 files = sorted(match.files())
851 851 subrepos.sort()
852 852 i, j = 0, 0
853 853 while i < len(files) and j < len(subrepos):
854 854 subpath = subrepos[j] + b"/"
855 855 if files[i] < subpath:
856 856 i += 1
857 857 continue
858 858 while i < len(files) and files[i].startswith(subpath):
859 859 del files[i]
860 860 j += 1
861 861
862 862 if not files or b'' in files:
863 863 files = [b'']
864 864 # constructing the foldmap is expensive, so don't do it for the
865 865 # common case where files is ['']
866 866 normalize = None
867 867 results = dict.fromkeys(subrepos)
868 868 results[b'.hg'] = None
869 869
870 870 for ff in files:
871 871 if normalize:
872 872 nf = normalize(ff, False, True)
873 873 else:
874 874 nf = ff
875 875 if nf in results:
876 876 continue
877 877
878 878 try:
879 879 st = lstat(join(nf))
880 880 kind = getkind(st.st_mode)
881 881 if kind == dirkind:
882 882 if nf in dmap:
883 883 # file replaced by dir on disk but still in dirstate
884 884 results[nf] = None
885 885 foundadd((nf, ff))
886 886 elif kind == regkind or kind == lnkkind:
887 887 results[nf] = st
888 888 else:
889 889 badfn(ff, badtype(kind))
890 890 if nf in dmap:
891 891 results[nf] = None
892 892 except OSError as inst: # nf not found on disk - it is dirstate only
893 893 if nf in dmap: # does it exactly match a missing file?
894 894 results[nf] = None
895 895 else: # does it match a missing directory?
896 896 if self._map.hasdir(nf):
897 897 notfoundadd(nf)
898 898 else:
899 899 badfn(ff, encoding.strtolocal(inst.strerror))
900 900
901 901 # match.files() may contain explicitly-specified paths that shouldn't
902 902 # be taken; drop them from the list of files found. dirsfound/notfound
903 903 # aren't filtered here because they will be tested later.
904 904 if match.anypats():
905 905 for f in list(results):
906 906 if f == b'.hg' or f in subrepos:
907 907 # keep sentinel to disable further out-of-repo walks
908 908 continue
909 909 if not match(f):
910 910 del results[f]
911 911
912 912 # Case insensitive filesystems cannot rely on lstat() failing to detect
913 913 # a case-only rename. Prune the stat object for any file that does not
914 914 # match the case in the filesystem, if there are multiple files that
915 915 # normalize to the same path.
916 916 if match.isexact() and self._checkcase:
917 917 normed = {}
918 918
919 919 for f, st in pycompat.iteritems(results):
920 920 if st is None:
921 921 continue
922 922
923 923 nc = util.normcase(f)
924 924 paths = normed.get(nc)
925 925
926 926 if paths is None:
927 927 paths = set()
928 928 normed[nc] = paths
929 929
930 930 paths.add(f)
931 931
932 932 for norm, paths in pycompat.iteritems(normed):
933 933 if len(paths) > 1:
934 934 for path in paths:
935 935 folded = self._discoverpath(
936 936 path, norm, True, None, self._map.dirfoldmap
937 937 )
938 938 if path != folded:
939 939 results[path] = None
940 940
941 941 return results, dirsfound, dirsnotfound
942 942
943 943 def walk(self, match, subrepos, unknown, ignored, full=True):
944 944 """
945 945 Walk recursively through the directory tree, finding all files
946 946 matched by match.
947 947
948 948 If full is False, maybe skip some known-clean files.
949 949
950 950 Return a dict mapping filename to stat-like object (either
951 951 mercurial.osutil.stat instance or return value of os.stat()).
952 952
953 953 """
954 954 # full is a flag that extensions that hook into walk can use -- this
955 955 # implementation doesn't use it at all. This satisfies the contract
956 956 # because we only guarantee a "maybe".
957 957
958 958 if ignored:
959 959 ignore = util.never
960 960 dirignore = util.never
961 961 elif unknown:
962 962 ignore = self._ignore
963 963 dirignore = self._dirignore
964 964 else:
965 965 # if not unknown and not ignored, drop dir recursion and step 2
966 966 ignore = util.always
967 967 dirignore = util.always
968 968
969 969 matchfn = match.matchfn
970 970 matchalways = match.always()
971 971 matchtdir = match.traversedir
972 972 dmap = self._map
973 973 listdir = util.listdir
974 974 lstat = os.lstat
975 975 dirkind = stat.S_IFDIR
976 976 regkind = stat.S_IFREG
977 977 lnkkind = stat.S_IFLNK
978 978 join = self._join
979 979
980 980 exact = skipstep3 = False
981 981 if match.isexact(): # match.exact
982 982 exact = True
983 983 dirignore = util.always # skip step 2
984 984 elif match.prefix(): # match.match, no patterns
985 985 skipstep3 = True
986 986
987 987 if not exact and self._checkcase:
988 988 normalize = self._normalize
989 989 normalizefile = self._normalizefile
990 990 skipstep3 = False
991 991 else:
992 992 normalize = self._normalize
993 993 normalizefile = None
994 994
995 995 # step 1: find all explicit files
996 996 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
997 997 if matchtdir:
998 998 for d in work:
999 999 matchtdir(d[0])
1000 1000 for d in dirsnotfound:
1001 1001 matchtdir(d)
1002 1002
1003 1003 skipstep3 = skipstep3 and not (work or dirsnotfound)
1004 1004 work = [d for d in work if not dirignore(d[0])]
1005 1005
1006 1006 # step 2: visit subdirectories
1007 1007 def traverse(work, alreadynormed):
1008 1008 wadd = work.append
1009 1009 while work:
1010 1010 tracing.counter('dirstate.walk work', len(work))
1011 1011 nd = work.pop()
1012 1012 visitentries = match.visitchildrenset(nd)
1013 1013 if not visitentries:
1014 1014 continue
1015 1015 if visitentries == b'this' or visitentries == b'all':
1016 1016 visitentries = None
1017 1017 skip = None
1018 1018 if nd != b'':
1019 1019 skip = b'.hg'
1020 1020 try:
1021 1021 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1022 1022 entries = listdir(join(nd), stat=True, skip=skip)
1023 1023 except OSError as inst:
1024 1024 if inst.errno in (errno.EACCES, errno.ENOENT):
1025 1025 match.bad(
1026 1026 self.pathto(nd), encoding.strtolocal(inst.strerror)
1027 1027 )
1028 1028 continue
1029 1029 raise
1030 1030 for f, kind, st in entries:
1031 1031 # Some matchers may return files in the visitentries set,
1032 1032 # instead of 'this', if the matcher explicitly mentions them
1033 1033 # and is not an exactmatcher. This is acceptable; we do not
1034 1034 # make any hard assumptions about file-or-directory below
1035 1035 # based on the presence of `f` in visitentries. If
1036 1036 # visitchildrenset returned a set, we can always skip the
1037 1037 # entries *not* in the set it provided regardless of whether
1038 1038 # they're actually a file or a directory.
1039 1039 if visitentries and f not in visitentries:
1040 1040 continue
1041 1041 if normalizefile:
1042 1042 # even though f might be a directory, we're only
1043 1043 # interested in comparing it to files currently in the
1044 1044 # dmap -- therefore normalizefile is enough
1045 1045 nf = normalizefile(
1046 1046 nd and (nd + b"/" + f) or f, True, True
1047 1047 )
1048 1048 else:
1049 1049 nf = nd and (nd + b"/" + f) or f
1050 1050 if nf not in results:
1051 1051 if kind == dirkind:
1052 1052 if not ignore(nf):
1053 1053 if matchtdir:
1054 1054 matchtdir(nf)
1055 1055 wadd(nf)
1056 1056 if nf in dmap and (matchalways or matchfn(nf)):
1057 1057 results[nf] = None
1058 1058 elif kind == regkind or kind == lnkkind:
1059 1059 if nf in dmap:
1060 1060 if matchalways or matchfn(nf):
1061 1061 results[nf] = st
1062 1062 elif (matchalways or matchfn(nf)) and not ignore(
1063 1063 nf
1064 1064 ):
1065 1065 # unknown file -- normalize if necessary
1066 1066 if not alreadynormed:
1067 1067 nf = normalize(nf, False, True)
1068 1068 results[nf] = st
1069 1069 elif nf in dmap and (matchalways or matchfn(nf)):
1070 1070 results[nf] = None
1071 1071
1072 1072 for nd, d in work:
1073 1073 # alreadynormed means that processwork doesn't have to do any
1074 1074 # expensive directory normalization
1075 1075 alreadynormed = not normalize or nd == d
1076 1076 traverse([d], alreadynormed)
1077 1077
1078 1078 for s in subrepos:
1079 1079 del results[s]
1080 1080 del results[b'.hg']
1081 1081
1082 1082 # step 3: visit remaining files from dmap
1083 1083 if not skipstep3 and not exact:
1084 1084 # If a dmap file is not in results yet, it was either
1085 1085 # a) not matching matchfn b) ignored, c) missing, or d) under a
1086 1086 # symlink directory.
1087 1087 if not results and matchalways:
1088 1088 visit = [f for f in dmap]
1089 1089 else:
1090 1090 visit = [f for f in dmap if f not in results and matchfn(f)]
1091 1091 visit.sort()
1092 1092
1093 1093 if unknown:
1094 1094 # unknown == True means we walked all dirs under the roots
1095 1095 # that weren't ignored, and everything that matched was stat'ed
1096 1096 # and is already in results.
1097 1097 # The rest must thus be ignored or under a symlink.
1098 1098 audit_path = pathutil.pathauditor(self._root, cached=True)
1099 1099
1100 1100 for nf in iter(visit):
1101 1101 # If a stat for the same file was already added with a
1102 1102 # different case, don't add one for this, since that would
1103 1103 # make it appear as if the file exists under both names
1104 1104 # on disk.
1105 1105 if (
1106 1106 normalizefile
1107 1107 and normalizefile(nf, True, True) in results
1108 1108 ):
1109 1109 results[nf] = None
1110 1110 # Report ignored items in the dmap as long as they are not
1111 1111 # under a symlink directory.
1112 1112 elif audit_path.check(nf):
1113 1113 try:
1114 1114 results[nf] = lstat(join(nf))
1115 1115 # file was just ignored, no links, and exists
1116 1116 except OSError:
1117 1117 # file doesn't exist
1118 1118 results[nf] = None
1119 1119 else:
1120 1120 # It's either missing or under a symlink directory
1121 1121 # which we in this case report as missing
1122 1122 results[nf] = None
1123 1123 else:
1124 1124 # We may not have walked the full directory tree above,
1125 1125 # so stat and check everything we missed.
1126 1126 iv = iter(visit)
1127 1127 for st in util.statfiles([join(i) for i in visit]):
1128 1128 results[next(iv)] = st
1129 1129 return results
1130 1130
1131 1131 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1132 1132 # Force Rayon (Rust parallelism library) to respect the number of
1133 1133 # workers. This is a temporary workaround until Rust code knows
1134 1134 # how to read the config file.
1135 1135 numcpus = self._ui.configint(b"worker", b"numcpus")
1136 1136 if numcpus is not None:
1137 1137 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1138 1138
1139 1139 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1140 1140 if not workers_enabled:
1141 1141 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1142 1142
1143 1143 (
1144 1144 lookup,
1145 1145 modified,
1146 1146 added,
1147 1147 removed,
1148 1148 deleted,
1149 1149 clean,
1150 1150 ignored,
1151 1151 unknown,
1152 1152 warnings,
1153 1153 bad,
1154 1154 traversed,
1155 1155 dirty,
1156 1156 ) = rustmod.status(
1157 1157 self._map._rustmap,
1158 1158 matcher,
1159 1159 self._rootdir,
1160 1160 self._ignorefiles(),
1161 1161 self._checkexec,
1162 1162 self._lastnormaltime,
1163 1163 bool(list_clean),
1164 1164 bool(list_ignored),
1165 1165 bool(list_unknown),
1166 1166 bool(matcher.traversedir),
1167 1167 )
1168 1168
1169 1169 self._dirty |= dirty
1170 1170
1171 1171 if matcher.traversedir:
1172 1172 for dir in traversed:
1173 1173 matcher.traversedir(dir)
1174 1174
1175 1175 if self._ui.warn:
1176 1176 for item in warnings:
1177 1177 if isinstance(item, tuple):
1178 1178 file_path, syntax = item
1179 1179 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1180 1180 file_path,
1181 1181 syntax,
1182 1182 )
1183 1183 self._ui.warn(msg)
1184 1184 else:
1185 1185 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1186 1186 self._ui.warn(
1187 1187 msg
1188 1188 % (
1189 1189 pathutil.canonpath(
1190 1190 self._rootdir, self._rootdir, item
1191 1191 ),
1192 1192 b"No such file or directory",
1193 1193 )
1194 1194 )
1195 1195
1196 1196 for (fn, message) in bad:
1197 1197 matcher.bad(fn, encoding.strtolocal(message))
1198 1198
1199 1199 status = scmutil.status(
1200 1200 modified=modified,
1201 1201 added=added,
1202 1202 removed=removed,
1203 1203 deleted=deleted,
1204 1204 unknown=unknown,
1205 1205 ignored=ignored,
1206 1206 clean=clean,
1207 1207 )
1208 1208 return (lookup, status)
1209 1209
1210 1210 def status(self, match, subrepos, ignored, clean, unknown):
1211 1211 """Determine the status of the working copy relative to the
1212 1212 dirstate and return a pair of (unsure, status), where status is of type
1213 1213 scmutil.status and:
1214 1214
1215 1215 unsure:
1216 1216 files that might have been modified since the dirstate was
1217 1217 written, but need to be read to be sure (size is the same
1218 1218 but mtime differs)
1219 1219 status.modified:
1220 1220 files that have definitely been modified since the dirstate
1221 1221 was written (different size or mode)
1222 1222 status.clean:
1223 1223 files that have definitely not been modified since the
1224 1224 dirstate was written
1225 1225 """
1226 1226 listignored, listclean, listunknown = ignored, clean, unknown
1227 1227 lookup, modified, added, unknown, ignored = [], [], [], [], []
1228 1228 removed, deleted, clean = [], [], []
1229 1229
1230 1230 dmap = self._map
1231 1231 dmap.preload()
1232 1232
1233 1233 use_rust = True
1234 1234
1235 1235 allowed_matchers = (
1236 1236 matchmod.alwaysmatcher,
1237 1237 matchmod.exactmatcher,
1238 1238 matchmod.includematcher,
1239 1239 )
1240 1240
1241 1241 if rustmod is None:
1242 1242 use_rust = False
1243 1243 elif self._checkcase:
1244 1244 # Case-insensitive filesystems are not handled yet
1245 1245 use_rust = False
1246 1246 elif subrepos:
1247 1247 use_rust = False
1248 1248 elif sparse.enabled:
1249 1249 use_rust = False
1250 1250 elif not isinstance(match, allowed_matchers):
1251 1251 # Some matchers have yet to be implemented
1252 1252 use_rust = False
1253 1253
1254 1254 if use_rust:
1255 1255 try:
1256 1256 return self._rust_status(
1257 1257 match, listclean, listignored, listunknown
1258 1258 )
1259 1259 except rustmod.FallbackError:
1260 1260 pass
1261 1261
1262 1262 def noop(f):
1263 1263 pass
1264 1264
1265 1265 dcontains = dmap.__contains__
1266 1266 dget = dmap.__getitem__
1267 1267 ladd = lookup.append # aka "unsure"
1268 1268 madd = modified.append
1269 1269 aadd = added.append
1270 1270 uadd = unknown.append if listunknown else noop
1271 1271 iadd = ignored.append if listignored else noop
1272 1272 radd = removed.append
1273 1273 dadd = deleted.append
1274 1274 cadd = clean.append if listclean else noop
1275 1275 mexact = match.exact
1276 1276 dirignore = self._dirignore
1277 1277 checkexec = self._checkexec
1278 1278 copymap = self._map.copymap
1279 1279 lastnormaltime = self._lastnormaltime
1280 1280
1281 1281 # We need to do full walks when either
1282 1282 # - we're listing all clean files, or
1283 1283 # - match.traversedir does something, because match.traversedir should
1284 1284 # be called for every dir in the working dir
1285 1285 full = listclean or match.traversedir is not None
1286 1286 for fn, st in pycompat.iteritems(
1287 1287 self.walk(match, subrepos, listunknown, listignored, full=full)
1288 1288 ):
1289 1289 if not dcontains(fn):
1290 1290 if (listignored or mexact(fn)) and dirignore(fn):
1291 1291 if listignored:
1292 1292 iadd(fn)
1293 1293 else:
1294 1294 uadd(fn)
1295 1295 continue
1296 1296
1297 1297 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1298 1298 # written like that for performance reasons. dmap[fn] is not a
1299 1299 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1300 1300 # opcode has fast paths when the value to be unpacked is a tuple or
1301 1301 # a list, but falls back to creating a full-fledged iterator in
1302 1302 # general. That is much slower than simply accessing and storing the
1303 1303 # tuple members one by one.
1304 1304 t = dget(fn)
1305 1305 state = t[0]
1306 1306 mode = t[1]
1307 1307 size = t[2]
1308 1308 time = t[3]
1309 1309
1310 1310 if not st and state in b"nma":
1311 1311 dadd(fn)
1312 1312 elif state == b'n':
1313 1313 if (
1314 1314 size >= 0
1315 1315 and (
1316 1316 (size != st.st_size and size != st.st_size & _rangemask)
1317 1317 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1318 1318 )
1319 1319 or size == FROM_P2 # other parent
1320 1320 or fn in copymap
1321 1321 ):
1322 1322 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1323 1323 # issue6456: Size returned may be longer due to
1324 1324 # encryption on EXT-4 fscrypt, undecided.
1325 1325 ladd(fn)
1326 1326 else:
1327 1327 madd(fn)
1328 1328 elif (
1329 1329 time != st[stat.ST_MTIME]
1330 1330 and time != st[stat.ST_MTIME] & _rangemask
1331 1331 ):
1332 1332 ladd(fn)
1333 1333 elif st[stat.ST_MTIME] == lastnormaltime:
1334 1334 # fn may have just been marked as normal and it may have
1335 1335 # changed in the same second without changing its size.
1336 1336 # This can happen if we quickly do multiple commits.
1337 1337 # Force lookup, so we don't miss such a racy file change.
1338 1338 ladd(fn)
1339 1339 elif listclean:
1340 1340 cadd(fn)
1341 1341 elif state == b'm':
1342 1342 madd(fn)
1343 1343 elif state == b'a':
1344 1344 aadd(fn)
1345 1345 elif state == b'r':
1346 1346 radd(fn)
1347 1347 status = scmutil.status(
1348 1348 modified, added, removed, deleted, unknown, ignored, clean
1349 1349 )
1350 1350 return (lookup, status)
1351 1351
1352 1352 def matches(self, match):
1353 1353 """
1354 1354 return files in the dirstate (in whatever state) filtered by match
1355 1355 """
1356 1356 dmap = self._map
1357 1357 if rustmod is not None:
1358 1358 dmap = self._map._rustmap
1359 1359
1360 1360 if match.always():
1361 1361 return dmap.keys()
1362 1362 files = match.files()
1363 1363 if match.isexact():
1364 1364 # fast path -- filter the other way around, since typically files is
1365 1365 # much smaller than dmap
1366 1366 return [f for f in files if f in dmap]
1367 1367 if match.prefix() and all(fn in dmap for fn in files):
1368 1368 # fast path -- all the values are known to be files, so just return
1369 1369 # that
1370 1370 return list(files)
1371 1371 return [f for f in dmap if match(f)]
1372 1372
1373 1373 def _actualfilename(self, tr):
1374 1374 if tr:
1375 1375 return self._pendingfilename
1376 1376 else:
1377 1377 return self._filename
1378 1378
1379 1379 def savebackup(self, tr, backupname):
1380 1380 '''Save current dirstate into backup file'''
1381 1381 filename = self._actualfilename(tr)
1382 1382 assert backupname != filename
1383 1383
1384 1384 # use '_writedirstate' instead of 'write' to write changes certainly,
1385 1385 # because the latter omits writing out if transaction is running.
1386 1386 # output file will be used to create backup of dirstate at this point.
1387 1387 if self._dirty or not self._opener.exists(filename):
1388 1388 self._writedirstate(
1389 1389 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1390 1390 )
1391 1391
1392 1392 if tr:
1393 1393 # ensure that subsequent tr.writepending returns True for
1394 1394 # changes written out above, even if dirstate is never
1395 1395 # changed after this
1396 1396 tr.addfilegenerator(
1397 1397 b'dirstate',
1398 1398 (self._filename,),
1399 1399 self._writedirstate,
1400 1400 location=b'plain',
1401 1401 )
1402 1402
1403 1403 # ensure that pending file written above is unlinked at
1404 1404 # failure, even if tr.writepending isn't invoked until the
1405 1405 # end of this transaction
1406 1406 tr.registertmp(filename, location=b'plain')
1407 1407
1408 1408 self._opener.tryunlink(backupname)
1409 1409 # hardlink backup is okay because _writedirstate is always called
1410 1410 # with an "atomictemp=True" file.
1411 1411 util.copyfile(
1412 1412 self._opener.join(filename),
1413 1413 self._opener.join(backupname),
1414 1414 hardlink=True,
1415 1415 )
1416 1416
1417 1417 def restorebackup(self, tr, backupname):
1418 1418 '''Restore dirstate by backup file'''
1419 1419 # this "invalidate()" prevents "wlock.release()" from writing
1420 1420 # changes of dirstate out after restoring from backup file
1421 1421 self.invalidate()
1422 1422 filename = self._actualfilename(tr)
1423 1423 o = self._opener
1424 1424 if util.samefile(o.join(backupname), o.join(filename)):
1425 1425 o.unlink(backupname)
1426 1426 else:
1427 1427 o.rename(backupname, filename, checkambig=True)
1428 1428
1429 1429 def clearbackup(self, tr, backupname):
1430 1430 '''Clear backup file'''
1431 1431 self._opener.unlink(backupname)
1432 1432
1433 1433
1434 1434 class dirstatemap(object):
1435 1435 """Map encapsulating the dirstate's contents.
1436 1436
1437 1437 The dirstate contains the following state:
1438 1438
1439 1439 - `identity` is the identity of the dirstate file, which can be used to
1440 1440 detect when changes have occurred to the dirstate file.
1441 1441
1442 1442 - `parents` is a pair containing the parents of the working copy. The
1443 1443 parents are updated by calling `setparents`.
1444 1444
1445 1445 - the state map maps filenames to tuples of (state, mode, size, mtime),
1446 1446 where state is a single character representing 'normal', 'added',
1447 1447 'removed', or 'merged'. It is read by treating the dirstate as a
1448 1448 dict. File state is updated by calling the `addfile`, `removefile` and
1449 1449 `dropfile` methods.
1450 1450
1451 1451 - `copymap` maps destination filenames to their source filename.
1452 1452
1453 1453 The dirstate also provides the following views onto the state:
1454 1454
1455 1455 - `nonnormalset` is a set of the filenames that have state other
1456 1456 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1457 1457
1458 1458 - `otherparentset` is a set of the filenames that are marked as coming
1459 1459 from the second parent when the dirstate is currently being merged.
1460 1460
1461 1461 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1462 1462 form that they appear as in the dirstate.
1463 1463
1464 1464 - `dirfoldmap` is a dict mapping normalized directory names to the
1465 1465 denormalized form that they appear as in the dirstate.
1466 1466 """
1467 1467
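# Illustrative sketch, not part of the original file: reading one entry from
# the state map documented above; `dmap` is assumed to be a loaded dirstatemap.
#
#   state, mode, size, mtime = dmap[b'some/file']
#   if state != b'n' or mtime == AMBIGUOUS_TIME:
#       pass  # such entries also show up in dmap.nonnormalset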
1468 1468 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1469 1469 self._ui = ui
1470 1470 self._opener = opener
1471 1471 self._root = root
1472 1472 self._filename = b'dirstate'
1473 1473 self._nodelen = 20
1474 1474 self._nodeconstants = nodeconstants
1475 1475 assert (
1476 1476 not use_dirstate_v2
1477 1477 ), "should have detected unsupported requirement"
1478 1478
1479 1479 self._parents = None
1480 1480 self._dirtyparents = False
1481 1481
1482 1482 # for consistent view between _pl() and _read() invocations
1483 1483 self._pendingmode = None
1484 1484
1485 1485 @propertycache
1486 1486 def _map(self):
1487 1487 self._map = {}
1488 1488 self.read()
1489 1489 return self._map
1490 1490
1491 1491 @propertycache
1492 1492 def copymap(self):
1493 1493 self.copymap = {}
1494 1494 self._map
1495 1495 return self.copymap
1496 1496
1497 1497 def directories(self):
1498 1498 # Rust / dirstate-v2 only
1499 1499 return []
1500 1500
1501 1501 def clear(self):
1502 1502 self._map.clear()
1503 1503 self.copymap.clear()
1504 1504 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1505 1505 util.clearcachedproperty(self, b"_dirs")
1506 1506 util.clearcachedproperty(self, b"_alldirs")
1507 1507 util.clearcachedproperty(self, b"filefoldmap")
1508 1508 util.clearcachedproperty(self, b"dirfoldmap")
1509 1509 util.clearcachedproperty(self, b"nonnormalset")
1510 1510 util.clearcachedproperty(self, b"otherparentset")
1511 1511
1512 1512 def items(self):
1513 1513 return pycompat.iteritems(self._map)
1514 1514
1515 1515 # forward for python2,3 compat
1516 1516 iteritems = items
1517 1517
1518 1518 def __len__(self):
1519 1519 return len(self._map)
1520 1520
1521 1521 def __iter__(self):
1522 1522 return iter(self._map)
1523 1523
1524 1524 def get(self, key, default=None):
1525 1525 return self._map.get(key, default)
1526 1526
1527 1527 def __contains__(self, key):
1528 1528 return key in self._map
1529 1529
1530 1530 def __getitem__(self, key):
1531 1531 return self._map[key]
1532 1532
1533 1533 def keys(self):
1534 1534 return self._map.keys()
1535 1535
1536 1536 def preload(self):
1537 1537 """Loads the underlying data, if it's not already loaded"""
1538 1538 self._map
1539 1539
1540 1540 def addfile(self, f, oldstate, state, mode, size, mtime):
1541 1541 """Add a tracked file to the dirstate."""
1542 1542 if oldstate in b"?r" and "_dirs" in self.__dict__:
1543 1543 self._dirs.addpath(f)
1544 1544 if oldstate == b"?" and "_alldirs" in self.__dict__:
1545 1545 self._alldirs.addpath(f)
1546 1546 self._map[f] = dirstatetuple(state, mode, size, mtime)
1547 1547 if state != b'n' or mtime == AMBIGUOUS_TIME:
1548 1548 self.nonnormalset.add(f)
1549 1549 if size == FROM_P2:
1550 1550 self.otherparentset.add(f)
1551 1551
1552 1552 def removefile(self, f, oldstate, size):
1553 1553 """
1554 1554 Mark a file as removed in the dirstate.
1555 1555
1556 1556 The `size` parameter is used to store sentinel values that indicate
1557 1557 the file's previous state. In the future, we should refactor this
1558 1558 to be more explicit about what that state is.
1559 1559 """
1560 1560 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1561 1561 self._dirs.delpath(f)
1562 1562 if oldstate == b"?" and "_alldirs" in self.__dict__:
1563 1563 self._alldirs.addpath(f)
1564 1564 if "filefoldmap" in self.__dict__:
1565 1565 normed = util.normcase(f)
1566 1566 self.filefoldmap.pop(normed, None)
1567 1567 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1568 1568 self.nonnormalset.add(f)
1569 1569
1570 1570 def dropfile(self, f, oldstate):
1571 1571 """
1572 1572 Remove a file from the dirstate. Returns True if the file was
1573 1573 previously recorded.
1574 1574 """
1575 1575 exists = self._map.pop(f, None) is not None
1576 1576 if exists:
1577 1577 if oldstate != b"r" and "_dirs" in self.__dict__:
1578 1578 self._dirs.delpath(f)
1579 1579 if "_alldirs" in self.__dict__:
1580 1580 self._alldirs.delpath(f)
1581 1581 if "filefoldmap" in self.__dict__:
1582 1582 normed = util.normcase(f)
1583 1583 self.filefoldmap.pop(normed, None)
1584 1584 self.nonnormalset.discard(f)
1585 1585 return exists
1586 1586
1587 1587 def clearambiguoustimes(self, files, now):
1588 1588 for f in files:
1589 1589 e = self.get(f)
1590 1590 if e is not None and e[0] == b'n' and e[3] == now:
1591 1591 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
1592 1592 self.nonnormalset.add(f)
1593 1593
1594 1594 def nonnormalentries(self):
1595 1595 '''Compute the nonnormal dirstate entries from the dmap'''
1596 1596 try:
1597 1597 return parsers.nonnormalotherparententries(self._map)
1598 1598 except AttributeError:
1599 1599 nonnorm = set()
1600 1600 otherparent = set()
1601 1601 for fname, e in pycompat.iteritems(self._map):
1602 1602 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
1603 1603 nonnorm.add(fname)
1604 1604 if e[0] == b'n' and e[2] == FROM_P2:
1605 1605 otherparent.add(fname)
1606 1606 return nonnorm, otherparent
1607 1607
1608 1608 @propertycache
1609 1609 def filefoldmap(self):
1610 1610 """Returns a dictionary mapping normalized case paths to their
1611 1611 non-normalized versions.
1612 1612 """
1613 1613 try:
1614 1614 makefilefoldmap = parsers.make_file_foldmap
1615 1615 except AttributeError:
1616 1616 pass
1617 1617 else:
1618 1618 return makefilefoldmap(
1619 1619 self._map, util.normcasespec, util.normcasefallback
1620 1620 )
1621 1621
1622 1622 f = {}
1623 1623 normcase = util.normcase
1624 1624 for name, s in pycompat.iteritems(self._map):
1625 1625 if s[0] != b'r':
1626 1626 f[normcase(name)] = name
1627 1627 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1628 1628 return f
1629 1629
1630 1630 def hastrackeddir(self, d):
1631 1631 """
1632 1632 Returns True if the dirstate contains a tracked (not removed) file
1633 1633 in this directory.
1634 1634 """
1635 1635 return d in self._dirs
1636 1636
1637 1637 def hasdir(self, d):
1638 1638 """
1639 1639 Returns True if the dirstate contains a file (tracked or removed)
1640 1640 in this directory.
1641 1641 """
1642 1642 return d in self._alldirs
1643 1643
1644 1644 @propertycache
1645 1645 def _dirs(self):
1646 1646 return pathutil.dirs(self._map, b'r')
1647 1647
1648 1648 @propertycache
1649 1649 def _alldirs(self):
1650 1650 return pathutil.dirs(self._map)
1651 1651
1652 1652 def _opendirstatefile(self):
1653 1653 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1654 1654 if self._pendingmode is not None and self._pendingmode != mode:
1655 1655 fp.close()
1656 1656 raise error.Abort(
1657 1657 _(b'working directory state may be changed in parallel')
1658 1658 )
1659 1659 self._pendingmode = mode
1660 1660 return fp
1661 1661
1662 1662 def parents(self):
1663 1663 if not self._parents:
1664 1664 try:
1665 1665 fp = self._opendirstatefile()
1666 1666 st = fp.read(2 * self._nodelen)
1667 1667 fp.close()
1668 1668 except IOError as err:
1669 1669 if err.errno != errno.ENOENT:
1670 1670 raise
1671 1671 # File doesn't exist, so the current state is empty
1672 1672 st = b''
1673 1673
1674 1674 l = len(st)
1675 1675 if l == self._nodelen * 2:
1676 1676 self._parents = (
1677 1677 st[: self._nodelen],
1678 1678 st[self._nodelen : 2 * self._nodelen],
1679 1679 )
1680 1680 elif l == 0:
1681 1681 self._parents = (
1682 1682 self._nodeconstants.nullid,
1683 1683 self._nodeconstants.nullid,
1684 1684 )
1685 1685 else:
1686 1686 raise error.Abort(
1687 1687 _(b'working directory state appears damaged!')
1688 1688 )
1689 1689
1690 1690 return self._parents
1691 1691
1692 1692 def setparents(self, p1, p2):
1693 1693 self._parents = (p1, p2)
1694 1694 self._dirtyparents = True
1695 1695
1696 1696 def read(self):
1697 1697 # ignore HG_PENDING because identity is used only for writing
1698 1698 self.identity = util.filestat.frompath(
1699 1699 self._opener.join(self._filename)
1700 1700 )
1701 1701
1702 1702 try:
1703 1703 fp = self._opendirstatefile()
1704 1704 try:
1705 1705 st = fp.read()
1706 1706 finally:
1707 1707 fp.close()
1708 1708 except IOError as err:
1709 1709 if err.errno != errno.ENOENT:
1710 1710 raise
1711 1711 return
1712 1712 if not st:
1713 1713 return
1714 1714
1715 1715 if util.safehasattr(parsers, b'dict_new_presized'):
1716 1716 # Make an estimate of the number of files in the dirstate based on
1717 1717 # its size. This trades wasting some memory for avoiding costly
1718 1718 # resizes. Each entry has a prefix of 17 bytes followed by one or
1719 1719 # two path names. Studies on various large-scale real-world repositories
1720 1720 # found 54 bytes to be a reasonable upper limit for the average path name.
1721 1721 # Copy entries are ignored for the sake of this estimate.
1722 1722 self._map = parsers.dict_new_presized(len(st) // 71)
1723 1723
1724 1724 # Python's garbage collector triggers a GC each time a certain number
1725 1725 # of container objects (the number being defined by
1726 1726 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1727 1727 # for each file in the dirstate. The C version then immediately marks
1728 1728 # them as not to be tracked by the collector. However, this has no
1729 1729 # effect on when GCs are triggered, only on what objects the GC looks
1730 1730 # into. This means that O(number of files) GCs are unavoidable.
1731 1731 # Depending on when in the process's lifetime the dirstate is parsed,
1732 1732 # this can get very expensive. As a workaround, disable GC while
1733 1733 # parsing the dirstate.
1734 1734 #
1735 1735 # (we cannot decorate the function directly since it is in a C module)
1736 1736 parse_dirstate = util.nogc(parsers.parse_dirstate)
1737 1737 p = parse_dirstate(self._map, self.copymap, st)
1738 1738 if not self._dirtyparents:
1739 1739 self.setparents(*p)
1740 1740
1741 1741 # Avoid excess attribute lookups by fast pathing certain checks
1742 1742 self.__contains__ = self._map.__contains__
1743 1743 self.__getitem__ = self._map.__getitem__
1744 1744 self.get = self._map.get
1745 1745
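# pack_dirstate also updates entries whose mtime equals `now` to an ambiguous
# mtime while serializing, which is why the nonnormal/otherparent sets are
# recomputed after the write.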
1746 1746 def write(self, st, now):
1747 1747 st.write(
1748 1748 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1749 1749 )
1750 1750 st.close()
1751 1751 self._dirtyparents = False
1752 1752 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1753 1753
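# nonnormalentries() computes both sets in a single pass, so whichever of the
# two properties below is accessed first also fills in the other.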
1754 1754 @propertycache
1755 1755 def nonnormalset(self):
1756 1756 nonnorm, otherparents = self.nonnormalentries()
1757 1757 self.otherparentset = otherparents
1758 1758 return nonnorm
1759 1759
1760 1760 @propertycache
1761 1761 def otherparentset(self):
1762 1762 nonnorm, otherparents = self.nonnormalentries()
1763 1763 self.nonnormalset = nonnorm
1764 1764 return otherparents
1765 1765
1766 1766 def non_normal_or_other_parent_paths(self):
1767 1767 return self.nonnormalset.union(self.otherparentset)
1768 1768
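# Accessing self._map triggers read(), which assigns `identity` directly on
# the instance; that instance attribute shadows this property, so the value
# returned (and cached) here is the one read() stored.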
1769 1769 @propertycache
1770 1770 def identity(self):
1771 1771 self._map
1772 1772 return self.identity
1773 1773
1774 1774 @propertycache
1775 1775 def dirfoldmap(self):
1776 1776 f = {}
1777 1777 normcase = util.normcase
1778 1778 for name in self._dirs:
1779 1779 f[normcase(name)] = name
1780 1780 return f
1781 1781
1782 1782
1783 1783 if rustmod is not None:
1784 1784
1785 1785 class dirstatemap(object):
1786 1786 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1787 1787 self._use_dirstate_v2 = use_dirstate_v2
1788 1788 self._nodeconstants = nodeconstants
1789 1789 self._ui = ui
1790 1790 self._opener = opener
1791 1791 self._root = root
1792 1792 self._filename = b'dirstate'
1793 1793 self._nodelen = 20 # Also update Rust code when changing this!
1794 1794 self._parents = None
1795 1795 self._dirtyparents = False
1796 1796
1797 1797 # for consistent view between _pl() and _read() invocations
1798 1798 self._pendingmode = None
1799 1799
1800 1800 self._use_dirstate_tree = self._ui.configbool(
1801 1801 b"experimental",
1802 1802 b"dirstate-tree.in-memory",
1803 1803 False,
1804 1804 )
1805 1805
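# The methods below are thin wrappers forwarding to the Rust-backed map;
# `self._rustmap` is loaded lazily by the propertycache further down.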
1806 1806 def addfile(self, *args, **kwargs):
1807 1807 return self._rustmap.addfile(*args, **kwargs)
1808 1808
1809 1809 def removefile(self, *args, **kwargs):
1810 1810 return self._rustmap.removefile(*args, **kwargs)
1811 1811
1812 1812 def dropfile(self, *args, **kwargs):
1813 1813 return self._rustmap.dropfile(*args, **kwargs)
1814 1814
1815 1815 def clearambiguoustimes(self, *args, **kwargs):
1816 1816 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1817 1817
1818 1818 def nonnormalentries(self):
1819 1819 return self._rustmap.nonnormalentries()
1820 1820
1821 1821 def get(self, *args, **kwargs):
1822 1822 return self._rustmap.get(*args, **kwargs)
1823 1823
1824 1824 @property
1825 1825 def copymap(self):
1826 1826 return self._rustmap.copymap()
1827 1827
1828 1828 def directories(self):
1829 1829 return self._rustmap.directories()
1830 1830
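# preload() just touches the `_rustmap` propertycache so the on-disk dirstate
# is parsed eagerly rather than on first real use.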
1831 1831 def preload(self):
1832 1832 self._rustmap
1833 1833
1834 1834 def clear(self):
1835 1835 self._rustmap.clear()
1836 1836 self.setparents(
1837 1837 self._nodeconstants.nullid, self._nodeconstants.nullid
1838 1838 )
1839 1839 util.clearcachedproperty(self, b"_dirs")
1840 1840 util.clearcachedproperty(self, b"_alldirs")
1841 1841 util.clearcachedproperty(self, b"dirfoldmap")
1842 1842
1843 1843 def items(self):
1844 1844 return self._rustmap.items()
1845 1845
1846 1846 def keys(self):
1847 1847 return iter(self._rustmap)
1848 1848
1849 1849 def __contains__(self, key):
1850 1850 return key in self._rustmap
1851 1851
1852 1852 def __getitem__(self, item):
1853 1853 return self._rustmap[item]
1854 1854
1855 1855 def __len__(self):
1856 1856 return len(self._rustmap)
1857 1857
1858 1858 def __iter__(self):
1859 1859 return iter(self._rustmap)
1860 1860
1861 1861 # forward for Python 2/3 compat
1862 1862 iteritems = items
1863 1863
1864 1864 def _opendirstatefile(self):
1865 1865 fp, mode = txnutil.trypending(
1866 1866 self._root, self._opener, self._filename
1867 1867 )
1868 1868 if self._pendingmode is not None and self._pendingmode != mode:
1869 1869 fp.close()
1870 1870 raise error.Abort(
1871 1871 _(b'working directory state may be changed in parallel')
1872 1872 )
1873 1873 self._pendingmode = mode
1874 1874 return fp
1875 1875
1876 1876 def setparents(self, p1, p2):
1877 1877 self._parents = (p1, p2)
1878 1878 self._dirtyparents = True
1879 1879
1880 1880 def parents(self):
1881 1881 if not self._parents:
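# dirstate-v2 files begin with a format marker; skip past it before reading
# the two parent nodes.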
1882 1882 if self._use_dirstate_v2:
1883 1883 offset = len(rustmod.V2_FORMAT_MARKER)
1884 1884 else:
1885 1885 offset = 0
1886 1886 read_len = offset + self._nodelen * 2
1887 1887 try:
1888 1888 fp = self._opendirstatefile()
1889 1889 st = fp.read(read_len)
1890 1890 fp.close()
1891 1891 except IOError as err:
1892 1892 if err.errno != errno.ENOENT:
1893 1893 raise
1894 1894 # File doesn't exist, so the current state is empty
1895 1895 st = b''
1896 1896
1897 1897 l = len(st)
1898 1898 if l == read_len:
1899 1899 st = st[offset:]
1900 1900 self._parents = (
1901 1901 st[: self._nodelen],
1902 1902 st[self._nodelen : 2 * self._nodelen],
1903 1903 )
1904 1904 elif l == 0:
1905 1905 self._parents = (
1906 1906 self._nodeconstants.nullid,
1907 1907 self._nodeconstants.nullid,
1908 1908 )
1909 1909 else:
1910 1910 raise error.Abort(
1911 1911 _(b'working directory state appears damaged!')
1912 1912 )
1913 1913
1914 1914 return self._parents
1915 1915
1916 1916 @propertycache
1917 1917 def _rustmap(self):
1918 1918 """
1919 1919 Load the Rust DirstateMap from disk the first time it is accessed.
1920 1920 """
1921 1921 # ignore HG_PENDING because identity is used only for writing
1922 1922 self.identity = util.filestat.frompath(
1923 1923 self._opener.join(self._filename)
1924 1924 )
1925 1925
1926 1926 try:
1927 1927 fp = self._opendirstatefile()
1928 1928 try:
1929 1929 st = fp.read()
1930 1930 finally:
1931 1931 fp.close()
1932 1932 except IOError as err:
1933 1933 if err.errno != errno.ENOENT:
1934 1934 raise
1935 1935 st = b''
1936 1936
1937 1937 self._rustmap, parents = rustmod.DirstateMap.new(
1938 1938 self._use_dirstate_tree, self._use_dirstate_v2, st
1939 1939 )
1940 1940
1941 1941 if parents and not self._dirtyparents:
1942 1942 self.setparents(*parents)
1943 1943
1944 1944 self.__contains__ = self._rustmap.__contains__
1945 1945 self.__getitem__ = self._rustmap.__getitem__
1946 1946 self.get = self._rustmap.get
1947 1947 return self._rustmap
1948 1948
1949 1949 def write(self, st, now):
1950 1950 parents = self.parents()
1951 1951 packed = self._rustmap.write(
1952 1952 self._use_dirstate_v2, parents[0], parents[1], now
1953 1953 )
1954 1954 st.write(packed)
1955 1955 st.close()
1956 1956 self._dirtyparents = False
1957 1957
1958 1958 @propertycache
1959 1959 def filefoldmap(self):
1960 1960 """Returns a dictionary mapping normalized case paths to their
1961 1961 non-normalized versions.
1962 1962 """
1963 1963 return self._rustmap.filefoldmapasdict()
1964 1964
1965 1965 def hastrackeddir(self, d):
1966 1966 return self._rustmap.hastrackeddir(d)
1967 1967
1968 1968 def hasdir(self, d):
1969 1969 return self._rustmap.hasdir(d)
1970 1970
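# Same pattern as the pure Python map: loading self._rustmap sets `identity`
# on the instance, and that value is returned and cached here.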
1971 1971 @propertycache
1972 1972 def identity(self):
1973 1973 self._rustmap
1974 1974 return self.identity
1975 1975
1976 1976 @property
1977 1977 def nonnormalset(self):
1978 1978 nonnorm = self._rustmap.non_normal_entries()
1979 1979 return nonnorm
1980 1980
1981 1981 @propertycache
1982 1982 def otherparentset(self):
1983 1983 otherparents = self._rustmap.other_parent_entries()
1984 1984 return otherparents
1985 1985
1986 1986 def non_normal_or_other_parent_paths(self):
1987 1987 return self._rustmap.non_normal_or_other_parent_paths()
1988 1988
1989 1989 @propertycache
1990 1990 def dirfoldmap(self):
1991 1991 f = {}
1992 1992 normcase = util.normcase
1993 1993 for name, _pseudo_entry in self.directories():
1994 1994 f[normcase(name)] = name
1995 1995 return f