dirstate: introduce a symbolic constant for the AMBIGUOUS_TIME marker...
marmoute
r48278:3f13dfa1 default
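The patch below replaces the bare -1 mtime literal with a named AMBIGUOUS_TIME constant wherever an entry's timestamp must be treated as unreliable. As a quick orientation before the full diff, here is a minimal sketch of that pattern; it uses a plain (state, mode, size, mtime) tuple as a stand-in for Mercurial's dirstatetuple, so the helper names are illustrative only and not part of the real API.

# Sketch only: simplified stand-in for dirstate entries, not Mercurial's API.

# a special value used internally for `time` if the time is ambiguous
AMBIGUOUS_TIME = -1

def mark_mtime_ambiguous(entry):
    """Return a copy of a (state, mode, size, mtime) entry whose mtime is
    flagged as ambiguous, forcing a later status() to re-check the file."""
    state, mode, size, _mtime = entry
    # before this change, call sites spelled the literal -1 here
    return (state, mode, size, AMBIGUOUS_TIME)

def needs_lookup(entry):
    # an entry is "non-normal" if its state is not 'n' or its mtime is ambiguous
    return entry[0] != b'n' or entry[3] == AMBIGUOUS_TIME

print(needs_lookup(mark_mtime_ambiguous((b'n', 0o644, 12, 1633024800))))  # True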
@@ -1,1988 +1,1991 @@
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import os
14 14 import stat
15 15
16 16 from .i18n import _
17 17 from .pycompat import delattr
18 18
19 19 from hgdemandimport import tracing
20 20
21 21 from . import (
22 22 encoding,
23 23 error,
24 24 match as matchmod,
25 25 pathutil,
26 26 policy,
27 27 pycompat,
28 28 scmutil,
29 29 sparse,
30 30 txnutil,
31 31 util,
32 32 )
33 33
34 34 from .interfaces import (
35 35 dirstate as intdirstate,
36 36 util as interfaceutil,
37 37 )
38 38
39 39 parsers = policy.importmod('parsers')
40 40 rustmod = policy.importrust('dirstate')
41 41
42 42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43 43
44 44 propertycache = util.propertycache
45 45 filecache = scmutil.filecache
46 46 _rangemask = 0x7FFFFFFF
47 47
48 48 dirstatetuple = parsers.dirstatetuple
49 49
50 50
51 51 # a special value used internally for `size` if the file comes from the other parent
52 52 FROM_P2 = -2
53 53
54 54 # a special value used internally for `size` if the file is modified/merged/added
55 55 NONNORMAL = -1
56 56
57 # a special value used internally for `time` if the time is ambiguous
58 AMBIGUOUS_TIME = -1
59
57 60
58 61 class repocache(filecache):
59 62 """filecache for files in .hg/"""
60 63
61 64 def join(self, obj, fname):
62 65 return obj._opener.join(fname)
63 66
64 67
65 68 class rootcache(filecache):
66 69 """filecache for files in the repository root"""
67 70
68 71 def join(self, obj, fname):
69 72 return obj._join(fname)
70 73
71 74
72 75 def _getfsnow(vfs):
73 76 '''Get "now" timestamp on filesystem'''
74 77 tmpfd, tmpname = vfs.mkstemp()
75 78 try:
76 79 return os.fstat(tmpfd)[stat.ST_MTIME]
77 80 finally:
78 81 os.close(tmpfd)
79 82 vfs.unlink(tmpname)
80 83
81 84
82 85 @interfaceutil.implementer(intdirstate.idirstate)
83 86 class dirstate(object):
84 87 def __init__(
85 88 self,
86 89 opener,
87 90 ui,
88 91 root,
89 92 validate,
90 93 sparsematchfn,
91 94 nodeconstants,
92 95 use_dirstate_v2,
93 96 ):
94 97 """Create a new dirstate object.
95 98
96 99 opener is an open()-like callable that can be used to open the
97 100 dirstate file; root is the root of the directory tracked by
98 101 the dirstate.
99 102 """
100 103 self._use_dirstate_v2 = use_dirstate_v2
101 104 self._nodeconstants = nodeconstants
102 105 self._opener = opener
103 106 self._validate = validate
104 107 self._root = root
105 108 self._sparsematchfn = sparsematchfn
106 109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
107 110 # UNC path pointing to root share (issue4557)
108 111 self._rootdir = pathutil.normasprefix(root)
109 112 self._dirty = False
110 113 self._lastnormaltime = 0
111 114 self._ui = ui
112 115 self._filecache = {}
113 116 self._parentwriters = 0
114 117 self._filename = b'dirstate'
115 118 self._pendingfilename = b'%s.pending' % self._filename
116 119 self._plchangecallbacks = {}
117 120 self._origpl = None
118 121 self._updatedfiles = set()
119 122 self._mapcls = dirstatemap
120 123 # Access and cache cwd early, so we don't access it for the first time
121 124 # after a working-copy update caused it to not exist (accessing it then
122 125 # raises an exception).
123 126 self._cwd
124 127
125 128 def prefetch_parents(self):
126 129 """make sure the parents are loaded
127 130
128 131 Used to avoid a race condition.
129 132 """
130 133 self._pl
131 134
132 135 @contextlib.contextmanager
133 136 def parentchange(self):
134 137 """Context manager for handling dirstate parents.
135 138
136 139 If an exception occurs in the scope of the context manager,
137 140 the incoherent dirstate won't be written when wlock is
138 141 released.
139 142 """
140 143 self._parentwriters += 1
141 144 yield
142 145 # Typically we want the "undo" step of a context manager in a
143 146 # finally block so it happens even when an exception
144 147 # occurs. In this case, however, we only want to decrement
145 148 # parentwriters if the code in the with statement exits
146 149 # normally, so we don't have a try/finally here on purpose.
147 150 self._parentwriters -= 1
148 151
149 152 def pendingparentchange(self):
150 153 """Returns true if the dirstate is in the middle of a set of changes
151 154 that modify the dirstate parent.
152 155 """
153 156 return self._parentwriters > 0
154 157
155 158 @propertycache
156 159 def _map(self):
157 160 """Return the dirstate contents (see documentation for dirstatemap)."""
158 161 self._map = self._mapcls(
159 162 self._ui,
160 163 self._opener,
161 164 self._root,
162 165 self._nodeconstants,
163 166 self._use_dirstate_v2,
164 167 )
165 168 return self._map
166 169
167 170 @property
168 171 def _sparsematcher(self):
169 172 """The matcher for the sparse checkout.
170 173
171 174 The working directory may not include every file from a manifest. The
172 175 matcher obtained by this property will match a path if it is to be
173 176 included in the working directory.
174 177 """
175 178 # TODO there is potential to cache this property. For now, the matcher
176 179 # is resolved on every access. (But the called function does use a
177 180 # cache to keep the lookup fast.)
178 181 return self._sparsematchfn()
179 182
180 183 @repocache(b'branch')
181 184 def _branch(self):
182 185 try:
183 186 return self._opener.read(b"branch").strip() or b"default"
184 187 except IOError as inst:
185 188 if inst.errno != errno.ENOENT:
186 189 raise
187 190 return b"default"
188 191
189 192 @property
190 193 def _pl(self):
191 194 return self._map.parents()
192 195
193 196 def hasdir(self, d):
194 197 return self._map.hastrackeddir(d)
195 198
196 199 @rootcache(b'.hgignore')
197 200 def _ignore(self):
198 201 files = self._ignorefiles()
199 202 if not files:
200 203 return matchmod.never()
201 204
202 205 pats = [b'include:%s' % f for f in files]
203 206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
204 207
205 208 @propertycache
206 209 def _slash(self):
207 210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
208 211
209 212 @propertycache
210 213 def _checklink(self):
211 214 return util.checklink(self._root)
212 215
213 216 @propertycache
214 217 def _checkexec(self):
215 218 return bool(util.checkexec(self._root))
216 219
217 220 @propertycache
218 221 def _checkcase(self):
219 222 return not util.fscasesensitive(self._join(b'.hg'))
220 223
221 224 def _join(self, f):
222 225 # much faster than os.path.join()
223 226 # it's safe because f is always a relative path
224 227 return self._rootdir + f
225 228
226 229 def flagfunc(self, buildfallback):
227 230 if self._checklink and self._checkexec:
228 231
229 232 def f(x):
230 233 try:
231 234 st = os.lstat(self._join(x))
232 235 if util.statislink(st):
233 236 return b'l'
234 237 if util.statisexec(st):
235 238 return b'x'
236 239 except OSError:
237 240 pass
238 241 return b''
239 242
240 243 return f
241 244
242 245 fallback = buildfallback()
243 246 if self._checklink:
244 247
245 248 def f(x):
246 249 if os.path.islink(self._join(x)):
247 250 return b'l'
248 251 if b'x' in fallback(x):
249 252 return b'x'
250 253 return b''
251 254
252 255 return f
253 256 if self._checkexec:
254 257
255 258 def f(x):
256 259 if b'l' in fallback(x):
257 260 return b'l'
258 261 if util.isexec(self._join(x)):
259 262 return b'x'
260 263 return b''
261 264
262 265 return f
263 266 else:
264 267 return fallback
265 268
266 269 @propertycache
267 270 def _cwd(self):
268 271 # internal config: ui.forcecwd
269 272 forcecwd = self._ui.config(b'ui', b'forcecwd')
270 273 if forcecwd:
271 274 return forcecwd
272 275 return encoding.getcwd()
273 276
274 277 def getcwd(self):
275 278 """Return the path from which a canonical path is calculated.
276 279
277 280 This path should be used to resolve file patterns or to convert
278 281 canonical paths back to file paths for display. It shouldn't be
279 282 used to get real file paths. Use vfs functions instead.
280 283 """
281 284 cwd = self._cwd
282 285 if cwd == self._root:
283 286 return b''
284 287 # self._root ends with a path separator if self._root is '/' or 'C:\'
285 288 rootsep = self._root
286 289 if not util.endswithsep(rootsep):
287 290 rootsep += pycompat.ossep
288 291 if cwd.startswith(rootsep):
289 292 return cwd[len(rootsep) :]
290 293 else:
291 294 # we're outside the repo. return an absolute path.
292 295 return cwd
293 296
294 297 def pathto(self, f, cwd=None):
295 298 if cwd is None:
296 299 cwd = self.getcwd()
297 300 path = util.pathto(self._root, cwd, f)
298 301 if self._slash:
299 302 return util.pconvert(path)
300 303 return path
301 304
302 305 def __getitem__(self, key):
303 306 """Return the current state of key (a filename) in the dirstate.
304 307
305 308 States are:
306 309 n normal
307 310 m needs merging
308 311 r marked for removal
309 312 a marked for addition
310 313 ? not tracked
311 314 """
312 315 return self._map.get(key, (b"?",))[0]
313 316
314 317 def __contains__(self, key):
315 318 return key in self._map
316 319
317 320 def __iter__(self):
318 321 return iter(sorted(self._map))
319 322
320 323 def items(self):
321 324 return pycompat.iteritems(self._map)
322 325
323 326 iteritems = items
324 327
325 328 def directories(self):
326 329 return self._map.directories()
327 330
328 331 def parents(self):
329 332 return [self._validate(p) for p in self._pl]
330 333
331 334 def p1(self):
332 335 return self._validate(self._pl[0])
333 336
334 337 def p2(self):
335 338 return self._validate(self._pl[1])
336 339
337 340 def branch(self):
338 341 return encoding.tolocal(self._branch)
339 342
340 343 def setparents(self, p1, p2=None):
341 344 """Set dirstate parents to p1 and p2.
342 345
343 346 When moving from two parents to one, 'm' merged entries are
344 347 adjusted to normal and previous copy records are discarded and
345 348 returned by the call.
346 349
347 350 See localrepo.setparents()
348 351 """
349 352 if p2 is None:
350 353 p2 = self._nodeconstants.nullid
351 354 if self._parentwriters == 0:
352 355 raise ValueError(
353 356 b"cannot set dirstate parent outside of "
354 357 b"dirstate.parentchange context manager"
355 358 )
356 359
357 360 self._dirty = True
358 361 oldp2 = self._pl[1]
359 362 if self._origpl is None:
360 363 self._origpl = self._pl
361 364 self._map.setparents(p1, p2)
362 365 copies = {}
363 366 if (
364 367 oldp2 != self._nodeconstants.nullid
365 368 and p2 == self._nodeconstants.nullid
366 369 ):
367 370 candidatefiles = self._map.non_normal_or_other_parent_paths()
368 371
369 372 for f in candidatefiles:
370 373 s = self._map.get(f)
371 374 if s is None:
372 375 continue
373 376
374 377 # Discard 'm' markers when moving away from a merge state
375 378 if s[0] == b'm':
376 379 source = self._map.copymap.get(f)
377 380 if source:
378 381 copies[f] = source
379 382 self.normallookup(f)
380 383 # Also fix up otherparent markers
381 384 elif s[0] == b'n' and s[2] == FROM_P2:
382 385 source = self._map.copymap.get(f)
383 386 if source:
384 387 copies[f] = source
385 388 self.add(f)
386 389 return copies
387 390
388 391 def setbranch(self, branch):
389 392 self.__class__._branch.set(self, encoding.fromlocal(branch))
390 393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
391 394 try:
392 395 f.write(self._branch + b'\n')
393 396 f.close()
394 397
395 398 # make sure filecache has the correct stat info for _branch after
396 399 # replacing the underlying file
397 400 ce = self._filecache[b'_branch']
398 401 if ce:
399 402 ce.refresh()
400 403 except: # re-raises
401 404 f.discard()
402 405 raise
403 406
404 407 def invalidate(self):
405 408 """Causes the next access to reread the dirstate.
406 409
407 410 This is different from localrepo.invalidatedirstate() because it always
408 411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
409 412 check whether the dirstate has changed before rereading it."""
410 413
411 414 for a in ("_map", "_branch", "_ignore"):
412 415 if a in self.__dict__:
413 416 delattr(self, a)
414 417 self._lastnormaltime = 0
415 418 self._dirty = False
416 419 self._updatedfiles.clear()
417 420 self._parentwriters = 0
418 421 self._origpl = None
419 422
420 423 def copy(self, source, dest):
421 424 """Mark dest as a copy of source. Unmark dest if source is None."""
422 425 if source == dest:
423 426 return
424 427 self._dirty = True
425 428 if source is not None:
426 429 self._map.copymap[dest] = source
427 430 self._updatedfiles.add(source)
428 431 self._updatedfiles.add(dest)
429 432 elif self._map.copymap.pop(dest, None):
430 433 self._updatedfiles.add(dest)
431 434
432 435 def copied(self, file):
433 436 return self._map.copymap.get(file, None)
434 437
435 438 def copies(self):
436 439 return self._map.copymap
437 440
438 441 def _addpath(self, f, state, mode, size, mtime):
439 442 oldstate = self[f]
440 443 if state == b'a' or oldstate == b'r':
441 444 scmutil.checkfilename(f)
442 445 if self._map.hastrackeddir(f):
443 446 msg = _(b'directory %r already in dirstate')
444 447 msg %= pycompat.bytestr(f)
445 448 raise error.Abort(msg)
446 449 # shadows
447 450 for d in pathutil.finddirs(f):
448 451 if self._map.hastrackeddir(d):
449 452 break
450 453 entry = self._map.get(d)
451 454 if entry is not None and entry[0] != b'r':
452 455 msg = _(b'file %r in dirstate clashes with %r')
453 456 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
454 457 raise error.Abort(msg)
455 458 self._dirty = True
456 459 self._updatedfiles.add(f)
457 460 self._map.addfile(f, oldstate, state, mode, size, mtime)
458 461
459 462 def normal(self, f, parentfiledata=None):
460 463 """Mark a file normal and clean.
461 464
462 465 parentfiledata: (mode, size, mtime) of the clean file
463 466
464 467 parentfiledata should be computed from memory (for mode,
465 468 size), at or as close as possible to the point where we
466 469 determined the file was clean, to limit the risk of the
467 470 file having been changed by an external process between the
468 471 moment where the file was determined to be clean and now."""
469 472 if parentfiledata:
470 473 (mode, size, mtime) = parentfiledata
471 474 else:
472 475 s = os.lstat(self._join(f))
473 476 mode = s.st_mode
474 477 size = s.st_size
475 478 mtime = s[stat.ST_MTIME]
476 479 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
477 480 self._map.copymap.pop(f, None)
478 481 if f in self._map.nonnormalset:
479 482 self._map.nonnormalset.remove(f)
480 483 if mtime > self._lastnormaltime:
481 484 # Remember the most recent modification timeslot for status(),
482 485 # to make sure we won't miss future size-preserving file content
483 486 # modifications that happen within the same timeslot.
484 487 self._lastnormaltime = mtime
485 488
486 489 def normallookup(self, f):
487 490 '''Mark a file normal, but possibly dirty.'''
488 491 if self._pl[1] != self._nodeconstants.nullid:
489 492 # if there is a merge going on and the file was either
490 493 # in state 'm' (-1) or coming from other parent (-2) before
491 494 # being removed, restore that state.
492 495 entry = self._map.get(f)
493 496 if entry is not None:
494 497 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
495 498 source = self._map.copymap.get(f)
496 499 if entry[2] == NONNORMAL:
497 500 self.merge(f)
498 501 elif entry[2] == FROM_P2:
499 502 self.otherparent(f)
500 503 if source:
501 504 self.copy(source, f)
502 505 return
503 506 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
504 507 return
505 self._addpath(f, b'n', 0, NONNORMAL, -1)
508 self._addpath(f, b'n', 0, NONNORMAL, AMBIGUOUS_TIME)
506 509 self._map.copymap.pop(f, None)
507 510
508 511 def otherparent(self, f):
509 512 '''Mark as coming from the other parent, always dirty.'''
510 513 if self._pl[1] == self._nodeconstants.nullid:
511 514 msg = _(b"setting %r to other parent only allowed in merges") % f
512 515 raise error.Abort(msg)
513 516 if f in self and self[f] == b'n':
514 517 # merge-like
515 self._addpath(f, b'm', 0, FROM_P2, -1)
518 self._addpath(f, b'm', 0, FROM_P2, AMBIGUOUS_TIME)
516 519 else:
517 520 # add-like
518 self._addpath(f, b'n', 0, FROM_P2, -1)
521 self._addpath(f, b'n', 0, FROM_P2, AMBIGUOUS_TIME)
519 522 self._map.copymap.pop(f, None)
520 523
521 524 def add(self, f):
522 525 '''Mark a file added.'''
523 self._addpath(f, b'a', 0, NONNORMAL, -1)
526 self._addpath(f, b'a', 0, NONNORMAL, AMBIGUOUS_TIME)
524 527 self._map.copymap.pop(f, None)
525 528
526 529 def remove(self, f):
527 530 '''Mark a file removed.'''
528 531 self._dirty = True
529 532 oldstate = self[f]
530 533 size = 0
531 534 if self._pl[1] != self._nodeconstants.nullid:
532 535 entry = self._map.get(f)
533 536 if entry is not None:
534 537 # backup the previous state
535 538 if entry[0] == b'm': # merge
536 539 size = NONNORMAL
537 540 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
538 541 size = FROM_P2
539 542 self._map.otherparentset.add(f)
540 543 self._updatedfiles.add(f)
541 544 self._map.removefile(f, oldstate, size)
542 545 if size == 0:
543 546 self._map.copymap.pop(f, None)
544 547
545 548 def merge(self, f):
546 549 '''Mark a file merged.'''
547 550 if self._pl[1] == self._nodeconstants.nullid:
548 551 return self.normallookup(f)
549 552 return self.otherparent(f)
550 553
551 554 def drop(self, f):
552 555 '''Drop a file from the dirstate'''
553 556 oldstate = self[f]
554 557 if self._map.dropfile(f, oldstate):
555 558 self._dirty = True
556 559 self._updatedfiles.add(f)
557 560 self._map.copymap.pop(f, None)
558 561
559 562 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
560 563 if exists is None:
561 564 exists = os.path.lexists(os.path.join(self._root, path))
562 565 if not exists:
563 566 # Maybe a path component exists
564 567 if not ignoremissing and b'/' in path:
565 568 d, f = path.rsplit(b'/', 1)
566 569 d = self._normalize(d, False, ignoremissing, None)
567 570 folded = d + b"/" + f
568 571 else:
569 572 # No path components, preserve original case
570 573 folded = path
571 574 else:
572 575 # recursively normalize leading directory components
573 576 # against dirstate
574 577 if b'/' in normed:
575 578 d, f = normed.rsplit(b'/', 1)
576 579 d = self._normalize(d, False, ignoremissing, True)
577 580 r = self._root + b"/" + d
578 581 folded = d + b"/" + util.fspath(f, r)
579 582 else:
580 583 folded = util.fspath(normed, self._root)
581 584 storemap[normed] = folded
582 585
583 586 return folded
584 587
585 588 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
586 589 normed = util.normcase(path)
587 590 folded = self._map.filefoldmap.get(normed, None)
588 591 if folded is None:
589 592 if isknown:
590 593 folded = path
591 594 else:
592 595 folded = self._discoverpath(
593 596 path, normed, ignoremissing, exists, self._map.filefoldmap
594 597 )
595 598 return folded
596 599
597 600 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
598 601 normed = util.normcase(path)
599 602 folded = self._map.filefoldmap.get(normed, None)
600 603 if folded is None:
601 604 folded = self._map.dirfoldmap.get(normed, None)
602 605 if folded is None:
603 606 if isknown:
604 607 folded = path
605 608 else:
606 609 # store discovered result in dirfoldmap so that future
607 610 # normalizefile calls don't start matching directories
608 611 folded = self._discoverpath(
609 612 path, normed, ignoremissing, exists, self._map.dirfoldmap
610 613 )
611 614 return folded
612 615
613 616 def normalize(self, path, isknown=False, ignoremissing=False):
614 617 """
615 618 normalize the case of a pathname when on a casefolding filesystem
616 619
617 620 isknown specifies whether the filename came from walking the
618 621 disk, to avoid extra filesystem access.
619 622
620 623 If ignoremissing is True, missing paths are returned
621 624 unchanged. Otherwise, we try harder to normalize possibly
622 625 existing path components.
623 626
624 627 The normalized case is determined based on the following precedence:
625 628
626 629 - version of name already stored in the dirstate
627 630 - version of name stored on disk
628 631 - version provided via command arguments
629 632 """
630 633
631 634 if self._checkcase:
632 635 return self._normalize(path, isknown, ignoremissing)
633 636 return path
634 637
635 638 def clear(self):
636 639 self._map.clear()
637 640 self._lastnormaltime = 0
638 641 self._updatedfiles.clear()
639 642 self._dirty = True
640 643
641 644 def rebuild(self, parent, allfiles, changedfiles=None):
642 645 if changedfiles is None:
643 646 # Rebuild entire dirstate
644 647 to_lookup = allfiles
645 648 to_drop = []
646 649 lastnormaltime = self._lastnormaltime
647 650 self.clear()
648 651 self._lastnormaltime = lastnormaltime
649 652 elif len(changedfiles) < 10:
650 653 # Avoid turning allfiles into a set, which can be expensive if it's
651 654 # large.
652 655 to_lookup = []
653 656 to_drop = []
654 657 for f in changedfiles:
655 658 if f in allfiles:
656 659 to_lookup.append(f)
657 660 else:
658 661 to_drop.append(f)
659 662 else:
660 663 changedfilesset = set(changedfiles)
661 664 to_lookup = changedfilesset & set(allfiles)
662 665 to_drop = changedfilesset - to_lookup
663 666
664 667 if self._origpl is None:
665 668 self._origpl = self._pl
666 669 self._map.setparents(parent, self._nodeconstants.nullid)
667 670
668 671 for f in to_lookup:
669 672 self.normallookup(f)
670 673 for f in to_drop:
671 674 self.drop(f)
672 675
673 676 self._dirty = True
674 677
675 678 def identity(self):
676 679 """Return identity of dirstate itself to detect changing in storage
677 680
678 681 If identity of previous dirstate is equal to this, writing
679 682 changes based on the former dirstate out can keep consistency.
680 683 """
681 684 return self._map.identity
682 685
683 686 def write(self, tr):
684 687 if not self._dirty:
685 688 return
686 689
687 690 filename = self._filename
688 691 if tr:
689 692 # 'dirstate.write()' is not only for writing in-memory
690 693 # changes out, but also for dropping ambiguous timestamps.
691 694 # delayed writing would re-raise the "ambiguous timestamp issue".
692 695 # See also the wiki page below for detail:
693 696 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
694 697
695 698 # emulate dropping timestamp in 'parsers.pack_dirstate'
696 699 now = _getfsnow(self._opener)
697 700 self._map.clearambiguoustimes(self._updatedfiles, now)
698 701
699 702 # emulate that all 'dirstate.normal' results are written out
700 703 self._lastnormaltime = 0
701 704 self._updatedfiles.clear()
702 705
703 706 # delay writing in-memory changes out
704 707 tr.addfilegenerator(
705 708 b'dirstate',
706 709 (self._filename,),
707 710 self._writedirstate,
708 711 location=b'plain',
709 712 )
710 713 return
711 714
712 715 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
713 716 self._writedirstate(st)
714 717
715 718 def addparentchangecallback(self, category, callback):
716 719 """add a callback to be called when the wd parents are changed
717 720
718 721 Callback will be called with the following arguments:
719 722 dirstate, (oldp1, oldp2), (newp1, newp2)
720 723
721 724 Category is a unique identifier to allow overwriting an old callback
722 725 with a newer callback.
723 726 """
724 727 self._plchangecallbacks[category] = callback
725 728
726 729 def _writedirstate(self, st):
727 730 # notify callbacks about parents change
728 731 if self._origpl is not None and self._origpl != self._pl:
729 732 for c, callback in sorted(
730 733 pycompat.iteritems(self._plchangecallbacks)
731 734 ):
732 735 callback(self, self._origpl, self._pl)
733 736 self._origpl = None
734 737 # use the modification time of the newly created temporary file as the
735 738 # filesystem's notion of 'now'
736 739 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
737 740
738 741 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
739 742 # the timestamp of each entry in the dirstate, because of 'now > mtime'
740 743 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
741 744 if delaywrite > 0:
742 745 # do we have any files to delay for?
743 746 for f, e in pycompat.iteritems(self._map):
744 747 if e[0] == b'n' and e[3] == now:
745 748 import time # to avoid useless import
746 749
747 750 # rather than sleep n seconds, sleep until the next
748 751 # multiple of n seconds
749 752 clock = time.time()
750 753 start = int(clock) - (int(clock) % delaywrite)
751 754 end = start + delaywrite
752 755 time.sleep(end - clock)
753 756 now = end # trust our estimate that the end is near now
754 757 break
755 758
756 759 self._map.write(st, now)
757 760 self._lastnormaltime = 0
758 761 self._dirty = False
759 762
760 763 def _dirignore(self, f):
761 764 if self._ignore(f):
762 765 return True
763 766 for p in pathutil.finddirs(f):
764 767 if self._ignore(p):
765 768 return True
766 769 return False
767 770
768 771 def _ignorefiles(self):
769 772 files = []
770 773 if os.path.exists(self._join(b'.hgignore')):
771 774 files.append(self._join(b'.hgignore'))
772 775 for name, path in self._ui.configitems(b"ui"):
773 776 if name == b'ignore' or name.startswith(b'ignore.'):
774 777 # we need to use os.path.join here rather than self._join
775 778 # because path is arbitrary and user-specified
776 779 files.append(os.path.join(self._rootdir, util.expandpath(path)))
777 780 return files
778 781
779 782 def _ignorefileandline(self, f):
780 783 files = collections.deque(self._ignorefiles())
781 784 visited = set()
782 785 while files:
783 786 i = files.popleft()
784 787 patterns = matchmod.readpatternfile(
785 788 i, self._ui.warn, sourceinfo=True
786 789 )
787 790 for pattern, lineno, line in patterns:
788 791 kind, p = matchmod._patsplit(pattern, b'glob')
789 792 if kind == b"subinclude":
790 793 if p not in visited:
791 794 files.append(p)
792 795 continue
793 796 m = matchmod.match(
794 797 self._root, b'', [], [pattern], warn=self._ui.warn
795 798 )
796 799 if m(f):
797 800 return (i, lineno, line)
798 801 visited.add(i)
799 802 return (None, -1, b"")
800 803
801 804 def _walkexplicit(self, match, subrepos):
802 805 """Get stat data about the files explicitly specified by match.
803 806
804 807 Return a triple (results, dirsfound, dirsnotfound).
805 808 - results is a mapping from filename to stat result. It also contains
806 809 listings mapping subrepos and .hg to None.
807 810 - dirsfound is a list of files found to be directories.
808 811 - dirsnotfound is a list of files that the dirstate thinks are
809 812 directories and that were not found."""
810 813
811 814 def badtype(mode):
812 815 kind = _(b'unknown')
813 816 if stat.S_ISCHR(mode):
814 817 kind = _(b'character device')
815 818 elif stat.S_ISBLK(mode):
816 819 kind = _(b'block device')
817 820 elif stat.S_ISFIFO(mode):
818 821 kind = _(b'fifo')
819 822 elif stat.S_ISSOCK(mode):
820 823 kind = _(b'socket')
821 824 elif stat.S_ISDIR(mode):
822 825 kind = _(b'directory')
823 826 return _(b'unsupported file type (type is %s)') % kind
824 827
825 828 badfn = match.bad
826 829 dmap = self._map
827 830 lstat = os.lstat
828 831 getkind = stat.S_IFMT
829 832 dirkind = stat.S_IFDIR
830 833 regkind = stat.S_IFREG
831 834 lnkkind = stat.S_IFLNK
832 835 join = self._join
833 836 dirsfound = []
834 837 foundadd = dirsfound.append
835 838 dirsnotfound = []
836 839 notfoundadd = dirsnotfound.append
837 840
838 841 if not match.isexact() and self._checkcase:
839 842 normalize = self._normalize
840 843 else:
841 844 normalize = None
842 845
843 846 files = sorted(match.files())
844 847 subrepos.sort()
845 848 i, j = 0, 0
846 849 while i < len(files) and j < len(subrepos):
847 850 subpath = subrepos[j] + b"/"
848 851 if files[i] < subpath:
849 852 i += 1
850 853 continue
851 854 while i < len(files) and files[i].startswith(subpath):
852 855 del files[i]
853 856 j += 1
854 857
855 858 if not files or b'' in files:
856 859 files = [b'']
857 860 # constructing the foldmap is expensive, so don't do it for the
858 861 # common case where files is ['']
859 862 normalize = None
860 863 results = dict.fromkeys(subrepos)
861 864 results[b'.hg'] = None
862 865
863 866 for ff in files:
864 867 if normalize:
865 868 nf = normalize(ff, False, True)
866 869 else:
867 870 nf = ff
868 871 if nf in results:
869 872 continue
870 873
871 874 try:
872 875 st = lstat(join(nf))
873 876 kind = getkind(st.st_mode)
874 877 if kind == dirkind:
875 878 if nf in dmap:
876 879 # file replaced by dir on disk but still in dirstate
877 880 results[nf] = None
878 881 foundadd((nf, ff))
879 882 elif kind == regkind or kind == lnkkind:
880 883 results[nf] = st
881 884 else:
882 885 badfn(ff, badtype(kind))
883 886 if nf in dmap:
884 887 results[nf] = None
885 888 except OSError as inst: # nf not found on disk - it is dirstate only
886 889 if nf in dmap: # does it exactly match a missing file?
887 890 results[nf] = None
888 891 else: # does it match a missing directory?
889 892 if self._map.hasdir(nf):
890 893 notfoundadd(nf)
891 894 else:
892 895 badfn(ff, encoding.strtolocal(inst.strerror))
893 896
894 897 # match.files() may contain explicitly-specified paths that shouldn't
895 898 # be taken; drop them from the list of files found. dirsfound/notfound
896 899 # aren't filtered here because they will be tested later.
897 900 if match.anypats():
898 901 for f in list(results):
899 902 if f == b'.hg' or f in subrepos:
900 903 # keep sentinel to disable further out-of-repo walks
901 904 continue
902 905 if not match(f):
903 906 del results[f]
904 907
905 908 # Case insensitive filesystems cannot rely on lstat() failing to detect
906 909 # a case-only rename. Prune the stat object for any file that does not
907 910 # match the case in the filesystem, if there are multiple files that
908 911 # normalize to the same path.
909 912 if match.isexact() and self._checkcase:
910 913 normed = {}
911 914
912 915 for f, st in pycompat.iteritems(results):
913 916 if st is None:
914 917 continue
915 918
916 919 nc = util.normcase(f)
917 920 paths = normed.get(nc)
918 921
919 922 if paths is None:
920 923 paths = set()
921 924 normed[nc] = paths
922 925
923 926 paths.add(f)
924 927
925 928 for norm, paths in pycompat.iteritems(normed):
926 929 if len(paths) > 1:
927 930 for path in paths:
928 931 folded = self._discoverpath(
929 932 path, norm, True, None, self._map.dirfoldmap
930 933 )
931 934 if path != folded:
932 935 results[path] = None
933 936
934 937 return results, dirsfound, dirsnotfound
935 938
936 939 def walk(self, match, subrepos, unknown, ignored, full=True):
937 940 """
938 941 Walk recursively through the directory tree, finding all files
939 942 matched by match.
940 943
941 944 If full is False, maybe skip some known-clean files.
942 945
943 946 Return a dict mapping filename to stat-like object (either
944 947 mercurial.osutil.stat instance or return value of os.stat()).
945 948
946 949 """
947 950 # full is a flag that extensions that hook into walk can use -- this
948 951 # implementation doesn't use it at all. This satisfies the contract
949 952 # because we only guarantee a "maybe".
950 953
951 954 if ignored:
952 955 ignore = util.never
953 956 dirignore = util.never
954 957 elif unknown:
955 958 ignore = self._ignore
956 959 dirignore = self._dirignore
957 960 else:
958 961 # if not unknown and not ignored, drop dir recursion and step 2
959 962 ignore = util.always
960 963 dirignore = util.always
961 964
962 965 matchfn = match.matchfn
963 966 matchalways = match.always()
964 967 matchtdir = match.traversedir
965 968 dmap = self._map
966 969 listdir = util.listdir
967 970 lstat = os.lstat
968 971 dirkind = stat.S_IFDIR
969 972 regkind = stat.S_IFREG
970 973 lnkkind = stat.S_IFLNK
971 974 join = self._join
972 975
973 976 exact = skipstep3 = False
974 977 if match.isexact(): # match.exact
975 978 exact = True
976 979 dirignore = util.always # skip step 2
977 980 elif match.prefix(): # match.match, no patterns
978 981 skipstep3 = True
979 982
980 983 if not exact and self._checkcase:
981 984 normalize = self._normalize
982 985 normalizefile = self._normalizefile
983 986 skipstep3 = False
984 987 else:
985 988 normalize = self._normalize
986 989 normalizefile = None
987 990
988 991 # step 1: find all explicit files
989 992 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
990 993 if matchtdir:
991 994 for d in work:
992 995 matchtdir(d[0])
993 996 for d in dirsnotfound:
994 997 matchtdir(d)
995 998
996 999 skipstep3 = skipstep3 and not (work or dirsnotfound)
997 1000 work = [d for d in work if not dirignore(d[0])]
998 1001
999 1002 # step 2: visit subdirectories
1000 1003 def traverse(work, alreadynormed):
1001 1004 wadd = work.append
1002 1005 while work:
1003 1006 tracing.counter('dirstate.walk work', len(work))
1004 1007 nd = work.pop()
1005 1008 visitentries = match.visitchildrenset(nd)
1006 1009 if not visitentries:
1007 1010 continue
1008 1011 if visitentries == b'this' or visitentries == b'all':
1009 1012 visitentries = None
1010 1013 skip = None
1011 1014 if nd != b'':
1012 1015 skip = b'.hg'
1013 1016 try:
1014 1017 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1015 1018 entries = listdir(join(nd), stat=True, skip=skip)
1016 1019 except OSError as inst:
1017 1020 if inst.errno in (errno.EACCES, errno.ENOENT):
1018 1021 match.bad(
1019 1022 self.pathto(nd), encoding.strtolocal(inst.strerror)
1020 1023 )
1021 1024 continue
1022 1025 raise
1023 1026 for f, kind, st in entries:
1024 1027 # Some matchers may return files in the visitentries set,
1025 1028 # instead of 'this', if the matcher explicitly mentions them
1026 1029 # and is not an exactmatcher. This is acceptable; we do not
1027 1030 # make any hard assumptions about file-or-directory below
1028 1031 # based on the presence of `f` in visitentries. If
1029 1032 # visitchildrenset returned a set, we can always skip the
1030 1033 # entries *not* in the set it provided regardless of whether
1031 1034 # they're actually a file or a directory.
1032 1035 if visitentries and f not in visitentries:
1033 1036 continue
1034 1037 if normalizefile:
1035 1038 # even though f might be a directory, we're only
1036 1039 # interested in comparing it to files currently in the
1037 1040 # dmap -- therefore normalizefile is enough
1038 1041 nf = normalizefile(
1039 1042 nd and (nd + b"/" + f) or f, True, True
1040 1043 )
1041 1044 else:
1042 1045 nf = nd and (nd + b"/" + f) or f
1043 1046 if nf not in results:
1044 1047 if kind == dirkind:
1045 1048 if not ignore(nf):
1046 1049 if matchtdir:
1047 1050 matchtdir(nf)
1048 1051 wadd(nf)
1049 1052 if nf in dmap and (matchalways or matchfn(nf)):
1050 1053 results[nf] = None
1051 1054 elif kind == regkind or kind == lnkkind:
1052 1055 if nf in dmap:
1053 1056 if matchalways or matchfn(nf):
1054 1057 results[nf] = st
1055 1058 elif (matchalways or matchfn(nf)) and not ignore(
1056 1059 nf
1057 1060 ):
1058 1061 # unknown file -- normalize if necessary
1059 1062 if not alreadynormed:
1060 1063 nf = normalize(nf, False, True)
1061 1064 results[nf] = st
1062 1065 elif nf in dmap and (matchalways or matchfn(nf)):
1063 1066 results[nf] = None
1064 1067
1065 1068 for nd, d in work:
1066 1069 # alreadynormed means that processwork doesn't have to do any
1067 1070 # expensive directory normalization
1068 1071 alreadynormed = not normalize or nd == d
1069 1072 traverse([d], alreadynormed)
1070 1073
1071 1074 for s in subrepos:
1072 1075 del results[s]
1073 1076 del results[b'.hg']
1074 1077
1075 1078 # step 3: visit remaining files from dmap
1076 1079 if not skipstep3 and not exact:
1077 1080 # If a dmap file is not in results yet, it was either
1078 1081 # a) not matching matchfn b) ignored, c) missing, or d) under a
1079 1082 # symlink directory.
1080 1083 if not results and matchalways:
1081 1084 visit = [f for f in dmap]
1082 1085 else:
1083 1086 visit = [f for f in dmap if f not in results and matchfn(f)]
1084 1087 visit.sort()
1085 1088
1086 1089 if unknown:
1087 1090 # unknown == True means we walked all dirs under the roots
1088 1091 # that weren't ignored, and everything that matched was stat'ed
1089 1092 # and is already in results.
1090 1093 # The rest must thus be ignored or under a symlink.
1091 1094 audit_path = pathutil.pathauditor(self._root, cached=True)
1092 1095
1093 1096 for nf in iter(visit):
1094 1097 # If a stat for the same file was already added with a
1095 1098 # different case, don't add one for this, since that would
1096 1099 # make it appear as if the file exists under both names
1097 1100 # on disk.
1098 1101 if (
1099 1102 normalizefile
1100 1103 and normalizefile(nf, True, True) in results
1101 1104 ):
1102 1105 results[nf] = None
1103 1106 # Report ignored items in the dmap as long as they are not
1104 1107 # under a symlink directory.
1105 1108 elif audit_path.check(nf):
1106 1109 try:
1107 1110 results[nf] = lstat(join(nf))
1108 1111 # file was just ignored, no links, and exists
1109 1112 except OSError:
1110 1113 # file doesn't exist
1111 1114 results[nf] = None
1112 1115 else:
1113 1116 # It's either missing or under a symlink directory
1114 1117 # which we in this case report as missing
1115 1118 results[nf] = None
1116 1119 else:
1117 1120 # We may not have walked the full directory tree above,
1118 1121 # so stat and check everything we missed.
1119 1122 iv = iter(visit)
1120 1123 for st in util.statfiles([join(i) for i in visit]):
1121 1124 results[next(iv)] = st
1122 1125 return results
1123 1126
1124 1127 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1125 1128 # Force Rayon (Rust parallelism library) to respect the number of
1126 1129 # workers. This is a temporary workaround until Rust code knows
1127 1130 # how to read the config file.
1128 1131 numcpus = self._ui.configint(b"worker", b"numcpus")
1129 1132 if numcpus is not None:
1130 1133 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1131 1134
1132 1135 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1133 1136 if not workers_enabled:
1134 1137 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1135 1138
1136 1139 (
1137 1140 lookup,
1138 1141 modified,
1139 1142 added,
1140 1143 removed,
1141 1144 deleted,
1142 1145 clean,
1143 1146 ignored,
1144 1147 unknown,
1145 1148 warnings,
1146 1149 bad,
1147 1150 traversed,
1148 1151 dirty,
1149 1152 ) = rustmod.status(
1150 1153 self._map._rustmap,
1151 1154 matcher,
1152 1155 self._rootdir,
1153 1156 self._ignorefiles(),
1154 1157 self._checkexec,
1155 1158 self._lastnormaltime,
1156 1159 bool(list_clean),
1157 1160 bool(list_ignored),
1158 1161 bool(list_unknown),
1159 1162 bool(matcher.traversedir),
1160 1163 )
1161 1164
1162 1165 self._dirty |= dirty
1163 1166
1164 1167 if matcher.traversedir:
1165 1168 for dir in traversed:
1166 1169 matcher.traversedir(dir)
1167 1170
1168 1171 if self._ui.warn:
1169 1172 for item in warnings:
1170 1173 if isinstance(item, tuple):
1171 1174 file_path, syntax = item
1172 1175 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1173 1176 file_path,
1174 1177 syntax,
1175 1178 )
1176 1179 self._ui.warn(msg)
1177 1180 else:
1178 1181 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1179 1182 self._ui.warn(
1180 1183 msg
1181 1184 % (
1182 1185 pathutil.canonpath(
1183 1186 self._rootdir, self._rootdir, item
1184 1187 ),
1185 1188 b"No such file or directory",
1186 1189 )
1187 1190 )
1188 1191
1189 1192 for (fn, message) in bad:
1190 1193 matcher.bad(fn, encoding.strtolocal(message))
1191 1194
1192 1195 status = scmutil.status(
1193 1196 modified=modified,
1194 1197 added=added,
1195 1198 removed=removed,
1196 1199 deleted=deleted,
1197 1200 unknown=unknown,
1198 1201 ignored=ignored,
1199 1202 clean=clean,
1200 1203 )
1201 1204 return (lookup, status)
1202 1205
1203 1206 def status(self, match, subrepos, ignored, clean, unknown):
1204 1207 """Determine the status of the working copy relative to the
1205 1208 dirstate and return a pair of (unsure, status), where status is of type
1206 1209 scmutil.status and:
1207 1210
1208 1211 unsure:
1209 1212 files that might have been modified since the dirstate was
1210 1213 written, but need to be read to be sure (size is the same
1211 1214 but mtime differs)
1212 1215 status.modified:
1213 1216 files that have definitely been modified since the dirstate
1214 1217 was written (different size or mode)
1215 1218 status.clean:
1216 1219 files that have definitely not been modified since the
1217 1220 dirstate was written
1218 1221 """
1219 1222 listignored, listclean, listunknown = ignored, clean, unknown
1220 1223 lookup, modified, added, unknown, ignored = [], [], [], [], []
1221 1224 removed, deleted, clean = [], [], []
1222 1225
1223 1226 dmap = self._map
1224 1227 dmap.preload()
1225 1228
1226 1229 use_rust = True
1227 1230
1228 1231 allowed_matchers = (
1229 1232 matchmod.alwaysmatcher,
1230 1233 matchmod.exactmatcher,
1231 1234 matchmod.includematcher,
1232 1235 )
1233 1236
1234 1237 if rustmod is None:
1235 1238 use_rust = False
1236 1239 elif self._checkcase:
1237 1240 # Case-insensitive filesystems are not handled yet
1238 1241 use_rust = False
1239 1242 elif subrepos:
1240 1243 use_rust = False
1241 1244 elif sparse.enabled:
1242 1245 use_rust = False
1243 1246 elif not isinstance(match, allowed_matchers):
1244 1247 # Some matchers have yet to be implemented
1245 1248 use_rust = False
1246 1249
1247 1250 if use_rust:
1248 1251 try:
1249 1252 return self._rust_status(
1250 1253 match, listclean, listignored, listunknown
1251 1254 )
1252 1255 except rustmod.FallbackError:
1253 1256 pass
1254 1257
1255 1258 def noop(f):
1256 1259 pass
1257 1260
1258 1261 dcontains = dmap.__contains__
1259 1262 dget = dmap.__getitem__
1260 1263 ladd = lookup.append # aka "unsure"
1261 1264 madd = modified.append
1262 1265 aadd = added.append
1263 1266 uadd = unknown.append if listunknown else noop
1264 1267 iadd = ignored.append if listignored else noop
1265 1268 radd = removed.append
1266 1269 dadd = deleted.append
1267 1270 cadd = clean.append if listclean else noop
1268 1271 mexact = match.exact
1269 1272 dirignore = self._dirignore
1270 1273 checkexec = self._checkexec
1271 1274 copymap = self._map.copymap
1272 1275 lastnormaltime = self._lastnormaltime
1273 1276
1274 1277 # We need to do full walks when either
1275 1278 # - we're listing all clean files, or
1276 1279 # - match.traversedir does something, because match.traversedir should
1277 1280 # be called for every dir in the working dir
1278 1281 full = listclean or match.traversedir is not None
1279 1282 for fn, st in pycompat.iteritems(
1280 1283 self.walk(match, subrepos, listunknown, listignored, full=full)
1281 1284 ):
1282 1285 if not dcontains(fn):
1283 1286 if (listignored or mexact(fn)) and dirignore(fn):
1284 1287 if listignored:
1285 1288 iadd(fn)
1286 1289 else:
1287 1290 uadd(fn)
1288 1291 continue
1289 1292
1290 1293 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1291 1294 # written like that for performance reasons. dmap[fn] is not a
1292 1295 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1293 1296 # opcode has fast paths when the value to be unpacked is a tuple or
1294 1297 # a list, but falls back to creating a full-fledged iterator in
1295 1298 # general. That is much slower than simply accessing and storing the
1296 1299 # tuple members one by one.
1297 1300 t = dget(fn)
1298 1301 state = t[0]
1299 1302 mode = t[1]
1300 1303 size = t[2]
1301 1304 time = t[3]
1302 1305
1303 1306 if not st and state in b"nma":
1304 1307 dadd(fn)
1305 1308 elif state == b'n':
1306 1309 if (
1307 1310 size >= 0
1308 1311 and (
1309 1312 (size != st.st_size and size != st.st_size & _rangemask)
1310 1313 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1311 1314 )
1312 1315 or size == FROM_P2 # other parent
1313 1316 or fn in copymap
1314 1317 ):
1315 1318 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1316 1319 # issue6456: Size returned may be longer due to
1317 1320 # encryption on EXT-4 fscrypt, undecided.
1318 1321 ladd(fn)
1319 1322 else:
1320 1323 madd(fn)
1321 1324 elif (
1322 1325 time != st[stat.ST_MTIME]
1323 1326 and time != st[stat.ST_MTIME] & _rangemask
1324 1327 ):
1325 1328 ladd(fn)
1326 1329 elif st[stat.ST_MTIME] == lastnormaltime:
1327 1330 # fn may have just been marked as normal and it may have
1328 1331 # changed in the same second without changing its size.
1329 1332 # This can happen if we quickly do multiple commits.
1330 1333 # Force lookup, so we don't miss such a racy file change.
1331 1334 ladd(fn)
1332 1335 elif listclean:
1333 1336 cadd(fn)
1334 1337 elif state == b'm':
1335 1338 madd(fn)
1336 1339 elif state == b'a':
1337 1340 aadd(fn)
1338 1341 elif state == b'r':
1339 1342 radd(fn)
1340 1343 status = scmutil.status(
1341 1344 modified, added, removed, deleted, unknown, ignored, clean
1342 1345 )
1343 1346 return (lookup, status)
1344 1347
1345 1348 def matches(self, match):
1346 1349 """
1347 1350 return files in the dirstate (in whatever state) filtered by match
1348 1351 """
1349 1352 dmap = self._map
1350 1353 if rustmod is not None:
1351 1354 dmap = self._map._rustmap
1352 1355
1353 1356 if match.always():
1354 1357 return dmap.keys()
1355 1358 files = match.files()
1356 1359 if match.isexact():
1357 1360 # fast path -- filter the other way around, since typically files is
1358 1361 # much smaller than dmap
1359 1362 return [f for f in files if f in dmap]
1360 1363 if match.prefix() and all(fn in dmap for fn in files):
1361 1364 # fast path -- all the values are known to be files, so just return
1362 1365 # that
1363 1366 return list(files)
1364 1367 return [f for f in dmap if match(f)]
1365 1368
1366 1369 def _actualfilename(self, tr):
1367 1370 if tr:
1368 1371 return self._pendingfilename
1369 1372 else:
1370 1373 return self._filename
1371 1374
1372 1375 def savebackup(self, tr, backupname):
1373 1376 '''Save current dirstate into backup file'''
1374 1377 filename = self._actualfilename(tr)
1375 1378 assert backupname != filename
1376 1379
1377 1380 # use '_writedirstate' instead of 'write' to write changes certainly,
1378 1381 # because the latter omits writing out if transaction is running.
1379 1382 # output file will be used to create backup of dirstate at this point.
1380 1383 if self._dirty or not self._opener.exists(filename):
1381 1384 self._writedirstate(
1382 1385 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1383 1386 )
1384 1387
1385 1388 if tr:
1386 1389 # ensure that subsequent tr.writepending returns True for
1387 1390 # changes written out above, even if dirstate is never
1388 1391 # changed after this
1389 1392 tr.addfilegenerator(
1390 1393 b'dirstate',
1391 1394 (self._filename,),
1392 1395 self._writedirstate,
1393 1396 location=b'plain',
1394 1397 )
1395 1398
1396 1399 # ensure that pending file written above is unlinked at
1397 1400 # failure, even if tr.writepending isn't invoked until the
1398 1401 # end of this transaction
1399 1402 tr.registertmp(filename, location=b'plain')
1400 1403
1401 1404 self._opener.tryunlink(backupname)
1402 1405 # hardlink backup is okay because _writedirstate is always called
1403 1406 # with an "atomictemp=True" file.
1404 1407 util.copyfile(
1405 1408 self._opener.join(filename),
1406 1409 self._opener.join(backupname),
1407 1410 hardlink=True,
1408 1411 )
1409 1412
1410 1413 def restorebackup(self, tr, backupname):
1411 1414 '''Restore dirstate by backup file'''
1412 1415 # this "invalidate()" prevents "wlock.release()" from writing
1413 1416 # changes of dirstate out after restoring from backup file
1414 1417 self.invalidate()
1415 1418 filename = self._actualfilename(tr)
1416 1419 o = self._opener
1417 1420 if util.samefile(o.join(backupname), o.join(filename)):
1418 1421 o.unlink(backupname)
1419 1422 else:
1420 1423 o.rename(backupname, filename, checkambig=True)
1421 1424
1422 1425 def clearbackup(self, tr, backupname):
1423 1426 '''Clear backup file'''
1424 1427 self._opener.unlink(backupname)
1425 1428
1426 1429
1427 1430 class dirstatemap(object):
1428 1431 """Map encapsulating the dirstate's contents.
1429 1432
1430 1433 The dirstate contains the following state:
1431 1434
1432 1435 - `identity` is the identity of the dirstate file, which can be used to
1433 1436 detect when changes have occurred to the dirstate file.
1434 1437
1435 1438 - `parents` is a pair containing the parents of the working copy. The
1436 1439 parents are updated by calling `setparents`.
1437 1440
1438 1441 - the state map maps filenames to tuples of (state, mode, size, mtime),
1439 1442 where state is a single character representing 'normal', 'added',
1440 1443 'removed', or 'merged'. It is read by treating the dirstate as a
1441 1444 dict. File state is updated by calling the `addfile`, `removefile` and
1442 1445 `dropfile` methods.
1443 1446
1444 1447 - `copymap` maps destination filenames to their source filename.
1445 1448
1446 1449 The dirstate also provides the following views onto the state:
1447 1450
1448 1451 - `nonnormalset` is a set of the filenames that have state other
1449 1452 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1450 1453
1451 1454 - `otherparentset` is a set of the filenames that are marked as coming
1452 1455 from the second parent when the dirstate is currently being merged.
1453 1456
1454 1457 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1455 1458 form that they appear as in the dirstate.
1456 1459
1457 1460 - `dirfoldmap` is a dict mapping normalized directory names to the
1458 1461 denormalized form that they appear as in the dirstate.
1459 1462 """
1460 1463
1461 1464 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1462 1465 self._ui = ui
1463 1466 self._opener = opener
1464 1467 self._root = root
1465 1468 self._filename = b'dirstate'
1466 1469 self._nodelen = 20
1467 1470 self._nodeconstants = nodeconstants
1468 1471 assert (
1469 1472 not use_dirstate_v2
1470 1473 ), "should have detected unsupported requirement"
1471 1474
1472 1475 self._parents = None
1473 1476 self._dirtyparents = False
1474 1477
1475 1478 # for consistent view between _pl() and _read() invocations
1476 1479 self._pendingmode = None
1477 1480
1478 1481 @propertycache
1479 1482 def _map(self):
1480 1483 self._map = {}
1481 1484 self.read()
1482 1485 return self._map
1483 1486
1484 1487 @propertycache
1485 1488 def copymap(self):
1486 1489 self.copymap = {}
1487 1490 self._map
1488 1491 return self.copymap
1489 1492
1490 1493 def directories(self):
1491 1494 # Rust / dirstate-v2 only
1492 1495 return []
1493 1496
1494 1497 def clear(self):
1495 1498 self._map.clear()
1496 1499 self.copymap.clear()
1497 1500 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1498 1501 util.clearcachedproperty(self, b"_dirs")
1499 1502 util.clearcachedproperty(self, b"_alldirs")
1500 1503 util.clearcachedproperty(self, b"filefoldmap")
1501 1504 util.clearcachedproperty(self, b"dirfoldmap")
1502 1505 util.clearcachedproperty(self, b"nonnormalset")
1503 1506 util.clearcachedproperty(self, b"otherparentset")
1504 1507
1505 1508 def items(self):
1506 1509 return pycompat.iteritems(self._map)
1507 1510
1508 1511 # forward for python2,3 compat
1509 1512 iteritems = items
1510 1513
1511 1514 def __len__(self):
1512 1515 return len(self._map)
1513 1516
1514 1517 def __iter__(self):
1515 1518 return iter(self._map)
1516 1519
1517 1520 def get(self, key, default=None):
1518 1521 return self._map.get(key, default)
1519 1522
1520 1523 def __contains__(self, key):
1521 1524 return key in self._map
1522 1525
1523 1526 def __getitem__(self, key):
1524 1527 return self._map[key]
1525 1528
1526 1529 def keys(self):
1527 1530 return self._map.keys()
1528 1531
1529 1532 def preload(self):
1530 1533 """Loads the underlying data, if it's not already loaded"""
1531 1534 self._map
1532 1535
1533 1536 def addfile(self, f, oldstate, state, mode, size, mtime):
1534 1537 """Add a tracked file to the dirstate."""
1535 1538 if oldstate in b"?r" and "_dirs" in self.__dict__:
1536 1539 self._dirs.addpath(f)
1537 1540 if oldstate == b"?" and "_alldirs" in self.__dict__:
1538 1541 self._alldirs.addpath(f)
1539 1542 self._map[f] = dirstatetuple(state, mode, size, mtime)
1540 if state != b'n' or mtime == -1:
1543 if state != b'n' or mtime == AMBIGUOUS_TIME:
1541 1544 self.nonnormalset.add(f)
1542 1545 if size == FROM_P2:
1543 1546 self.otherparentset.add(f)
1544 1547
1545 1548 def removefile(self, f, oldstate, size):
1546 1549 """
1547 1550 Mark a file as removed in the dirstate.
1548 1551
1549 1552 The `size` parameter is used to store sentinel values that indicate
1550 1553 the file's previous state. In the future, we should refactor this
1551 1554 to be more explicit about what that state is.
1552 1555 """
1553 1556 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1554 1557 self._dirs.delpath(f)
1555 1558 if oldstate == b"?" and "_alldirs" in self.__dict__:
1556 1559 self._alldirs.addpath(f)
1557 1560 if "filefoldmap" in self.__dict__:
1558 1561 normed = util.normcase(f)
1559 1562 self.filefoldmap.pop(normed, None)
1560 1563 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1561 1564 self.nonnormalset.add(f)
1562 1565
1563 1566 def dropfile(self, f, oldstate):
1564 1567 """
1565 1568 Remove a file from the dirstate. Returns True if the file was
1566 1569 previously recorded.
1567 1570 """
1568 1571 exists = self._map.pop(f, None) is not None
1569 1572 if exists:
1570 1573 if oldstate != b"r" and "_dirs" in self.__dict__:
1571 1574 self._dirs.delpath(f)
1572 1575 if "_alldirs" in self.__dict__:
1573 1576 self._alldirs.delpath(f)
1574 1577 if "filefoldmap" in self.__dict__:
1575 1578 normed = util.normcase(f)
1576 1579 self.filefoldmap.pop(normed, None)
1577 1580 self.nonnormalset.discard(f)
1578 1581 return exists
1579 1582
1580 1583 def clearambiguoustimes(self, files, now):
1581 1584 for f in files:
1582 1585 e = self.get(f)
1583 1586 if e is not None and e[0] == b'n' and e[3] == now:
1584 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1587 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
1585 1588 self.nonnormalset.add(f)
1586 1589
1587 1590 def nonnormalentries(self):
1588 1591 '''Compute the nonnormal dirstate entries from the dmap'''
1589 1592 try:
1590 1593 return parsers.nonnormalotherparententries(self._map)
1591 1594 except AttributeError:
1592 1595 nonnorm = set()
1593 1596 otherparent = set()
1594 1597 for fname, e in pycompat.iteritems(self._map):
1595 if e[0] != b'n' or e[3] == -1:
1598 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
1596 1599 nonnorm.add(fname)
1597 1600 if e[0] == b'n' and e[2] == FROM_P2:
1598 1601 otherparent.add(fname)
1599 1602 return nonnorm, otherparent
1600 1603
1601 1604 @propertycache
1602 1605 def filefoldmap(self):
1603 1606 """Returns a dictionary mapping normalized case paths to their
1604 1607 non-normalized versions.
1605 1608 """
1606 1609 try:
1607 1610 makefilefoldmap = parsers.make_file_foldmap
1608 1611 except AttributeError:
1609 1612 pass
1610 1613 else:
1611 1614 return makefilefoldmap(
1612 1615 self._map, util.normcasespec, util.normcasefallback
1613 1616 )
1614 1617
1615 1618 f = {}
1616 1619 normcase = util.normcase
1617 1620 for name, s in pycompat.iteritems(self._map):
1618 1621 if s[0] != b'r':
1619 1622 f[normcase(name)] = name
1620 1623 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1621 1624 return f
1622 1625
1623 1626 def hastrackeddir(self, d):
1624 1627 """
1625 1628 Returns True if the dirstate contains a tracked (not removed) file
1626 1629 in this directory.
1627 1630 """
1628 1631 return d in self._dirs
1629 1632
1630 1633 def hasdir(self, d):
1631 1634 """
1632 1635 Returns True if the dirstate contains a file (tracked or removed)
1633 1636 in this directory.
1634 1637 """
1635 1638 return d in self._alldirs
1636 1639
1637 1640 @propertycache
1638 1641 def _dirs(self):
1639 1642 return pathutil.dirs(self._map, b'r')
1640 1643
1641 1644 @propertycache
1642 1645 def _alldirs(self):
1643 1646 return pathutil.dirs(self._map)
1644 1647
1645 1648 def _opendirstatefile(self):
1646 1649 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1647 1650 if self._pendingmode is not None and self._pendingmode != mode:
1648 1651 fp.close()
1649 1652 raise error.Abort(
1650 1653 _(b'working directory state may be changed parallelly')
1651 1654 )
1652 1655 self._pendingmode = mode
1653 1656 return fp
1654 1657
1655 1658 def parents(self):
1656 1659 if not self._parents:
1657 1660 try:
1658 1661 fp = self._opendirstatefile()
1659 1662 st = fp.read(2 * self._nodelen)
1660 1663 fp.close()
1661 1664 except IOError as err:
1662 1665 if err.errno != errno.ENOENT:
1663 1666 raise
1664 1667 # File doesn't exist, so the current state is empty
1665 1668 st = b''
1666 1669
1667 1670 l = len(st)
1668 1671 if l == self._nodelen * 2:
1669 1672 self._parents = (
1670 1673 st[: self._nodelen],
1671 1674 st[self._nodelen : 2 * self._nodelen],
1672 1675 )
1673 1676 elif l == 0:
1674 1677 self._parents = (
1675 1678 self._nodeconstants.nullid,
1676 1679 self._nodeconstants.nullid,
1677 1680 )
1678 1681 else:
1679 1682 raise error.Abort(
1680 1683 _(b'working directory state appears damaged!')
1681 1684 )
1682 1685
1683 1686 return self._parents
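# Reading note (editorial): the dirstate file starts with the two parent
# node ids, each self._nodelen bytes long.  Assuming the usual 20-byte
# nodes, the first 40 bytes decode as
#
#     p1, p2 = st[:20], st[20:40]
#
# while a missing or empty file is read as two null parents.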
1684 1687
1685 1688 def setparents(self, p1, p2):
1686 1689 self._parents = (p1, p2)
1687 1690 self._dirtyparents = True
1688 1691
1689 1692 def read(self):
1690 1693 # ignore HG_PENDING because identity is used only for writing
1691 1694 self.identity = util.filestat.frompath(
1692 1695 self._opener.join(self._filename)
1693 1696 )
1694 1697
1695 1698 try:
1696 1699 fp = self._opendirstatefile()
1697 1700 try:
1698 1701 st = fp.read()
1699 1702 finally:
1700 1703 fp.close()
1701 1704 except IOError as err:
1702 1705 if err.errno != errno.ENOENT:
1703 1706 raise
1704 1707 return
1705 1708 if not st:
1706 1709 return
1707 1710
1708 1711 if util.safehasattr(parsers, b'dict_new_presized'):
1709 1712 # Make an estimate of the number of files in the dirstate based on
1710 1713 # its size. This trades wasting some memory for avoiding costly
1711 1714 # resizes. Each entry has a prefix of 17 bytes followed by one or
1712 1715 # two path names. Studies on various large-scale real-world repositories
1713 1716 # found 54 bytes to be a reasonable upper limit for the average path name.
1714 1717 # Copy entries are ignored for the sake of this estimate.
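# (editorial note on the arithmetic: ~17 bytes of fixed-size data plus
# ~54 bytes of path per entry is where the `len(st) // 71` divisor below
# comes from)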
1715 1718 self._map = parsers.dict_new_presized(len(st) // 71)
1716 1719
1717 1720 # Python's garbage collector triggers a GC each time a certain number
1718 1721 # of container objects (the number being defined by
1719 1722 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1720 1723 # for each file in the dirstate. The C version then immediately marks
1721 1724 # them as not to be tracked by the collector. However, this has no
1722 1725 # effect on when GCs are triggered, only on what objects the GC looks
1723 1726 # into. This means that O(number of files) GCs are unavoidable.
1724 1727 # Depending on when in the process's lifetime the dirstate is parsed,
1725 1728 # this can get very expensive. As a workaround, disable GC while
1726 1729 # parsing the dirstate.
1727 1730 #
1728 1731 # (we cannot decorate the function directly since it is in a C module)
1729 1732 parse_dirstate = util.nogc(parsers.parse_dirstate)
1730 1733 p = parse_dirstate(self._map, self.copymap, st)
1731 1734 if not self._dirtyparents:
1732 1735 self.setparents(*p)
1733 1736
1734 1737 # Avoid excess attribute lookups by fast pathing certain checks
1735 1738 self.__contains__ = self._map.__contains__
1736 1739 self.__getitem__ = self._map.__getitem__
1737 1740 self.get = self._map.get
1738 1741
1739 1742 def write(self, st, now):
1740 1743 st.write(
1741 1744 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1742 1745 )
1743 1746 st.close()
1744 1747 self._dirtyparents = False
1745 1748 self.nonnormalset, self.otherparentset = self.nonnormalentries()
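# Note (editorial, hedged): `now` is handed to the packer so that entries
# whose mtime equals the write time can be flagged as ambiguous on disk;
# recomputing nonnormalset/otherparentset immediately after the write keeps
# the cached sets consistent with what was just written.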
1746 1749
1747 1750 @propertycache
1748 1751 def nonnormalset(self):
1749 1752 nonnorm, otherparents = self.nonnormalentries()
1750 1753 self.otherparentset = otherparents
1751 1754 return nonnorm
1752 1755
1753 1756 @propertycache
1754 1757 def otherparentset(self):
1755 1758 nonnorm, otherparents = self.nonnormalentries()
1756 1759 self.nonnormalset = nonnorm
1757 1760 return otherparents
1758 1761
1759 1762 def non_normal_or_other_parent_paths(self):
1760 1763 return self.nonnormalset.union(self.otherparentset)
1761 1764
1762 1765 @propertycache
1763 1766 def identity(self):
1764 1767 self._map
1765 1768 return self.identity
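# Clarifying note (editorial): touching self._map triggers read(), which
# assigns self.identity as a plain instance attribute; instance lookup then
# wins over this propertycache, so the value returned here is the filestat
# captured when the dirstate file was actually read.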
1766 1769
1767 1770 @propertycache
1768 1771 def dirfoldmap(self):
1769 1772 f = {}
1770 1773 normcase = util.normcase
1771 1774 for name in self._dirs:
1772 1775 f[normcase(name)] = name
1773 1776 return f
1774 1777
1775 1778
1776 1779 if rustmod is not None:
1777 1780
1778 1781 class dirstatemap(object):
1779 1782 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1780 1783 self._use_dirstate_v2 = use_dirstate_v2
1781 1784 self._nodeconstants = nodeconstants
1782 1785 self._ui = ui
1783 1786 self._opener = opener
1784 1787 self._root = root
1785 1788 self._filename = b'dirstate'
1786 1789 self._nodelen = 20 # Also update Rust code when changing this!
1787 1790 self._parents = None
1788 1791 self._dirtyparents = False
1789 1792
1790 1793 # for consistent view between _pl() and _read() invocations
1791 1794 self._pendingmode = None
1792 1795
1793 1796 self._use_dirstate_tree = self._ui.configbool(
1794 1797 b"experimental",
1795 1798 b"dirstate-tree.in-memory",
1796 1799 False,
1797 1800 )
1798 1801
1799 1802 def addfile(self, *args, **kwargs):
1800 1803 return self._rustmap.addfile(*args, **kwargs)
1801 1804
1802 1805 def removefile(self, *args, **kwargs):
1803 1806 return self._rustmap.removefile(*args, **kwargs)
1804 1807
1805 1808 def dropfile(self, *args, **kwargs):
1806 1809 return self._rustmap.dropfile(*args, **kwargs)
1807 1810
1808 1811 def clearambiguoustimes(self, *args, **kwargs):
1809 1812 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1810 1813
1811 1814 def nonnormalentries(self):
1812 1815 return self._rustmap.nonnormalentries()
1813 1816
1814 1817 def get(self, *args, **kwargs):
1815 1818 return self._rustmap.get(*args, **kwargs)
1816 1819
1817 1820 @property
1818 1821 def copymap(self):
1819 1822 return self._rustmap.copymap()
1820 1823
1821 1824 def directories(self):
1822 1825 return self._rustmap.directories()
1823 1826
1824 1827 def preload(self):
1825 1828 self._rustmap
1826 1829
1827 1830 def clear(self):
1828 1831 self._rustmap.clear()
1829 1832 self.setparents(
1830 1833 self._nodeconstants.nullid, self._nodeconstants.nullid
1831 1834 )
1832 1835 util.clearcachedproperty(self, b"_dirs")
1833 1836 util.clearcachedproperty(self, b"_alldirs")
1834 1837 util.clearcachedproperty(self, b"dirfoldmap")
1835 1838
1836 1839 def items(self):
1837 1840 return self._rustmap.items()
1838 1841
1839 1842 def keys(self):
1840 1843 return iter(self._rustmap)
1841 1844
1842 1845 def __contains__(self, key):
1843 1846 return key in self._rustmap
1844 1847
1845 1848 def __getitem__(self, item):
1846 1849 return self._rustmap[item]
1847 1850
1848 1851 def __len__(self):
1849 1852 return len(self._rustmap)
1850 1853
1851 1854 def __iter__(self):
1852 1855 return iter(self._rustmap)
1853 1856
1854 1857 # forward for python2,3 compat
1855 1858 iteritems = items
1856 1859
1857 1860 def _opendirstatefile(self):
1858 1861 fp, mode = txnutil.trypending(
1859 1862 self._root, self._opener, self._filename
1860 1863 )
1861 1864 if self._pendingmode is not None and self._pendingmode != mode:
1862 1865 fp.close()
1863 1866 raise error.Abort(
1864 1867 _(b'working directory state may be changed parallelly')
1865 1868 )
1866 1869 self._pendingmode = mode
1867 1870 return fp
1868 1871
1869 1872 def setparents(self, p1, p2):
1870 1873 self._parents = (p1, p2)
1871 1874 self._dirtyparents = True
1872 1875
1873 1876 def parents(self):
1874 1877 if not self._parents:
1875 1878 if self._use_dirstate_v2:
1876 1879 offset = len(rustmod.V2_FORMAT_MARKER)
1877 1880 else:
1878 1881 offset = 0
1879 1882 read_len = offset + self._nodelen * 2
1880 1883 try:
1881 1884 fp = self._opendirstatefile()
1882 1885 st = fp.read(read_len)
1883 1886 fp.close()
1884 1887 except IOError as err:
1885 1888 if err.errno != errno.ENOENT:
1886 1889 raise
1887 1890 # File doesn't exist, so the current state is empty
1888 1891 st = b''
1889 1892
1890 1893 l = len(st)
1891 1894 if l == read_len:
1892 1895 st = st[offset:]
1893 1896 self._parents = (
1894 1897 st[: self._nodelen],
1895 1898 st[self._nodelen : 2 * self._nodelen],
1896 1899 )
1897 1900 elif l == 0:
1898 1901 self._parents = (
1899 1902 self._nodeconstants.nullid,
1900 1903 self._nodeconstants.nullid,
1901 1904 )
1902 1905 else:
1903 1906 raise error.Abort(
1904 1907 _(b'working directory state appears damaged!')
1905 1908 )
1906 1909
1907 1910 return self._parents
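# Reading note (editorial): this mirrors the pure-Python parsing above; the
# only difference is the optional dirstate-v2 prefix.  When that format is
# in use the file begins with rustmod.V2_FORMAT_MARKER, so the two parent
# nodes are read from just past that marker, e.g. with 20-byte nodes:
#
#     st = st[len(rustmod.V2_FORMAT_MARKER):]
#     p1, p2 = st[:20], st[20:40]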
1908 1911
1909 1912 @propertycache
1910 1913 def _rustmap(self):
1911 1914 """
1912 1915 Fills the Dirstatemap when called.
1913 1916 """
1914 1917 # ignore HG_PENDING because identity is used only for writing
1915 1918 self.identity = util.filestat.frompath(
1916 1919 self._opener.join(self._filename)
1917 1920 )
1918 1921
1919 1922 try:
1920 1923 fp = self._opendirstatefile()
1921 1924 try:
1922 1925 st = fp.read()
1923 1926 finally:
1924 1927 fp.close()
1925 1928 except IOError as err:
1926 1929 if err.errno != errno.ENOENT:
1927 1930 raise
1928 1931 st = b''
1929 1932
1930 1933 self._rustmap, parents = rustmod.DirstateMap.new(
1931 1934 self._use_dirstate_tree, self._use_dirstate_v2, st
1932 1935 )
1933 1936
1934 1937 if parents and not self._dirtyparents:
1935 1938 self.setparents(*parents)
1936 1939
1937 1940 self.__contains__ = self._rustmap.__contains__
1938 1941 self.__getitem__ = self._rustmap.__getitem__
1939 1942 self.get = self._rustmap.get
1940 1943 return self._rustmap
1941 1944
1942 1945 def write(self, st, now):
1943 1946 parents = self.parents()
1944 1947 packed = self._rustmap.write(
1945 1948 self._use_dirstate_v2, parents[0], parents[1], now
1946 1949 )
1947 1950 st.write(packed)
1948 1951 st.close()
1949 1952 self._dirtyparents = False
1950 1953
1951 1954 @propertycache
1952 1955 def filefoldmap(self):
1953 1956 """Returns a dictionary mapping normalized case paths to their
1954 1957 non-normalized versions.
1955 1958 """
1956 1959 return self._rustmap.filefoldmapasdict()
1957 1960
1958 1961 def hastrackeddir(self, d):
1959 1962 return self._rustmap.hastrackeddir(d)
1960 1963
1961 1964 def hasdir(self, d):
1962 1965 return self._rustmap.hasdir(d)
1963 1966
1964 1967 @propertycache
1965 1968 def identity(self):
1966 1969 self._rustmap
1967 1970 return self.identity
1968 1971
1969 1972 @property
1970 1973 def nonnormalset(self):
1971 1974 nonnorm = self._rustmap.non_normal_entries()
1972 1975 return nonnorm
1973 1976
1974 1977 @propertycache
1975 1978 def otherparentset(self):
1976 1979 otherparents = self._rustmap.other_parent_entries()
1977 1980 return otherparents
1978 1981
1979 1982 def non_normal_or_other_parent_paths(self):
1980 1983 return self._rustmap.non_normal_or_other_parent_paths()
1981 1984
1982 1985 @propertycache
1983 1986 def dirfoldmap(self):
1984 1987 f = {}
1985 1988 normcase = util.normcase
1986 1989 for name, _pseudo_entry in self.directories():
1987 1990 f[normcase(name)] = name
1988 1991 return f
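# Note (editorial): this mirrors the pure-Python dirfoldmap above, but
# iterates the Rust map's directories(), which yields (name, pseudo-entry)
# pairs; only the name is needed for the case-folding table.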