dirstate: introduce a symbolic constant for the NONNORMAL marker...
marmoute
r48277:4ac418b4 default
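For context, this changeset replaces bare -1 `size` sentinels with the named constant NONNORMAL, alongside the existing FROM_P2 (-2) marker. The short sketch below is a minimal, self-contained illustration of how these sentinels are read in a (state, mode, size, mtime) dirstate entry; the helper function and example entries are hypothetical and only illustrate the pattern, they are not part of Mercurial's API.

# Minimal sketch (assumed, not part of the changeset): the two `size`
# sentinels used in this diff and how an entry tuple can be interpreted.
FROM_P2 = -2    # `size` marker: the entry comes from the other merge parent
NONNORMAL = -1  # `size` marker: the entry is modified/merged/added

def describe_entry(entry):
    """Describe a (state, mode, size, mtime) dirstate tuple (illustrative only)."""
    state, mode, size, mtime = entry
    if state == b'r' and size == NONNORMAL:
        return 'removed, previously in merged state'
    if state == b'r' and size == FROM_P2:
        return 'removed, previously from the other parent'
    if state == b'a' and size == NONNORMAL:
        return 'added'
    return 'state %s, size %d' % (state, size)

# After this change, callers write NONNORMAL instead of a bare -1, e.g.:
print(describe_entry((b'a', 0, NONNORMAL, -1)))  # added
print(describe_entry((b'r', 0, NONNORMAL, 0)))   # removed, previously in merged state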
@@ -1,1985 +1,1988 b''
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import os
14 14 import stat
15 15
16 16 from .i18n import _
17 17 from .pycompat import delattr
18 18
19 19 from hgdemandimport import tracing
20 20
21 21 from . import (
22 22 encoding,
23 23 error,
24 24 match as matchmod,
25 25 pathutil,
26 26 policy,
27 27 pycompat,
28 28 scmutil,
29 29 sparse,
30 30 txnutil,
31 31 util,
32 32 )
33 33
34 34 from .interfaces import (
35 35 dirstate as intdirstate,
36 36 util as interfaceutil,
37 37 )
38 38
39 39 parsers = policy.importmod('parsers')
40 40 rustmod = policy.importrust('dirstate')
41 41
42 42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43 43
44 44 propertycache = util.propertycache
45 45 filecache = scmutil.filecache
46 46 _rangemask = 0x7FFFFFFF
47 47
48 48 dirstatetuple = parsers.dirstatetuple
49 49
50 50
51 51 # a special value used internally for `size` if the file comes from the other parent
52 52 FROM_P2 = -2
53 53
54 # a special value used internally for `size` if the file is modified/merged/added
55 NONNORMAL = -1
56
54 57
55 58 class repocache(filecache):
56 59 """filecache for files in .hg/"""
57 60
58 61 def join(self, obj, fname):
59 62 return obj._opener.join(fname)
60 63
61 64
62 65 class rootcache(filecache):
63 66 """filecache for files in the repository root"""
64 67
65 68 def join(self, obj, fname):
66 69 return obj._join(fname)
67 70
68 71
69 72 def _getfsnow(vfs):
70 73 '''Get "now" timestamp on filesystem'''
71 74 tmpfd, tmpname = vfs.mkstemp()
72 75 try:
73 76 return os.fstat(tmpfd)[stat.ST_MTIME]
74 77 finally:
75 78 os.close(tmpfd)
76 79 vfs.unlink(tmpname)
77 80
78 81
79 82 @interfaceutil.implementer(intdirstate.idirstate)
80 83 class dirstate(object):
81 84 def __init__(
82 85 self,
83 86 opener,
84 87 ui,
85 88 root,
86 89 validate,
87 90 sparsematchfn,
88 91 nodeconstants,
89 92 use_dirstate_v2,
90 93 ):
91 94 """Create a new dirstate object.
92 95
93 96 opener is an open()-like callable that can be used to open the
94 97 dirstate file; root is the root of the directory tracked by
95 98 the dirstate.
96 99 """
97 100 self._use_dirstate_v2 = use_dirstate_v2
98 101 self._nodeconstants = nodeconstants
99 102 self._opener = opener
100 103 self._validate = validate
101 104 self._root = root
102 105 self._sparsematchfn = sparsematchfn
103 106 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
104 107 # a UNC path pointing to a root share (issue4557)
105 108 self._rootdir = pathutil.normasprefix(root)
106 109 self._dirty = False
107 110 self._lastnormaltime = 0
108 111 self._ui = ui
109 112 self._filecache = {}
110 113 self._parentwriters = 0
111 114 self._filename = b'dirstate'
112 115 self._pendingfilename = b'%s.pending' % self._filename
113 116 self._plchangecallbacks = {}
114 117 self._origpl = None
115 118 self._updatedfiles = set()
116 119 self._mapcls = dirstatemap
117 120 # Access and cache cwd early, so we don't access it for the first time
118 121 # after a working-copy update caused it to not exist (accessing it then
119 122 # raises an exception).
120 123 self._cwd
121 124
122 125 def prefetch_parents(self):
123 126 """make sure the parents are loaded
124 127
125 128 Used to avoid a race condition.
126 129 """
127 130 self._pl
128 131
129 132 @contextlib.contextmanager
130 133 def parentchange(self):
131 134 """Context manager for handling dirstate parents.
132 135
133 136 If an exception occurs in the scope of the context manager,
134 137 the incoherent dirstate won't be written when wlock is
135 138 released.
136 139 """
137 140 self._parentwriters += 1
138 141 yield
139 142 # Typically we want the "undo" step of a context manager in a
140 143 # finally block so it happens even when an exception
141 144 # occurs. In this case, however, we only want to decrement
142 145 # parentwriters if the code in the with statement exits
143 146 # normally, so we don't have a try/finally here on purpose.
144 147 self._parentwriters -= 1
145 148
146 149 def pendingparentchange(self):
147 150 """Returns true if the dirstate is in the middle of a set of changes
148 151 that modify the dirstate parent.
149 152 """
150 153 return self._parentwriters > 0
151 154
152 155 @propertycache
153 156 def _map(self):
154 157 """Return the dirstate contents (see documentation for dirstatemap)."""
155 158 self._map = self._mapcls(
156 159 self._ui,
157 160 self._opener,
158 161 self._root,
159 162 self._nodeconstants,
160 163 self._use_dirstate_v2,
161 164 )
162 165 return self._map
163 166
164 167 @property
165 168 def _sparsematcher(self):
166 169 """The matcher for the sparse checkout.
167 170
168 171 The working directory may not include every file from a manifest. The
169 172 matcher obtained by this property will match a path if it is to be
170 173 included in the working directory.
171 174 """
172 175 # TODO there is potential to cache this property. For now, the matcher
173 176 # is resolved on every access. (But the called function does use a
174 177 # cache to keep the lookup fast.)
175 178 return self._sparsematchfn()
176 179
177 180 @repocache(b'branch')
178 181 def _branch(self):
179 182 try:
180 183 return self._opener.read(b"branch").strip() or b"default"
181 184 except IOError as inst:
182 185 if inst.errno != errno.ENOENT:
183 186 raise
184 187 return b"default"
185 188
186 189 @property
187 190 def _pl(self):
188 191 return self._map.parents()
189 192
190 193 def hasdir(self, d):
191 194 return self._map.hastrackeddir(d)
192 195
193 196 @rootcache(b'.hgignore')
194 197 def _ignore(self):
195 198 files = self._ignorefiles()
196 199 if not files:
197 200 return matchmod.never()
198 201
199 202 pats = [b'include:%s' % f for f in files]
200 203 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
201 204
202 205 @propertycache
203 206 def _slash(self):
204 207 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
205 208
206 209 @propertycache
207 210 def _checklink(self):
208 211 return util.checklink(self._root)
209 212
210 213 @propertycache
211 214 def _checkexec(self):
212 215 return bool(util.checkexec(self._root))
213 216
214 217 @propertycache
215 218 def _checkcase(self):
216 219 return not util.fscasesensitive(self._join(b'.hg'))
217 220
218 221 def _join(self, f):
219 222 # much faster than os.path.join()
220 223 # it's safe because f is always a relative path
221 224 return self._rootdir + f
222 225
223 226 def flagfunc(self, buildfallback):
224 227 if self._checklink and self._checkexec:
225 228
226 229 def f(x):
227 230 try:
228 231 st = os.lstat(self._join(x))
229 232 if util.statislink(st):
230 233 return b'l'
231 234 if util.statisexec(st):
232 235 return b'x'
233 236 except OSError:
234 237 pass
235 238 return b''
236 239
237 240 return f
238 241
239 242 fallback = buildfallback()
240 243 if self._checklink:
241 244
242 245 def f(x):
243 246 if os.path.islink(self._join(x)):
244 247 return b'l'
245 248 if b'x' in fallback(x):
246 249 return b'x'
247 250 return b''
248 251
249 252 return f
250 253 if self._checkexec:
251 254
252 255 def f(x):
253 256 if b'l' in fallback(x):
254 257 return b'l'
255 258 if util.isexec(self._join(x)):
256 259 return b'x'
257 260 return b''
258 261
259 262 return f
260 263 else:
261 264 return fallback
262 265
263 266 @propertycache
264 267 def _cwd(self):
265 268 # internal config: ui.forcecwd
266 269 forcecwd = self._ui.config(b'ui', b'forcecwd')
267 270 if forcecwd:
268 271 return forcecwd
269 272 return encoding.getcwd()
270 273
271 274 def getcwd(self):
272 275 """Return the path from which a canonical path is calculated.
273 276
274 277 This path should be used to resolve file patterns or to convert
275 278 canonical paths back to file paths for display. It shouldn't be
276 279 used to get real file paths. Use vfs functions instead.
277 280 """
278 281 cwd = self._cwd
279 282 if cwd == self._root:
280 283 return b''
281 284 # self._root ends with a path separator if self._root is '/' or 'C:\'
282 285 rootsep = self._root
283 286 if not util.endswithsep(rootsep):
284 287 rootsep += pycompat.ossep
285 288 if cwd.startswith(rootsep):
286 289 return cwd[len(rootsep) :]
287 290 else:
288 291 # we're outside the repo. return an absolute path.
289 292 return cwd
290 293
291 294 def pathto(self, f, cwd=None):
292 295 if cwd is None:
293 296 cwd = self.getcwd()
294 297 path = util.pathto(self._root, cwd, f)
295 298 if self._slash:
296 299 return util.pconvert(path)
297 300 return path
298 301
299 302 def __getitem__(self, key):
300 303 """Return the current state of key (a filename) in the dirstate.
301 304
302 305 States are:
303 306 n normal
304 307 m needs merging
305 308 r marked for removal
306 309 a marked for addition
307 310 ? not tracked
308 311 """
309 312 return self._map.get(key, (b"?",))[0]
310 313
311 314 def __contains__(self, key):
312 315 return key in self._map
313 316
314 317 def __iter__(self):
315 318 return iter(sorted(self._map))
316 319
317 320 def items(self):
318 321 return pycompat.iteritems(self._map)
319 322
320 323 iteritems = items
321 324
322 325 def directories(self):
323 326 return self._map.directories()
324 327
325 328 def parents(self):
326 329 return [self._validate(p) for p in self._pl]
327 330
328 331 def p1(self):
329 332 return self._validate(self._pl[0])
330 333
331 334 def p2(self):
332 335 return self._validate(self._pl[1])
333 336
334 337 def branch(self):
335 338 return encoding.tolocal(self._branch)
336 339
337 340 def setparents(self, p1, p2=None):
338 341 """Set dirstate parents to p1 and p2.
339 342
340 343 When moving from two parents to one, 'm' merged entries are
341 344 adjusted to normal, and previous copy records are discarded and
342 345 returned by the call.
343 346
344 347 See localrepo.setparents()
345 348 """
346 349 if p2 is None:
347 350 p2 = self._nodeconstants.nullid
348 351 if self._parentwriters == 0:
349 352 raise ValueError(
350 353 b"cannot set dirstate parent outside of "
351 354 b"dirstate.parentchange context manager"
352 355 )
353 356
354 357 self._dirty = True
355 358 oldp2 = self._pl[1]
356 359 if self._origpl is None:
357 360 self._origpl = self._pl
358 361 self._map.setparents(p1, p2)
359 362 copies = {}
360 363 if (
361 364 oldp2 != self._nodeconstants.nullid
362 365 and p2 == self._nodeconstants.nullid
363 366 ):
364 367 candidatefiles = self._map.non_normal_or_other_parent_paths()
365 368
366 369 for f in candidatefiles:
367 370 s = self._map.get(f)
368 371 if s is None:
369 372 continue
370 373
371 374 # Discard 'm' markers when moving away from a merge state
372 375 if s[0] == b'm':
373 376 source = self._map.copymap.get(f)
374 377 if source:
375 378 copies[f] = source
376 379 self.normallookup(f)
377 380 # Also fix up otherparent markers
378 381 elif s[0] == b'n' and s[2] == FROM_P2:
379 382 source = self._map.copymap.get(f)
380 383 if source:
381 384 copies[f] = source
382 385 self.add(f)
383 386 return copies
384 387
385 388 def setbranch(self, branch):
386 389 self.__class__._branch.set(self, encoding.fromlocal(branch))
387 390 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
388 391 try:
389 392 f.write(self._branch + b'\n')
390 393 f.close()
391 394
392 395 # make sure filecache has the correct stat info for _branch after
393 396 # replacing the underlying file
394 397 ce = self._filecache[b'_branch']
395 398 if ce:
396 399 ce.refresh()
397 400 except: # re-raises
398 401 f.discard()
399 402 raise
400 403
401 404 def invalidate(self):
402 405 """Causes the next access to reread the dirstate.
403 406
404 407 This is different from localrepo.invalidatedirstate() because it always
405 408 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
406 409 check whether the dirstate has changed before rereading it."""
407 410
408 411 for a in ("_map", "_branch", "_ignore"):
409 412 if a in self.__dict__:
410 413 delattr(self, a)
411 414 self._lastnormaltime = 0
412 415 self._dirty = False
413 416 self._updatedfiles.clear()
414 417 self._parentwriters = 0
415 418 self._origpl = None
416 419
417 420 def copy(self, source, dest):
418 421 """Mark dest as a copy of source. Unmark dest if source is None."""
419 422 if source == dest:
420 423 return
421 424 self._dirty = True
422 425 if source is not None:
423 426 self._map.copymap[dest] = source
424 427 self._updatedfiles.add(source)
425 428 self._updatedfiles.add(dest)
426 429 elif self._map.copymap.pop(dest, None):
427 430 self._updatedfiles.add(dest)
428 431
429 432 def copied(self, file):
430 433 return self._map.copymap.get(file, None)
431 434
432 435 def copies(self):
433 436 return self._map.copymap
434 437
435 438 def _addpath(self, f, state, mode, size, mtime):
436 439 oldstate = self[f]
437 440 if state == b'a' or oldstate == b'r':
438 441 scmutil.checkfilename(f)
439 442 if self._map.hastrackeddir(f):
440 443 msg = _(b'directory %r already in dirstate')
441 444 msg %= pycompat.bytestr(f)
442 445 raise error.Abort(msg)
443 446 # shadows
444 447 for d in pathutil.finddirs(f):
445 448 if self._map.hastrackeddir(d):
446 449 break
447 450 entry = self._map.get(d)
448 451 if entry is not None and entry[0] != b'r':
449 452 msg = _(b'file %r in dirstate clashes with %r')
450 453 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
451 454 raise error.Abort(msg)
452 455 self._dirty = True
453 456 self._updatedfiles.add(f)
454 457 self._map.addfile(f, oldstate, state, mode, size, mtime)
455 458
456 459 def normal(self, f, parentfiledata=None):
457 460 """Mark a file normal and clean.
458 461
459 462 parentfiledata: (mode, size, mtime) of the clean file
460 463
461 464 parentfiledata should be computed from memory (for mode,
462 465 size), at or as close as possible to the point where we
463 466 determined the file was clean, to limit the risk of the
464 467 file having been changed by an external process between the
465 468 moment where the file was determined to be clean and now."""
466 469 if parentfiledata:
467 470 (mode, size, mtime) = parentfiledata
468 471 else:
469 472 s = os.lstat(self._join(f))
470 473 mode = s.st_mode
471 474 size = s.st_size
472 475 mtime = s[stat.ST_MTIME]
473 476 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
474 477 self._map.copymap.pop(f, None)
475 478 if f in self._map.nonnormalset:
476 479 self._map.nonnormalset.remove(f)
477 480 if mtime > self._lastnormaltime:
478 481 # Remember the most recent modification timeslot for status(),
479 482 # to make sure we won't miss future size-preserving file content
480 483 # modifications that happen within the same timeslot.
481 484 self._lastnormaltime = mtime
482 485
483 486 def normallookup(self, f):
484 487 '''Mark a file normal, but possibly dirty.'''
485 488 if self._pl[1] != self._nodeconstants.nullid:
486 489 # if there is a merge going on and the file was either
487 490 # in state 'm' (-1) or coming from other parent (-2) before
488 491 # being removed, restore that state.
489 492 entry = self._map.get(f)
490 493 if entry is not None:
491 if entry[0] == b'r' and entry[2] in (-1, FROM_P2):
494 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
492 495 source = self._map.copymap.get(f)
493 if entry[2] == -1:
496 if entry[2] == NONNORMAL:
494 497 self.merge(f)
495 498 elif entry[2] == FROM_P2:
496 499 self.otherparent(f)
497 500 if source:
498 501 self.copy(source, f)
499 502 return
500 503 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
501 504 return
502 self._addpath(f, b'n', 0, -1, -1)
505 self._addpath(f, b'n', 0, NONNORMAL, -1)
503 506 self._map.copymap.pop(f, None)
504 507
505 508 def otherparent(self, f):
506 509 '''Mark as coming from the other parent, always dirty.'''
507 510 if self._pl[1] == self._nodeconstants.nullid:
508 511 msg = _(b"setting %r to other parent only allowed in merges") % f
509 512 raise error.Abort(msg)
510 513 if f in self and self[f] == b'n':
511 514 # merge-like
512 515 self._addpath(f, b'm', 0, FROM_P2, -1)
513 516 else:
514 517 # add-like
515 518 self._addpath(f, b'n', 0, FROM_P2, -1)
516 519 self._map.copymap.pop(f, None)
517 520
518 521 def add(self, f):
519 522 '''Mark a file added.'''
520 self._addpath(f, b'a', 0, -1, -1)
523 self._addpath(f, b'a', 0, NONNORMAL, -1)
521 524 self._map.copymap.pop(f, None)
522 525
523 526 def remove(self, f):
524 527 '''Mark a file removed.'''
525 528 self._dirty = True
526 529 oldstate = self[f]
527 530 size = 0
528 531 if self._pl[1] != self._nodeconstants.nullid:
529 532 entry = self._map.get(f)
530 533 if entry is not None:
531 534 # backup the previous state
532 535 if entry[0] == b'm': # merge
533 size = -1
536 size = NONNORMAL
534 537 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
535 538 size = FROM_P2
536 539 self._map.otherparentset.add(f)
537 540 self._updatedfiles.add(f)
538 541 self._map.removefile(f, oldstate, size)
539 542 if size == 0:
540 543 self._map.copymap.pop(f, None)
541 544
542 545 def merge(self, f):
543 546 '''Mark a file merged.'''
544 547 if self._pl[1] == self._nodeconstants.nullid:
545 548 return self.normallookup(f)
546 549 return self.otherparent(f)
547 550
548 551 def drop(self, f):
549 552 '''Drop a file from the dirstate'''
550 553 oldstate = self[f]
551 554 if self._map.dropfile(f, oldstate):
552 555 self._dirty = True
553 556 self._updatedfiles.add(f)
554 557 self._map.copymap.pop(f, None)
555 558
556 559 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
557 560 if exists is None:
558 561 exists = os.path.lexists(os.path.join(self._root, path))
559 562 if not exists:
560 563 # Maybe a path component exists
561 564 if not ignoremissing and b'/' in path:
562 565 d, f = path.rsplit(b'/', 1)
563 566 d = self._normalize(d, False, ignoremissing, None)
564 567 folded = d + b"/" + f
565 568 else:
566 569 # No path components, preserve original case
567 570 folded = path
568 571 else:
569 572 # recursively normalize leading directory components
570 573 # against dirstate
571 574 if b'/' in normed:
572 575 d, f = normed.rsplit(b'/', 1)
573 576 d = self._normalize(d, False, ignoremissing, True)
574 577 r = self._root + b"/" + d
575 578 folded = d + b"/" + util.fspath(f, r)
576 579 else:
577 580 folded = util.fspath(normed, self._root)
578 581 storemap[normed] = folded
579 582
580 583 return folded
581 584
582 585 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
583 586 normed = util.normcase(path)
584 587 folded = self._map.filefoldmap.get(normed, None)
585 588 if folded is None:
586 589 if isknown:
587 590 folded = path
588 591 else:
589 592 folded = self._discoverpath(
590 593 path, normed, ignoremissing, exists, self._map.filefoldmap
591 594 )
592 595 return folded
593 596
594 597 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
595 598 normed = util.normcase(path)
596 599 folded = self._map.filefoldmap.get(normed, None)
597 600 if folded is None:
598 601 folded = self._map.dirfoldmap.get(normed, None)
599 602 if folded is None:
600 603 if isknown:
601 604 folded = path
602 605 else:
603 606 # store discovered result in dirfoldmap so that future
604 607 # normalizefile calls don't start matching directories
605 608 folded = self._discoverpath(
606 609 path, normed, ignoremissing, exists, self._map.dirfoldmap
607 610 )
608 611 return folded
609 612
610 613 def normalize(self, path, isknown=False, ignoremissing=False):
611 614 """
612 615 normalize the case of a pathname when on a casefolding filesystem
613 616
614 617 isknown specifies whether the filename came from walking the
615 618 disk, to avoid extra filesystem access.
616 619
617 620 If ignoremissing is True, missing paths are returned
618 621 unchanged. Otherwise, we try harder to normalize possibly
619 622 existing path components.
620 623
621 624 The normalized case is determined based on the following precedence:
622 625
623 626 - version of name already stored in the dirstate
624 627 - version of name stored on disk
625 628 - version provided via command arguments
626 629 """
627 630
628 631 if self._checkcase:
629 632 return self._normalize(path, isknown, ignoremissing)
630 633 return path
631 634
632 635 def clear(self):
633 636 self._map.clear()
634 637 self._lastnormaltime = 0
635 638 self._updatedfiles.clear()
636 639 self._dirty = True
637 640
638 641 def rebuild(self, parent, allfiles, changedfiles=None):
639 642 if changedfiles is None:
640 643 # Rebuild entire dirstate
641 644 to_lookup = allfiles
642 645 to_drop = []
643 646 lastnormaltime = self._lastnormaltime
644 647 self.clear()
645 648 self._lastnormaltime = lastnormaltime
646 649 elif len(changedfiles) < 10:
647 650 # Avoid turning allfiles into a set, which can be expensive if it's
648 651 # large.
649 652 to_lookup = []
650 653 to_drop = []
651 654 for f in changedfiles:
652 655 if f in allfiles:
653 656 to_lookup.append(f)
654 657 else:
655 658 to_drop.append(f)
656 659 else:
657 660 changedfilesset = set(changedfiles)
658 661 to_lookup = changedfilesset & set(allfiles)
659 662 to_drop = changedfilesset - to_lookup
660 663
661 664 if self._origpl is None:
662 665 self._origpl = self._pl
663 666 self._map.setparents(parent, self._nodeconstants.nullid)
664 667
665 668 for f in to_lookup:
666 669 self.normallookup(f)
667 670 for f in to_drop:
668 671 self.drop(f)
669 672
670 673 self._dirty = True
671 674
672 675 def identity(self):
673 676 """Return identity of dirstate itself to detect changing in storage
674 677
675 678 If identity of previous dirstate is equal to this, writing
676 679 changes based on the former dirstate out can keep consistency.
677 680 """
678 681 return self._map.identity
679 682
680 683 def write(self, tr):
681 684 if not self._dirty:
682 685 return
683 686
684 687 filename = self._filename
685 688 if tr:
686 689 # 'dirstate.write()' is not only for writing in-memory
687 690 # changes out, but also for dropping ambiguous timestamps.
688 691 # Delayed writing would re-raise the "ambiguous timestamp issue".
689 692 # See also the wiki page below for detail:
690 693 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
691 694
692 695 # emulate dropping timestamp in 'parsers.pack_dirstate'
693 696 now = _getfsnow(self._opener)
694 697 self._map.clearambiguoustimes(self._updatedfiles, now)
695 698
696 699 # emulate that all 'dirstate.normal' results are written out
697 700 self._lastnormaltime = 0
698 701 self._updatedfiles.clear()
699 702
700 703 # delay writing in-memory changes out
701 704 tr.addfilegenerator(
702 705 b'dirstate',
703 706 (self._filename,),
704 707 self._writedirstate,
705 708 location=b'plain',
706 709 )
707 710 return
708 711
709 712 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
710 713 self._writedirstate(st)
711 714
712 715 def addparentchangecallback(self, category, callback):
713 716 """add a callback to be called when the wd parents are changed
714 717
715 718 Callback will be called with the following arguments:
716 719 dirstate, (oldp1, oldp2), (newp1, newp2)
717 720
718 721 Category is a unique identifier to allow overwriting an old callback
719 722 with a newer callback.
720 723 """
721 724 self._plchangecallbacks[category] = callback
722 725
723 726 def _writedirstate(self, st):
724 727 # notify callbacks about parents change
725 728 if self._origpl is not None and self._origpl != self._pl:
726 729 for c, callback in sorted(
727 730 pycompat.iteritems(self._plchangecallbacks)
728 731 ):
729 732 callback(self, self._origpl, self._pl)
730 733 self._origpl = None
731 734 # use the modification time of the newly created temporary file as the
732 735 # filesystem's notion of 'now'
733 736 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
734 737
735 738 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
736 739 # the timestamp of each entry in the dirstate, because of 'now > mtime'
737 740 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
738 741 if delaywrite > 0:
739 742 # do we have any files to delay for?
740 743 for f, e in pycompat.iteritems(self._map):
741 744 if e[0] == b'n' and e[3] == now:
742 745 import time # to avoid useless import
743 746
744 747 # rather than sleep n seconds, sleep until the next
745 748 # multiple of n seconds
746 749 clock = time.time()
747 750 start = int(clock) - (int(clock) % delaywrite)
748 751 end = start + delaywrite
749 752 time.sleep(end - clock)
750 753 now = end # trust our estimate that the end is near now
751 754 break
752 755
753 756 self._map.write(st, now)
754 757 self._lastnormaltime = 0
755 758 self._dirty = False
756 759
757 760 def _dirignore(self, f):
758 761 if self._ignore(f):
759 762 return True
760 763 for p in pathutil.finddirs(f):
761 764 if self._ignore(p):
762 765 return True
763 766 return False
764 767
765 768 def _ignorefiles(self):
766 769 files = []
767 770 if os.path.exists(self._join(b'.hgignore')):
768 771 files.append(self._join(b'.hgignore'))
769 772 for name, path in self._ui.configitems(b"ui"):
770 773 if name == b'ignore' or name.startswith(b'ignore.'):
771 774 # we need to use os.path.join here rather than self._join
772 775 # because path is arbitrary and user-specified
773 776 files.append(os.path.join(self._rootdir, util.expandpath(path)))
774 777 return files
775 778
776 779 def _ignorefileandline(self, f):
777 780 files = collections.deque(self._ignorefiles())
778 781 visited = set()
779 782 while files:
780 783 i = files.popleft()
781 784 patterns = matchmod.readpatternfile(
782 785 i, self._ui.warn, sourceinfo=True
783 786 )
784 787 for pattern, lineno, line in patterns:
785 788 kind, p = matchmod._patsplit(pattern, b'glob')
786 789 if kind == b"subinclude":
787 790 if p not in visited:
788 791 files.append(p)
789 792 continue
790 793 m = matchmod.match(
791 794 self._root, b'', [], [pattern], warn=self._ui.warn
792 795 )
793 796 if m(f):
794 797 return (i, lineno, line)
795 798 visited.add(i)
796 799 return (None, -1, b"")
797 800
798 801 def _walkexplicit(self, match, subrepos):
799 802 """Get stat data about the files explicitly specified by match.
800 803
801 804 Return a triple (results, dirsfound, dirsnotfound).
802 805 - results is a mapping from filename to stat result. It also contains
803 806 listings mapping subrepos and .hg to None.
804 807 - dirsfound is a list of files found to be directories.
805 808 - dirsnotfound is a list of files that the dirstate thinks are
806 809 directories and that were not found."""
807 810
808 811 def badtype(mode):
809 812 kind = _(b'unknown')
810 813 if stat.S_ISCHR(mode):
811 814 kind = _(b'character device')
812 815 elif stat.S_ISBLK(mode):
813 816 kind = _(b'block device')
814 817 elif stat.S_ISFIFO(mode):
815 818 kind = _(b'fifo')
816 819 elif stat.S_ISSOCK(mode):
817 820 kind = _(b'socket')
818 821 elif stat.S_ISDIR(mode):
819 822 kind = _(b'directory')
820 823 return _(b'unsupported file type (type is %s)') % kind
821 824
822 825 badfn = match.bad
823 826 dmap = self._map
824 827 lstat = os.lstat
825 828 getkind = stat.S_IFMT
826 829 dirkind = stat.S_IFDIR
827 830 regkind = stat.S_IFREG
828 831 lnkkind = stat.S_IFLNK
829 832 join = self._join
830 833 dirsfound = []
831 834 foundadd = dirsfound.append
832 835 dirsnotfound = []
833 836 notfoundadd = dirsnotfound.append
834 837
835 838 if not match.isexact() and self._checkcase:
836 839 normalize = self._normalize
837 840 else:
838 841 normalize = None
839 842
840 843 files = sorted(match.files())
841 844 subrepos.sort()
842 845 i, j = 0, 0
843 846 while i < len(files) and j < len(subrepos):
844 847 subpath = subrepos[j] + b"/"
845 848 if files[i] < subpath:
846 849 i += 1
847 850 continue
848 851 while i < len(files) and files[i].startswith(subpath):
849 852 del files[i]
850 853 j += 1
851 854
852 855 if not files or b'' in files:
853 856 files = [b'']
854 857 # constructing the foldmap is expensive, so don't do it for the
855 858 # common case where files is ['']
856 859 normalize = None
857 860 results = dict.fromkeys(subrepos)
858 861 results[b'.hg'] = None
859 862
860 863 for ff in files:
861 864 if normalize:
862 865 nf = normalize(ff, False, True)
863 866 else:
864 867 nf = ff
865 868 if nf in results:
866 869 continue
867 870
868 871 try:
869 872 st = lstat(join(nf))
870 873 kind = getkind(st.st_mode)
871 874 if kind == dirkind:
872 875 if nf in dmap:
873 876 # file replaced by dir on disk but still in dirstate
874 877 results[nf] = None
875 878 foundadd((nf, ff))
876 879 elif kind == regkind or kind == lnkkind:
877 880 results[nf] = st
878 881 else:
879 882 badfn(ff, badtype(kind))
880 883 if nf in dmap:
881 884 results[nf] = None
882 885 except OSError as inst: # nf not found on disk - it is dirstate only
883 886 if nf in dmap: # does it exactly match a missing file?
884 887 results[nf] = None
885 888 else: # does it match a missing directory?
886 889 if self._map.hasdir(nf):
887 890 notfoundadd(nf)
888 891 else:
889 892 badfn(ff, encoding.strtolocal(inst.strerror))
890 893
891 894 # match.files() may contain explicitly-specified paths that shouldn't
892 895 # be taken; drop them from the list of files found. dirsfound/notfound
893 896 # aren't filtered here because they will be tested later.
894 897 if match.anypats():
895 898 for f in list(results):
896 899 if f == b'.hg' or f in subrepos:
897 900 # keep sentinel to disable further out-of-repo walks
898 901 continue
899 902 if not match(f):
900 903 del results[f]
901 904
902 905 # Case insensitive filesystems cannot rely on lstat() failing to detect
903 906 # a case-only rename. Prune the stat object for any file that does not
904 907 # match the case in the filesystem, if there are multiple files that
905 908 # normalize to the same path.
906 909 if match.isexact() and self._checkcase:
907 910 normed = {}
908 911
909 912 for f, st in pycompat.iteritems(results):
910 913 if st is None:
911 914 continue
912 915
913 916 nc = util.normcase(f)
914 917 paths = normed.get(nc)
915 918
916 919 if paths is None:
917 920 paths = set()
918 921 normed[nc] = paths
919 922
920 923 paths.add(f)
921 924
922 925 for norm, paths in pycompat.iteritems(normed):
923 926 if len(paths) > 1:
924 927 for path in paths:
925 928 folded = self._discoverpath(
926 929 path, norm, True, None, self._map.dirfoldmap
927 930 )
928 931 if path != folded:
929 932 results[path] = None
930 933
931 934 return results, dirsfound, dirsnotfound
932 935
933 936 def walk(self, match, subrepos, unknown, ignored, full=True):
934 937 """
935 938 Walk recursively through the directory tree, finding all files
936 939 matched by match.
937 940
938 941 If full is False, maybe skip some known-clean files.
939 942
940 943 Return a dict mapping filename to stat-like object (either
941 944 mercurial.osutil.stat instance or return value of os.stat()).
942 945
943 946 """
944 947 # full is a flag that extensions that hook into walk can use -- this
945 948 # implementation doesn't use it at all. This satisfies the contract
946 949 # because we only guarantee a "maybe".
947 950
948 951 if ignored:
949 952 ignore = util.never
950 953 dirignore = util.never
951 954 elif unknown:
952 955 ignore = self._ignore
953 956 dirignore = self._dirignore
954 957 else:
955 958 # if not unknown and not ignored, drop dir recursion and step 2
956 959 ignore = util.always
957 960 dirignore = util.always
958 961
959 962 matchfn = match.matchfn
960 963 matchalways = match.always()
961 964 matchtdir = match.traversedir
962 965 dmap = self._map
963 966 listdir = util.listdir
964 967 lstat = os.lstat
965 968 dirkind = stat.S_IFDIR
966 969 regkind = stat.S_IFREG
967 970 lnkkind = stat.S_IFLNK
968 971 join = self._join
969 972
970 973 exact = skipstep3 = False
971 974 if match.isexact(): # match.exact
972 975 exact = True
973 976 dirignore = util.always # skip step 2
974 977 elif match.prefix(): # match.match, no patterns
975 978 skipstep3 = True
976 979
977 980 if not exact and self._checkcase:
978 981 normalize = self._normalize
979 982 normalizefile = self._normalizefile
980 983 skipstep3 = False
981 984 else:
982 985 normalize = self._normalize
983 986 normalizefile = None
984 987
985 988 # step 1: find all explicit files
986 989 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
987 990 if matchtdir:
988 991 for d in work:
989 992 matchtdir(d[0])
990 993 for d in dirsnotfound:
991 994 matchtdir(d)
992 995
993 996 skipstep3 = skipstep3 and not (work or dirsnotfound)
994 997 work = [d for d in work if not dirignore(d[0])]
995 998
996 999 # step 2: visit subdirectories
997 1000 def traverse(work, alreadynormed):
998 1001 wadd = work.append
999 1002 while work:
1000 1003 tracing.counter('dirstate.walk work', len(work))
1001 1004 nd = work.pop()
1002 1005 visitentries = match.visitchildrenset(nd)
1003 1006 if not visitentries:
1004 1007 continue
1005 1008 if visitentries == b'this' or visitentries == b'all':
1006 1009 visitentries = None
1007 1010 skip = None
1008 1011 if nd != b'':
1009 1012 skip = b'.hg'
1010 1013 try:
1011 1014 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1012 1015 entries = listdir(join(nd), stat=True, skip=skip)
1013 1016 except OSError as inst:
1014 1017 if inst.errno in (errno.EACCES, errno.ENOENT):
1015 1018 match.bad(
1016 1019 self.pathto(nd), encoding.strtolocal(inst.strerror)
1017 1020 )
1018 1021 continue
1019 1022 raise
1020 1023 for f, kind, st in entries:
1021 1024 # Some matchers may return files in the visitentries set,
1022 1025 # instead of 'this', if the matcher explicitly mentions them
1023 1026 # and is not an exactmatcher. This is acceptable; we do not
1024 1027 # make any hard assumptions about file-or-directory below
1025 1028 # based on the presence of `f` in visitentries. If
1026 1029 # visitchildrenset returned a set, we can always skip the
1027 1030 # entries *not* in the set it provided regardless of whether
1028 1031 # they're actually a file or a directory.
1029 1032 if visitentries and f not in visitentries:
1030 1033 continue
1031 1034 if normalizefile:
1032 1035 # even though f might be a directory, we're only
1033 1036 # interested in comparing it to files currently in the
1034 1037 # dmap -- therefore normalizefile is enough
1035 1038 nf = normalizefile(
1036 1039 nd and (nd + b"/" + f) or f, True, True
1037 1040 )
1038 1041 else:
1039 1042 nf = nd and (nd + b"/" + f) or f
1040 1043 if nf not in results:
1041 1044 if kind == dirkind:
1042 1045 if not ignore(nf):
1043 1046 if matchtdir:
1044 1047 matchtdir(nf)
1045 1048 wadd(nf)
1046 1049 if nf in dmap and (matchalways or matchfn(nf)):
1047 1050 results[nf] = None
1048 1051 elif kind == regkind or kind == lnkkind:
1049 1052 if nf in dmap:
1050 1053 if matchalways or matchfn(nf):
1051 1054 results[nf] = st
1052 1055 elif (matchalways or matchfn(nf)) and not ignore(
1053 1056 nf
1054 1057 ):
1055 1058 # unknown file -- normalize if necessary
1056 1059 if not alreadynormed:
1057 1060 nf = normalize(nf, False, True)
1058 1061 results[nf] = st
1059 1062 elif nf in dmap and (matchalways or matchfn(nf)):
1060 1063 results[nf] = None
1061 1064
1062 1065 for nd, d in work:
1063 1066 # alreadynormed means that processwork doesn't have to do any
1064 1067 # expensive directory normalization
1065 1068 alreadynormed = not normalize or nd == d
1066 1069 traverse([d], alreadynormed)
1067 1070
1068 1071 for s in subrepos:
1069 1072 del results[s]
1070 1073 del results[b'.hg']
1071 1074
1072 1075 # step 3: visit remaining files from dmap
1073 1076 if not skipstep3 and not exact:
1074 1077 # If a dmap file is not in results yet, it was either
1075 1078 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1076 1079 # symlink directory.
1077 1080 if not results and matchalways:
1078 1081 visit = [f for f in dmap]
1079 1082 else:
1080 1083 visit = [f for f in dmap if f not in results and matchfn(f)]
1081 1084 visit.sort()
1082 1085
1083 1086 if unknown:
1084 1087 # unknown == True means we walked all dirs under the roots
1085 1088 # that weren't ignored, and everything that matched was stat'ed
1086 1089 # and is already in results.
1087 1090 # The rest must thus be ignored or under a symlink.
1088 1091 audit_path = pathutil.pathauditor(self._root, cached=True)
1089 1092
1090 1093 for nf in iter(visit):
1091 1094 # If a stat for the same file was already added with a
1092 1095 # different case, don't add one for this, since that would
1093 1096 # make it appear as if the file exists under both names
1094 1097 # on disk.
1095 1098 if (
1096 1099 normalizefile
1097 1100 and normalizefile(nf, True, True) in results
1098 1101 ):
1099 1102 results[nf] = None
1100 1103 # Report ignored items in the dmap as long as they are not
1101 1104 # under a symlink directory.
1102 1105 elif audit_path.check(nf):
1103 1106 try:
1104 1107 results[nf] = lstat(join(nf))
1105 1108 # file was just ignored, no links, and exists
1106 1109 except OSError:
1107 1110 # file doesn't exist
1108 1111 results[nf] = None
1109 1112 else:
1110 1113 # It's either missing or under a symlink directory
1111 1114 # which we in this case report as missing
1112 1115 results[nf] = None
1113 1116 else:
1114 1117 # We may not have walked the full directory tree above,
1115 1118 # so stat and check everything we missed.
1116 1119 iv = iter(visit)
1117 1120 for st in util.statfiles([join(i) for i in visit]):
1118 1121 results[next(iv)] = st
1119 1122 return results
1120 1123
1121 1124 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1122 1125 # Force Rayon (Rust parallelism library) to respect the number of
1123 1126 # workers. This is a temporary workaround until Rust code knows
1124 1127 # how to read the config file.
1125 1128 numcpus = self._ui.configint(b"worker", b"numcpus")
1126 1129 if numcpus is not None:
1127 1130 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1128 1131
1129 1132 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1130 1133 if not workers_enabled:
1131 1134 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1132 1135
1133 1136 (
1134 1137 lookup,
1135 1138 modified,
1136 1139 added,
1137 1140 removed,
1138 1141 deleted,
1139 1142 clean,
1140 1143 ignored,
1141 1144 unknown,
1142 1145 warnings,
1143 1146 bad,
1144 1147 traversed,
1145 1148 dirty,
1146 1149 ) = rustmod.status(
1147 1150 self._map._rustmap,
1148 1151 matcher,
1149 1152 self._rootdir,
1150 1153 self._ignorefiles(),
1151 1154 self._checkexec,
1152 1155 self._lastnormaltime,
1153 1156 bool(list_clean),
1154 1157 bool(list_ignored),
1155 1158 bool(list_unknown),
1156 1159 bool(matcher.traversedir),
1157 1160 )
1158 1161
1159 1162 self._dirty |= dirty
1160 1163
1161 1164 if matcher.traversedir:
1162 1165 for dir in traversed:
1163 1166 matcher.traversedir(dir)
1164 1167
1165 1168 if self._ui.warn:
1166 1169 for item in warnings:
1167 1170 if isinstance(item, tuple):
1168 1171 file_path, syntax = item
1169 1172 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1170 1173 file_path,
1171 1174 syntax,
1172 1175 )
1173 1176 self._ui.warn(msg)
1174 1177 else:
1175 1178 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1176 1179 self._ui.warn(
1177 1180 msg
1178 1181 % (
1179 1182 pathutil.canonpath(
1180 1183 self._rootdir, self._rootdir, item
1181 1184 ),
1182 1185 b"No such file or directory",
1183 1186 )
1184 1187 )
1185 1188
1186 1189 for (fn, message) in bad:
1187 1190 matcher.bad(fn, encoding.strtolocal(message))
1188 1191
1189 1192 status = scmutil.status(
1190 1193 modified=modified,
1191 1194 added=added,
1192 1195 removed=removed,
1193 1196 deleted=deleted,
1194 1197 unknown=unknown,
1195 1198 ignored=ignored,
1196 1199 clean=clean,
1197 1200 )
1198 1201 return (lookup, status)
1199 1202
1200 1203 def status(self, match, subrepos, ignored, clean, unknown):
1201 1204 """Determine the status of the working copy relative to the
1202 1205 dirstate and return a pair of (unsure, status), where status is of type
1203 1206 scmutil.status and:
1204 1207
1205 1208 unsure:
1206 1209 files that might have been modified since the dirstate was
1207 1210 written, but need to be read to be sure (size is the same
1208 1211 but mtime differs)
1209 1212 status.modified:
1210 1213 files that have definitely been modified since the dirstate
1211 1214 was written (different size or mode)
1212 1215 status.clean:
1213 1216 files that have definitely not been modified since the
1214 1217 dirstate was written
1215 1218 """
1216 1219 listignored, listclean, listunknown = ignored, clean, unknown
1217 1220 lookup, modified, added, unknown, ignored = [], [], [], [], []
1218 1221 removed, deleted, clean = [], [], []
1219 1222
1220 1223 dmap = self._map
1221 1224 dmap.preload()
1222 1225
1223 1226 use_rust = True
1224 1227
1225 1228 allowed_matchers = (
1226 1229 matchmod.alwaysmatcher,
1227 1230 matchmod.exactmatcher,
1228 1231 matchmod.includematcher,
1229 1232 )
1230 1233
1231 1234 if rustmod is None:
1232 1235 use_rust = False
1233 1236 elif self._checkcase:
1234 1237 # Case-insensitive filesystems are not handled yet
1235 1238 use_rust = False
1236 1239 elif subrepos:
1237 1240 use_rust = False
1238 1241 elif sparse.enabled:
1239 1242 use_rust = False
1240 1243 elif not isinstance(match, allowed_matchers):
1241 1244 # Some matchers have yet to be implemented
1242 1245 use_rust = False
1243 1246
1244 1247 if use_rust:
1245 1248 try:
1246 1249 return self._rust_status(
1247 1250 match, listclean, listignored, listunknown
1248 1251 )
1249 1252 except rustmod.FallbackError:
1250 1253 pass
1251 1254
1252 1255 def noop(f):
1253 1256 pass
1254 1257
1255 1258 dcontains = dmap.__contains__
1256 1259 dget = dmap.__getitem__
1257 1260 ladd = lookup.append # aka "unsure"
1258 1261 madd = modified.append
1259 1262 aadd = added.append
1260 1263 uadd = unknown.append if listunknown else noop
1261 1264 iadd = ignored.append if listignored else noop
1262 1265 radd = removed.append
1263 1266 dadd = deleted.append
1264 1267 cadd = clean.append if listclean else noop
1265 1268 mexact = match.exact
1266 1269 dirignore = self._dirignore
1267 1270 checkexec = self._checkexec
1268 1271 copymap = self._map.copymap
1269 1272 lastnormaltime = self._lastnormaltime
1270 1273
1271 1274 # We need to do full walks when either
1272 1275 # - we're listing all clean files, or
1273 1276 # - match.traversedir does something, because match.traversedir should
1274 1277 # be called for every dir in the working dir
1275 1278 full = listclean or match.traversedir is not None
1276 1279 for fn, st in pycompat.iteritems(
1277 1280 self.walk(match, subrepos, listunknown, listignored, full=full)
1278 1281 ):
1279 1282 if not dcontains(fn):
1280 1283 if (listignored or mexact(fn)) and dirignore(fn):
1281 1284 if listignored:
1282 1285 iadd(fn)
1283 1286 else:
1284 1287 uadd(fn)
1285 1288 continue
1286 1289
1287 1290 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1288 1291 # written like that for performance reasons. dmap[fn] is not a
1289 1292 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1290 1293 # opcode has fast paths when the value to be unpacked is a tuple or
1291 1294 # a list, but falls back to creating a full-fledged iterator in
1292 1295 # general. That is much slower than simply accessing and storing the
1293 1296 # tuple members one by one.
1294 1297 t = dget(fn)
1295 1298 state = t[0]
1296 1299 mode = t[1]
1297 1300 size = t[2]
1298 1301 time = t[3]
1299 1302
1300 1303 if not st and state in b"nma":
1301 1304 dadd(fn)
1302 1305 elif state == b'n':
1303 1306 if (
1304 1307 size >= 0
1305 1308 and (
1306 1309 (size != st.st_size and size != st.st_size & _rangemask)
1307 1310 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1308 1311 )
1309 1312 or size == FROM_P2 # other parent
1310 1313 or fn in copymap
1311 1314 ):
1312 1315 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1313 1316 # issue6456: Size returned may be longer due to
1314 1317 # encryption on EXT-4 fscrypt, undecided.
1315 1318 ladd(fn)
1316 1319 else:
1317 1320 madd(fn)
1318 1321 elif (
1319 1322 time != st[stat.ST_MTIME]
1320 1323 and time != st[stat.ST_MTIME] & _rangemask
1321 1324 ):
1322 1325 ladd(fn)
1323 1326 elif st[stat.ST_MTIME] == lastnormaltime:
1324 1327 # fn may have just been marked as normal and it may have
1325 1328 # changed in the same second without changing its size.
1326 1329 # This can happen if we quickly do multiple commits.
1327 1330 # Force lookup, so we don't miss such a racy file change.
1328 1331 ladd(fn)
1329 1332 elif listclean:
1330 1333 cadd(fn)
1331 1334 elif state == b'm':
1332 1335 madd(fn)
1333 1336 elif state == b'a':
1334 1337 aadd(fn)
1335 1338 elif state == b'r':
1336 1339 radd(fn)
1337 1340 status = scmutil.status(
1338 1341 modified, added, removed, deleted, unknown, ignored, clean
1339 1342 )
1340 1343 return (lookup, status)
1341 1344
1342 1345 def matches(self, match):
1343 1346 """
1344 1347 return files in the dirstate (in whatever state) filtered by match
1345 1348 """
1346 1349 dmap = self._map
1347 1350 if rustmod is not None:
1348 1351 dmap = self._map._rustmap
1349 1352
1350 1353 if match.always():
1351 1354 return dmap.keys()
1352 1355 files = match.files()
1353 1356 if match.isexact():
1354 1357 # fast path -- filter the other way around, since typically files is
1355 1358 # much smaller than dmap
1356 1359 return [f for f in files if f in dmap]
1357 1360 if match.prefix() and all(fn in dmap for fn in files):
1358 1361 # fast path -- all the values are known to be files, so just return
1359 1362 # that
1360 1363 return list(files)
1361 1364 return [f for f in dmap if match(f)]
1362 1365
1363 1366 def _actualfilename(self, tr):
1364 1367 if tr:
1365 1368 return self._pendingfilename
1366 1369 else:
1367 1370 return self._filename
1368 1371
1369 1372 def savebackup(self, tr, backupname):
1370 1373 '''Save current dirstate into backup file'''
1371 1374 filename = self._actualfilename(tr)
1372 1375 assert backupname != filename
1373 1376
1374 1377 # use '_writedirstate' instead of 'write' to write changes certainly,
1375 1378 # because the latter omits writing out if transaction is running.
1376 1379 # output file will be used to create backup of dirstate at this point.
1377 1380 if self._dirty or not self._opener.exists(filename):
1378 1381 self._writedirstate(
1379 1382 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1380 1383 )
1381 1384
1382 1385 if tr:
1383 1386 # ensure that subsequent tr.writepending returns True for
1384 1387 # changes written out above, even if dirstate is never
1385 1388 # changed after this
1386 1389 tr.addfilegenerator(
1387 1390 b'dirstate',
1388 1391 (self._filename,),
1389 1392 self._writedirstate,
1390 1393 location=b'plain',
1391 1394 )
1392 1395
1393 1396 # ensure that pending file written above is unlinked at
1394 1397 # failure, even if tr.writepending isn't invoked until the
1395 1398 # end of this transaction
1396 1399 tr.registertmp(filename, location=b'plain')
1397 1400
1398 1401 self._opener.tryunlink(backupname)
1399 1402 # hardlink backup is okay because _writedirstate is always called
1400 1403 # with an "atomictemp=True" file.
1401 1404 util.copyfile(
1402 1405 self._opener.join(filename),
1403 1406 self._opener.join(backupname),
1404 1407 hardlink=True,
1405 1408 )
1406 1409
1407 1410 def restorebackup(self, tr, backupname):
1408 1411 '''Restore dirstate by backup file'''
1409 1412 # this "invalidate()" prevents "wlock.release()" from writing
1410 1413 # changes of dirstate out after restoring from backup file
1411 1414 self.invalidate()
1412 1415 filename = self._actualfilename(tr)
1413 1416 o = self._opener
1414 1417 if util.samefile(o.join(backupname), o.join(filename)):
1415 1418 o.unlink(backupname)
1416 1419 else:
1417 1420 o.rename(backupname, filename, checkambig=True)
1418 1421
1419 1422 def clearbackup(self, tr, backupname):
1420 1423 '''Clear backup file'''
1421 1424 self._opener.unlink(backupname)
1422 1425
1423 1426
1424 1427 class dirstatemap(object):
1425 1428 """Map encapsulating the dirstate's contents.
1426 1429
1427 1430 The dirstate contains the following state:
1428 1431
1429 1432 - `identity` is the identity of the dirstate file, which can be used to
1430 1433 detect when changes have occurred to the dirstate file.
1431 1434
1432 1435 - `parents` is a pair containing the parents of the working copy. The
1433 1436 parents are updated by calling `setparents`.
1434 1437
1435 1438 - the state map maps filenames to tuples of (state, mode, size, mtime),
1436 1439 where state is a single character representing 'normal', 'added',
1437 1440 'removed', or 'merged'. It is read by treating the dirstate as a
1438 1441 dict. File state is updated by calling the `addfile`, `removefile` and
1439 1442 `dropfile` methods.
1440 1443
1441 1444 - `copymap` maps destination filenames to their source filename.
1442 1445
1443 1446 The dirstate also provides the following views onto the state:
1444 1447
1445 1448 - `nonnormalset` is a set of the filenames that have state other
1446 1449 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1447 1450
1448 1451 - `otherparentset` is a set of the filenames that are marked as coming
1449 1452 from the second parent when the dirstate is currently being merged.
1450 1453
1451 1454 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1452 1455 form that they appear as in the dirstate.
1453 1456
1454 1457 - `dirfoldmap` is a dict mapping normalized directory names to the
1455 1458 denormalized form that they appear as in the dirstate.
1456 1459 """
1457 1460
1458 1461 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1459 1462 self._ui = ui
1460 1463 self._opener = opener
1461 1464 self._root = root
1462 1465 self._filename = b'dirstate'
1463 1466 self._nodelen = 20
1464 1467 self._nodeconstants = nodeconstants
1465 1468 assert (
1466 1469 not use_dirstate_v2
1467 1470 ), "should have detected unsupported requirement"
1468 1471
1469 1472 self._parents = None
1470 1473 self._dirtyparents = False
1471 1474
1472 1475 # for consistent view between _pl() and _read() invocations
1473 1476 self._pendingmode = None
1474 1477
1475 1478 @propertycache
1476 1479 def _map(self):
1477 1480 self._map = {}
1478 1481 self.read()
1479 1482 return self._map
1480 1483
1481 1484 @propertycache
1482 1485 def copymap(self):
1483 1486 self.copymap = {}
1484 1487 self._map
1485 1488 return self.copymap
1486 1489
1487 1490 def directories(self):
1488 1491 # Rust / dirstate-v2 only
1489 1492 return []
1490 1493
1491 1494 def clear(self):
1492 1495 self._map.clear()
1493 1496 self.copymap.clear()
1494 1497 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1495 1498 util.clearcachedproperty(self, b"_dirs")
1496 1499 util.clearcachedproperty(self, b"_alldirs")
1497 1500 util.clearcachedproperty(self, b"filefoldmap")
1498 1501 util.clearcachedproperty(self, b"dirfoldmap")
1499 1502 util.clearcachedproperty(self, b"nonnormalset")
1500 1503 util.clearcachedproperty(self, b"otherparentset")
1501 1504
1502 1505 def items(self):
1503 1506 return pycompat.iteritems(self._map)
1504 1507
1505 1508 # forward for python2,3 compat
1506 1509 iteritems = items
1507 1510
1508 1511 def __len__(self):
1509 1512 return len(self._map)
1510 1513
1511 1514 def __iter__(self):
1512 1515 return iter(self._map)
1513 1516
1514 1517 def get(self, key, default=None):
1515 1518 return self._map.get(key, default)
1516 1519
1517 1520 def __contains__(self, key):
1518 1521 return key in self._map
1519 1522
1520 1523 def __getitem__(self, key):
1521 1524 return self._map[key]
1522 1525
1523 1526 def keys(self):
1524 1527 return self._map.keys()
1525 1528
1526 1529 def preload(self):
1527 1530 """Loads the underlying data, if it's not already loaded"""
1528 1531 self._map
1529 1532
1530 1533 def addfile(self, f, oldstate, state, mode, size, mtime):
1531 1534 """Add a tracked file to the dirstate."""
1532 1535 if oldstate in b"?r" and "_dirs" in self.__dict__:
1533 1536 self._dirs.addpath(f)
1534 1537 if oldstate == b"?" and "_alldirs" in self.__dict__:
1535 1538 self._alldirs.addpath(f)
1536 1539 self._map[f] = dirstatetuple(state, mode, size, mtime)
1537 1540 if state != b'n' or mtime == -1:
1538 1541 self.nonnormalset.add(f)
1539 1542 if size == FROM_P2:
1540 1543 self.otherparentset.add(f)
1541 1544
1542 1545 def removefile(self, f, oldstate, size):
1543 1546 """
1544 1547 Mark a file as removed in the dirstate.
1545 1548
1546 1549 The `size` parameter is used to store sentinel values that indicate
1547 1550 the file's previous state. In the future, we should refactor this
1548 1551 to be more explicit about what that state is.
1549 1552 """
1550 1553 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1551 1554 self._dirs.delpath(f)
1552 1555 if oldstate == b"?" and "_alldirs" in self.__dict__:
1553 1556 self._alldirs.addpath(f)
1554 1557 if "filefoldmap" in self.__dict__:
1555 1558 normed = util.normcase(f)
1556 1559 self.filefoldmap.pop(normed, None)
1557 1560 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1558 1561 self.nonnormalset.add(f)
1559 1562
1560 1563 def dropfile(self, f, oldstate):
1561 1564 """
1562 1565 Remove a file from the dirstate. Returns True if the file was
1563 1566 previously recorded.
1564 1567 """
1565 1568 exists = self._map.pop(f, None) is not None
1566 1569 if exists:
1567 1570 if oldstate != b"r" and "_dirs" in self.__dict__:
1568 1571 self._dirs.delpath(f)
1569 1572 if "_alldirs" in self.__dict__:
1570 1573 self._alldirs.delpath(f)
1571 1574 if "filefoldmap" in self.__dict__:
1572 1575 normed = util.normcase(f)
1573 1576 self.filefoldmap.pop(normed, None)
1574 1577 self.nonnormalset.discard(f)
1575 1578 return exists
1576 1579
1577 1580 def clearambiguoustimes(self, files, now):
1578 1581 for f in files:
1579 1582 e = self.get(f)
1580 1583 if e is not None and e[0] == b'n' and e[3] == now:
1581 1584 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1582 1585 self.nonnormalset.add(f)
1583 1586
1584 1587 def nonnormalentries(self):
1585 1588 '''Compute the nonnormal dirstate entries from the dmap'''
1586 1589 try:
1587 1590 return parsers.nonnormalotherparententries(self._map)
1588 1591 except AttributeError:
1589 1592 nonnorm = set()
1590 1593 otherparent = set()
1591 1594 for fname, e in pycompat.iteritems(self._map):
1592 1595 if e[0] != b'n' or e[3] == -1:
1593 1596 nonnorm.add(fname)
1594 1597 if e[0] == b'n' and e[2] == FROM_P2:
1595 1598 otherparent.add(fname)
1596 1599 return nonnorm, otherparent
1597 1600
1598 1601 @propertycache
1599 1602 def filefoldmap(self):
1600 1603 """Returns a dictionary mapping normalized case paths to their
1601 1604 non-normalized versions.
1602 1605 """
1603 1606 try:
1604 1607 makefilefoldmap = parsers.make_file_foldmap
1605 1608 except AttributeError:
1606 1609 pass
1607 1610 else:
1608 1611 return makefilefoldmap(
1609 1612 self._map, util.normcasespec, util.normcasefallback
1610 1613 )
1611 1614
1612 1615 f = {}
1613 1616 normcase = util.normcase
1614 1617 for name, s in pycompat.iteritems(self._map):
1615 1618 if s[0] != b'r':
1616 1619 f[normcase(name)] = name
1617 1620 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1618 1621 return f
1619 1622
1620 1623 def hastrackeddir(self, d):
1621 1624 """
1622 1625 Returns True if the dirstate contains a tracked (not removed) file
1623 1626 in this directory.
1624 1627 """
1625 1628 return d in self._dirs
1626 1629
1627 1630 def hasdir(self, d):
1628 1631 """
1629 1632 Returns True if the dirstate contains a file (tracked or removed)
1630 1633 in this directory.
1631 1634 """
1632 1635 return d in self._alldirs
1633 1636
1634 1637 @propertycache
1635 1638 def _dirs(self):
1636 1639 return pathutil.dirs(self._map, b'r')
1637 1640
1638 1641 @propertycache
1639 1642 def _alldirs(self):
1640 1643 return pathutil.dirs(self._map)
1641 1644
1642 1645 def _opendirstatefile(self):
1643 1646 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1644 1647 if self._pendingmode is not None and self._pendingmode != mode:
1645 1648 fp.close()
1646 1649 raise error.Abort(
1647 1650 _(b'working directory state may be changed parallelly')
1648 1651 )
1649 1652 self._pendingmode = mode
1650 1653 return fp
1651 1654
1652 1655 def parents(self):
1653 1656 if not self._parents:
1654 1657 try:
1655 1658 fp = self._opendirstatefile()
1656 1659 st = fp.read(2 * self._nodelen)
1657 1660 fp.close()
1658 1661 except IOError as err:
1659 1662 if err.errno != errno.ENOENT:
1660 1663 raise
1661 1664 # File doesn't exist, so the current state is empty
1662 1665 st = b''
1663 1666
1664 1667 l = len(st)
1665 1668 if l == self._nodelen * 2:
1666 1669 self._parents = (
1667 1670 st[: self._nodelen],
1668 1671 st[self._nodelen : 2 * self._nodelen],
1669 1672 )
1670 1673 elif l == 0:
1671 1674 self._parents = (
1672 1675 self._nodeconstants.nullid,
1673 1676 self._nodeconstants.nullid,
1674 1677 )
1675 1678 else:
1676 1679 raise error.Abort(
1677 1680 _(b'working directory state appears damaged!')
1678 1681 )
1679 1682
1680 1683 return self._parents
1681 1684
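The v1 dirstate file starts with the two parent node ids back to back, so parents() only needs to read 2 * nodelen bytes and slice them apart; an empty or missing file means both parents are the null id, and any other length is treated as corruption. A hedged sketch of that header layout (parse_parents is a hypothetical helper; the 20-byte node length matches the value hard-coded later in this file):

    NODELEN = 20               # SHA-1 node length used by the v1 format
    NULLID = b'\0' * NODELEN

    def parse_parents(header):
        if len(header) == 2 * NODELEN:
            return header[:NODELEN], header[NODELEN:2 * NODELEN]
        if len(header) == 0:
            return NULLID, NULLID
        raise ValueError('working directory state appears damaged')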
1682 1685 def setparents(self, p1, p2):
1683 1686 self._parents = (p1, p2)
1684 1687 self._dirtyparents = True
1685 1688
1686 1689 def read(self):
1687 1690 # ignore HG_PENDING because identity is used only for writing
1688 1691 self.identity = util.filestat.frompath(
1689 1692 self._opener.join(self._filename)
1690 1693 )
1691 1694
1692 1695 try:
1693 1696 fp = self._opendirstatefile()
1694 1697 try:
1695 1698 st = fp.read()
1696 1699 finally:
1697 1700 fp.close()
1698 1701 except IOError as err:
1699 1702 if err.errno != errno.ENOENT:
1700 1703 raise
1701 1704 return
1702 1705 if not st:
1703 1706 return
1704 1707
1705 1708 if util.safehasattr(parsers, b'dict_new_presized'):
1706 1709 # Make an estimate of the number of files in the dirstate based on
1707 1710 # its size. This trades wasting some memory for avoiding costly
1708 1711 # resizes. Each entry has a prefix of 17 bytes followed by one or
1709 1712 # two path names. Studies on various large-scale real-world repositories
1710 1713 # found 54 bytes a reasonable upper limit for the average path names.
1711 1714 # Copy entries are ignored for the sake of this estimate.
1712 1715 self._map = parsers.dict_new_presized(len(st) // 71)
1713 1716
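The 71-byte divisor used above is simply that fixed 17-byte per-entry header plus the roughly 54-byte average path length, so a 7,100,000-byte dirstate is presized for about 100,000 entries; copy entries add bytes the estimate deliberately ignores, so it errs on the generous side. Spelled out as arithmetic (estimated_entries is an illustrative name, not part of the module):

    ENTRY_HEADER = 17   # fixed bytes per entry
    AVG_PATH = 54       # observed average path length in large repositories

    def estimated_entries(dirstate_size_in_bytes):
        return dirstate_size_in_bytes // (ENTRY_HEADER + AVG_PATH)

    # estimated_entries(7_100_000) -> 100000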
1714 1717 # Python's garbage collector triggers a GC each time a certain number
1715 1718 # of container objects (the number being defined by
1716 1719 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1717 1720 # for each file in the dirstate. The C version then immediately marks
1718 1721 # them as not to be tracked by the collector. However, this has no
1719 1722 # effect on when GCs are triggered, only on what objects the GC looks
1720 1723 # into. This means that O(number of files) GCs are unavoidable.
1721 1724 # Depending on when in the process's lifetime the dirstate is parsed,
1722 1725 # this can get very expensive. As a workaround, disable GC while
1723 1726 # parsing the dirstate.
1724 1727 #
1725 1728 # (we cannot decorate the function directly since it is in a C module)
1726 1729 parse_dirstate = util.nogc(parsers.parse_dirstate)
1727 1730 p = parse_dirstate(self._map, self.copymap, st)
1728 1731 if not self._dirtyparents:
1729 1732 self.setparents(*p)
1730 1733
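The workaround described in the comment above can be reproduced outside Mercurial with the standard gc module: pause the cyclic collector for the duration of an allocation-heavy call, then restore its previous state. This is only a sketch of the pattern that util.nogc wraps up as a decorator, and no_gc / parse_many_entries are hypothetical names.

    import gc
    from contextlib import contextmanager

    @contextmanager
    def no_gc():
        # Pause the cyclic garbage collector around an allocation-heavy parse.
        was_enabled = gc.isenabled()
        gc.disable()
        try:
            yield
        finally:
            if was_enabled:
                gc.enable()

    # with no_gc():
    #     entries = parse_many_entries(raw)   # hypothetical expensive parse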
1731 1734 # Avoid excess attribute lookups by fast pathing certain checks
1732 1735 self.__contains__ = self._map.__contains__
1733 1736 self.__getitem__ = self._map.__getitem__
1734 1737 self.get = self._map.get
1735 1738
1736 1739 def write(self, st, now):
1737 1740 st.write(
1738 1741 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1739 1742 )
1740 1743 st.close()
1741 1744 self._dirtyparents = False
1742 1745 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1743 1746
1744 1747 @propertycache
1745 1748 def nonnormalset(self):
1746 1749 nonnorm, otherparents = self.nonnormalentries()
1747 1750 self.otherparentset = otherparents
1748 1751 return nonnorm
1749 1752
1750 1753 @propertycache
1751 1754 def otherparentset(self):
1752 1755 nonnorm, otherparents = self.nonnormalentries()
1753 1756 self.nonnormalset = nonnorm
1754 1757 return otherparents
1755 1758
1756 1759 def non_normal_or_other_parent_paths(self):
1757 1760 return self.nonnormalset.union(self.otherparentset)
1758 1761
1759 1762 @propertycache
1760 1763 def identity(self):
1761 1764 self._map
1762 1765 return self.identity
1763 1766
1764 1767 @propertycache
1765 1768 def dirfoldmap(self):
1766 1769 f = {}
1767 1770 normcase = util.normcase
1768 1771 for name in self._dirs:
1769 1772 f[normcase(name)] = name
1770 1773 return f
1771 1774
1772 1775
1773 1776 if rustmod is not None:
1774 1777
1775 1778 class dirstatemap(object):
1776 1779 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1777 1780 self._use_dirstate_v2 = use_dirstate_v2
1778 1781 self._nodeconstants = nodeconstants
1779 1782 self._ui = ui
1780 1783 self._opener = opener
1781 1784 self._root = root
1782 1785 self._filename = b'dirstate'
1783 1786 self._nodelen = 20 # Also update Rust code when changing this!
1784 1787 self._parents = None
1785 1788 self._dirtyparents = False
1786 1789
1787 1790 # for consistent view between _pl() and _read() invocations
1788 1791 self._pendingmode = None
1789 1792
1790 1793 self._use_dirstate_tree = self._ui.configbool(
1791 1794 b"experimental",
1792 1795 b"dirstate-tree.in-memory",
1793 1796 False,
1794 1797 )
1795 1798
1796 1799 def addfile(self, *args, **kwargs):
1797 1800 return self._rustmap.addfile(*args, **kwargs)
1798 1801
1799 1802 def removefile(self, *args, **kwargs):
1800 1803 return self._rustmap.removefile(*args, **kwargs)
1801 1804
1802 1805 def dropfile(self, *args, **kwargs):
1803 1806 return self._rustmap.dropfile(*args, **kwargs)
1804 1807
1805 1808 def clearambiguoustimes(self, *args, **kwargs):
1806 1809 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1807 1810
1808 1811 def nonnormalentries(self):
1809 1812 return self._rustmap.nonnormalentries()
1810 1813
1811 1814 def get(self, *args, **kwargs):
1812 1815 return self._rustmap.get(*args, **kwargs)
1813 1816
1814 1817 @property
1815 1818 def copymap(self):
1816 1819 return self._rustmap.copymap()
1817 1820
1818 1821 def directories(self):
1819 1822 return self._rustmap.directories()
1820 1823
1821 1824 def preload(self):
1822 1825 self._rustmap
1823 1826
1824 1827 def clear(self):
1825 1828 self._rustmap.clear()
1826 1829 self.setparents(
1827 1830 self._nodeconstants.nullid, self._nodeconstants.nullid
1828 1831 )
1829 1832 util.clearcachedproperty(self, b"_dirs")
1830 1833 util.clearcachedproperty(self, b"_alldirs")
1831 1834 util.clearcachedproperty(self, b"dirfoldmap")
1832 1835
1833 1836 def items(self):
1834 1837 return self._rustmap.items()
1835 1838
1836 1839 def keys(self):
1837 1840 return iter(self._rustmap)
1838 1841
1839 1842 def __contains__(self, key):
1840 1843 return key in self._rustmap
1841 1844
1842 1845 def __getitem__(self, item):
1843 1846 return self._rustmap[item]
1844 1847
1845 1848 def __len__(self):
1846 1849 return len(self._rustmap)
1847 1850
1848 1851 def __iter__(self):
1849 1852 return iter(self._rustmap)
1850 1853
1851 1854 # forward for python2,3 compat
1852 1855 iteritems = items
1853 1856
1854 1857 def _opendirstatefile(self):
1855 1858 fp, mode = txnutil.trypending(
1856 1859 self._root, self._opener, self._filename
1857 1860 )
1858 1861 if self._pendingmode is not None and self._pendingmode != mode:
1859 1862 fp.close()
1860 1863 raise error.Abort(
1861 1864 _(b'working directory state may be changed parallelly')
1862 1865 )
1863 1866 self._pendingmode = mode
1864 1867 return fp
1865 1868
1866 1869 def setparents(self, p1, p2):
1867 1870 self._parents = (p1, p2)
1868 1871 self._dirtyparents = True
1869 1872
1870 1873 def parents(self):
1871 1874 if not self._parents:
1872 1875 if self._use_dirstate_v2:
1873 1876 offset = len(rustmod.V2_FORMAT_MARKER)
1874 1877 else:
1875 1878 offset = 0
1876 1879 read_len = offset + self._nodelen * 2
1877 1880 try:
1878 1881 fp = self._opendirstatefile()
1879 1882 st = fp.read(read_len)
1880 1883 fp.close()
1881 1884 except IOError as err:
1882 1885 if err.errno != errno.ENOENT:
1883 1886 raise
1884 1887 # File doesn't exist, so the current state is empty
1885 1888 st = b''
1886 1889
1887 1890 l = len(st)
1888 1891 if l == read_len:
1889 1892 st = st[offset:]
1890 1893 self._parents = (
1891 1894 st[: self._nodelen],
1892 1895 st[self._nodelen : 2 * self._nodelen],
1893 1896 )
1894 1897 elif l == 0:
1895 1898 self._parents = (
1896 1899 self._nodeconstants.nullid,
1897 1900 self._nodeconstants.nullid,
1898 1901 )
1899 1902 else:
1900 1903 raise error.Abort(
1901 1904 _(b'working directory state appears damaged!')
1902 1905 )
1903 1906
1904 1907 return self._parents
1905 1908
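Compared with the pure-Python parents() earlier, the only wrinkle here is that a dirstate-v2 file begins with a format marker, so the two parent nodes sit at an offset of len(marker). A sketch that takes the marker length as a parameter rather than guessing the marker bytes themselves; parse_parents_v2 is an illustrative helper, not part of the module.

    def parse_parents_v2(data, marker_len, nodelen=20):
        nullid = b'\0' * nodelen
        if len(data) == 0:
            return nullid, nullid
        header = data[marker_len:marker_len + 2 * nodelen]
        if len(header) != 2 * nodelen:
            raise ValueError('working directory state appears damaged')
        return header[:nodelen], header[nodelen:2 * nodelen]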
1906 1909 @propertycache
1907 1910 def _rustmap(self):
1908 1911 """
1909 1912 Fills the Dirstatemap when called.
1910 1913 """
1911 1914 # ignore HG_PENDING because identity is used only for writing
1912 1915 self.identity = util.filestat.frompath(
1913 1916 self._opener.join(self._filename)
1914 1917 )
1915 1918
1916 1919 try:
1917 1920 fp = self._opendirstatefile()
1918 1921 try:
1919 1922 st = fp.read()
1920 1923 finally:
1921 1924 fp.close()
1922 1925 except IOError as err:
1923 1926 if err.errno != errno.ENOENT:
1924 1927 raise
1925 1928 st = b''
1926 1929
1927 1930 self._rustmap, parents = rustmod.DirstateMap.new(
1928 1931 self._use_dirstate_tree, self._use_dirstate_v2, st
1929 1932 )
1930 1933
1931 1934 if parents and not self._dirtyparents:
1932 1935 self.setparents(*parents)
1933 1936
1934 1937 self.__contains__ = self._rustmap.__contains__
1935 1938 self.__getitem__ = self._rustmap.__getitem__
1936 1939 self.get = self._rustmap.get
1937 1940 return self._rustmap
1938 1941
1939 1942 def write(self, st, now):
1940 1943 parents = self.parents()
1941 1944 packed = self._rustmap.write(
1942 1945 self._use_dirstate_v2, parents[0], parents[1], now
1943 1946 )
1944 1947 st.write(packed)
1945 1948 st.close()
1946 1949 self._dirtyparents = False
1947 1950
1948 1951 @propertycache
1949 1952 def filefoldmap(self):
1950 1953 """Returns a dictionary mapping normalized case paths to their
1951 1954 non-normalized versions.
1952 1955 """
1953 1956 return self._rustmap.filefoldmapasdict()
1954 1957
1955 1958 def hastrackeddir(self, d):
1956 1959 return self._rustmap.hastrackeddir(d)
1957 1960
1958 1961 def hasdir(self, d):
1959 1962 return self._rustmap.hasdir(d)
1960 1963
1961 1964 @propertycache
1962 1965 def identity(self):
1963 1966 self._rustmap
1964 1967 return self.identity
1965 1968
1966 1969 @property
1967 1970 def nonnormalset(self):
1968 1971 nonnorm = self._rustmap.non_normal_entries()
1969 1972 return nonnorm
1970 1973
1971 1974 @propertycache
1972 1975 def otherparentset(self):
1973 1976 otherparents = self._rustmap.other_parent_entries()
1974 1977 return otherparents
1975 1978
1976 1979 def non_normal_or_other_parent_paths(self):
1977 1980 return self._rustmap.non_normal_or_other_parent_paths()
1978 1981
1979 1982 @propertycache
1980 1983 def dirfoldmap(self):
1981 1984 f = {}
1982 1985 normcase = util.normcase
1983 1986 for name, _pseudo_entry in self.directories():
1984 1987 f[normcase(name)] = name
1985 1988 return f