rust: Remove handling of `parents` in `DirstateMap`...
Simon Sapin
r47891:b6339a99 default
@@ -1,1958 +1,1956 b''
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import os
14 14 import stat
15 15
16 16 from .i18n import _
17 17 from .pycompat import delattr
18 18
19 19 from hgdemandimport import tracing
20 20
21 21 from . import (
22 22 encoding,
23 23 error,
24 24 match as matchmod,
25 25 pathutil,
26 26 policy,
27 27 pycompat,
28 28 scmutil,
29 29 sparse,
30 30 txnutil,
31 31 util,
32 32 )
33 33
34 34 from .interfaces import (
35 35 dirstate as intdirstate,
36 36 util as interfaceutil,
37 37 )
38 38
39 39 parsers = policy.importmod('parsers')
40 40 rustmod = policy.importrust('dirstate')
41 41
42 42 propertycache = util.propertycache
43 43 filecache = scmutil.filecache
44 44 _rangemask = 0x7FFFFFFF
45 45
46 46 dirstatetuple = parsers.dirstatetuple
47 47
48 48
49 49 class repocache(filecache):
50 50 """filecache for files in .hg/"""
51 51
52 52 def join(self, obj, fname):
53 53 return obj._opener.join(fname)
54 54
55 55
56 56 class rootcache(filecache):
57 57 """filecache for files in the repository root"""
58 58
59 59 def join(self, obj, fname):
60 60 return obj._join(fname)
61 61
62 62
63 63 def _getfsnow(vfs):
64 64 '''Get "now" timestamp on filesystem'''
65 65 tmpfd, tmpname = vfs.mkstemp()
66 66 try:
67 67 return os.fstat(tmpfd)[stat.ST_MTIME]
68 68 finally:
69 69 os.close(tmpfd)
70 70 vfs.unlink(tmpname)
71 71
72 72
73 73 @interfaceutil.implementer(intdirstate.idirstate)
74 74 class dirstate(object):
75 75 def __init__(
76 76 self, opener, ui, root, validate, sparsematchfn, nodeconstants
77 77 ):
78 78 """Create a new dirstate object.
79 79
80 80 opener is an open()-like callable that can be used to open the
81 81 dirstate file; root is the root of the directory tracked by
82 82 the dirstate.
83 83 """
84 84 self._nodeconstants = nodeconstants
85 85 self._opener = opener
86 86 self._validate = validate
87 87 self._root = root
88 88 self._sparsematchfn = sparsematchfn
89 89 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is a
90 90 # UNC path pointing to a root share (issue4557)
91 91 self._rootdir = pathutil.normasprefix(root)
92 92 self._dirty = False
93 93 self._lastnormaltime = 0
94 94 self._ui = ui
95 95 self._filecache = {}
96 96 self._parentwriters = 0
97 97 self._filename = b'dirstate'
98 98 self._pendingfilename = b'%s.pending' % self._filename
99 99 self._plchangecallbacks = {}
100 100 self._origpl = None
101 101 self._updatedfiles = set()
102 102 self._mapcls = dirstatemap
103 103 # Access and cache cwd early, so we don't access it for the first time
104 104 # after a working-copy update caused it to not exist (accessing it then
105 105 # raises an exception).
106 106 self._cwd
107 107
108 108 def prefetch_parents(self):
109 109 """make sure the parents are loaded
110 110
111 111 Used to avoid a race condition.
112 112 """
113 113 self._pl
114 114
115 115 @contextlib.contextmanager
116 116 def parentchange(self):
117 117 """Context manager for handling dirstate parents.
118 118
119 119 If an exception occurs in the scope of the context manager,
120 120 the incoherent dirstate won't be written when wlock is
121 121 released.
122 122 """
123 123 self._parentwriters += 1
124 124 yield
125 125 # Typically we want the "undo" step of a context manager in a
126 126 # finally block so it happens even when an exception
127 127 # occurs. In this case, however, we only want to decrement
128 128 # parentwriters if the code in the with statement exits
129 129 # normally, so we don't have a try/finally here on purpose.
130 130 self._parentwriters -= 1
131 131
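# A minimal usage sketch of parentchange() (assuming a `repo` whose dirstate
# this is): callers wrap parent updates so that a failure inside the block
# keeps the incoherent dirstate from being written when the wlock is released.
#
#     with repo.dirstate.parentchange():
#         repo.dirstate.setparents(newnode)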
132 132 def pendingparentchange(self):
133 133 """Returns true if the dirstate is in the middle of a set of changes
134 134 that modify the dirstate parent.
135 135 """
136 136 return self._parentwriters > 0
137 137
138 138 @propertycache
139 139 def _map(self):
140 140 """Return the dirstate contents (see documentation for dirstatemap)."""
141 141 self._map = self._mapcls(
142 142 self._ui, self._opener, self._root, self._nodeconstants
143 143 )
144 144 return self._map
145 145
146 146 @property
147 147 def _sparsematcher(self):
148 148 """The matcher for the sparse checkout.
149 149
150 150 The working directory may not include every file from a manifest. The
151 151 matcher obtained by this property will match a path if it is to be
152 152 included in the working directory.
153 153 """
154 154 # TODO there is potential to cache this property. For now, the matcher
155 155 # is resolved on every access. (But the called function does use a
156 156 # cache to keep the lookup fast.)
157 157 return self._sparsematchfn()
158 158
159 159 @repocache(b'branch')
160 160 def _branch(self):
161 161 try:
162 162 return self._opener.read(b"branch").strip() or b"default"
163 163 except IOError as inst:
164 164 if inst.errno != errno.ENOENT:
165 165 raise
166 166 return b"default"
167 167
168 168 @property
169 169 def _pl(self):
170 170 return self._map.parents()
171 171
172 172 def hasdir(self, d):
173 173 return self._map.hastrackeddir(d)
174 174
175 175 @rootcache(b'.hgignore')
176 176 def _ignore(self):
177 177 files = self._ignorefiles()
178 178 if not files:
179 179 return matchmod.never()
180 180
181 181 pats = [b'include:%s' % f for f in files]
182 182 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
183 183
184 184 @propertycache
185 185 def _slash(self):
186 186 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
187 187
188 188 @propertycache
189 189 def _checklink(self):
190 190 return util.checklink(self._root)
191 191
192 192 @propertycache
193 193 def _checkexec(self):
194 194 return bool(util.checkexec(self._root))
195 195
196 196 @propertycache
197 197 def _checkcase(self):
198 198 return not util.fscasesensitive(self._join(b'.hg'))
199 199
200 200 def _join(self, f):
201 201 # much faster than os.path.join()
202 202 # it's safe because f is always a relative path
203 203 return self._rootdir + f
204 204
205 205 def flagfunc(self, buildfallback):
206 206 if self._checklink and self._checkexec:
207 207
208 208 def f(x):
209 209 try:
210 210 st = os.lstat(self._join(x))
211 211 if util.statislink(st):
212 212 return b'l'
213 213 if util.statisexec(st):
214 214 return b'x'
215 215 except OSError:
216 216 pass
217 217 return b''
218 218
219 219 return f
220 220
221 221 fallback = buildfallback()
222 222 if self._checklink:
223 223
224 224 def f(x):
225 225 if os.path.islink(self._join(x)):
226 226 return b'l'
227 227 if b'x' in fallback(x):
228 228 return b'x'
229 229 return b''
230 230
231 231 return f
232 232 if self._checkexec:
233 233
234 234 def f(x):
235 235 if b'l' in fallback(x):
236 236 return b'l'
237 237 if util.isexec(self._join(x)):
238 238 return b'x'
239 239 return b''
240 240
241 241 return f
242 242 else:
243 243 return fallback
244 244
245 245 @propertycache
246 246 def _cwd(self):
247 247 # internal config: ui.forcecwd
248 248 forcecwd = self._ui.config(b'ui', b'forcecwd')
249 249 if forcecwd:
250 250 return forcecwd
251 251 return encoding.getcwd()
252 252
253 253 def getcwd(self):
254 254 """Return the path from which a canonical path is calculated.
255 255
256 256 This path should be used to resolve file patterns or to convert
257 257 canonical paths back to file paths for display. It shouldn't be
258 258 used to get real file paths. Use vfs functions instead.
259 259 """
260 260 cwd = self._cwd
261 261 if cwd == self._root:
262 262 return b''
263 263 # self._root ends with a path separator if self._root is '/' or 'C:\'
264 264 rootsep = self._root
265 265 if not util.endswithsep(rootsep):
266 266 rootsep += pycompat.ossep
267 267 if cwd.startswith(rootsep):
268 268 return cwd[len(rootsep) :]
269 269 else:
270 270 # we're outside the repo. return an absolute path.
271 271 return cwd
272 272
273 273 def pathto(self, f, cwd=None):
274 274 if cwd is None:
275 275 cwd = self.getcwd()
276 276 path = util.pathto(self._root, cwd, f)
277 277 if self._slash:
278 278 return util.pconvert(path)
279 279 return path
280 280
281 281 def __getitem__(self, key):
282 282 """Return the current state of key (a filename) in the dirstate.
283 283
284 284 States are:
285 285 n normal
286 286 m needs merging
287 287 r marked for removal
288 288 a marked for addition
289 289 ? not tracked
290 290 """
291 291 return self._map.get(key, (b"?",))[0]
292 292
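# Indexing the dirstate returns one of the state characters above; a small
# sketch, assuming a loaded repo:
#
#     repo.dirstate[b'tracked-and-clean.txt']   # -> b'n'
#     repo.dirstate[b'never-added.txt']         # -> b'?'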
293 293 def __contains__(self, key):
294 294 return key in self._map
295 295
296 296 def __iter__(self):
297 297 return iter(sorted(self._map))
298 298
299 299 def items(self):
300 300 return pycompat.iteritems(self._map)
301 301
302 302 iteritems = items
303 303
304 304 def parents(self):
305 305 return [self._validate(p) for p in self._pl]
306 306
307 307 def p1(self):
308 308 return self._validate(self._pl[0])
309 309
310 310 def p2(self):
311 311 return self._validate(self._pl[1])
312 312
313 313 def branch(self):
314 314 return encoding.tolocal(self._branch)
315 315
316 316 def setparents(self, p1, p2=None):
317 317 """Set dirstate parents to p1 and p2.
318 318
319 319 When moving from two parents to one, 'm' merged entries are
320 320 adjusted to normal, and previous copy records are discarded and
321 321 returned by the call.
322 322
323 323 See localrepo.setparents()
324 324 """
325 325 if p2 is None:
326 326 p2 = self._nodeconstants.nullid
327 327 if self._parentwriters == 0:
328 328 raise ValueError(
329 329 b"cannot set dirstate parent outside of "
330 330 b"dirstate.parentchange context manager"
331 331 )
332 332
333 333 self._dirty = True
334 334 oldp2 = self._pl[1]
335 335 if self._origpl is None:
336 336 self._origpl = self._pl
337 337 self._map.setparents(p1, p2)
338 338 copies = {}
339 339 if (
340 340 oldp2 != self._nodeconstants.nullid
341 341 and p2 == self._nodeconstants.nullid
342 342 ):
343 343 candidatefiles = self._map.non_normal_or_other_parent_paths()
344 344
345 345 for f in candidatefiles:
346 346 s = self._map.get(f)
347 347 if s is None:
348 348 continue
349 349
350 350 # Discard 'm' markers when moving away from a merge state
351 351 if s[0] == b'm':
352 352 source = self._map.copymap.get(f)
353 353 if source:
354 354 copies[f] = source
355 355 self.normallookup(f)
356 356 # Also fix up otherparent markers
357 357 elif s[0] == b'n' and s[2] == -2:
358 358 source = self._map.copymap.get(f)
359 359 if source:
360 360 copies[f] = source
361 361 self.add(f)
362 362 return copies
363 363
364 364 def setbranch(self, branch):
365 365 self.__class__._branch.set(self, encoding.fromlocal(branch))
366 366 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
367 367 try:
368 368 f.write(self._branch + b'\n')
369 369 f.close()
370 370
371 371 # make sure filecache has the correct stat info for _branch after
372 372 # replacing the underlying file
373 373 ce = self._filecache[b'_branch']
374 374 if ce:
375 375 ce.refresh()
376 376 except: # re-raises
377 377 f.discard()
378 378 raise
379 379
380 380 def invalidate(self):
381 381 """Causes the next access to reread the dirstate.
382 382
383 383 This is different from localrepo.invalidatedirstate() because it always
384 384 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
385 385 check whether the dirstate has changed before rereading it."""
386 386
387 387 for a in ("_map", "_branch", "_ignore"):
388 388 if a in self.__dict__:
389 389 delattr(self, a)
390 390 self._lastnormaltime = 0
391 391 self._dirty = False
392 392 self._updatedfiles.clear()
393 393 self._parentwriters = 0
394 394 self._origpl = None
395 395
396 396 def copy(self, source, dest):
397 397 """Mark dest as a copy of source. Unmark dest if source is None."""
398 398 if source == dest:
399 399 return
400 400 self._dirty = True
401 401 if source is not None:
402 402 self._map.copymap[dest] = source
403 403 self._updatedfiles.add(source)
404 404 self._updatedfiles.add(dest)
405 405 elif self._map.copymap.pop(dest, None):
406 406 self._updatedfiles.add(dest)
407 407
408 408 def copied(self, file):
409 409 return self._map.copymap.get(file, None)
410 410
411 411 def copies(self):
412 412 return self._map.copymap
413 413
414 414 def _addpath(self, f, state, mode, size, mtime):
415 415 oldstate = self[f]
416 416 if state == b'a' or oldstate == b'r':
417 417 scmutil.checkfilename(f)
418 418 if self._map.hastrackeddir(f):
419 419 raise error.Abort(
420 420 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
421 421 )
422 422 # shadows
423 423 for d in pathutil.finddirs(f):
424 424 if self._map.hastrackeddir(d):
425 425 break
426 426 entry = self._map.get(d)
427 427 if entry is not None and entry[0] != b'r':
428 428 raise error.Abort(
429 429 _(b'file %r in dirstate clashes with %r')
430 430 % (pycompat.bytestr(d), pycompat.bytestr(f))
431 431 )
432 432 self._dirty = True
433 433 self._updatedfiles.add(f)
434 434 self._map.addfile(f, oldstate, state, mode, size, mtime)
435 435
436 436 def normal(self, f, parentfiledata=None):
437 437 """Mark a file normal and clean.
438 438
439 439 parentfiledata: (mode, size, mtime) of the clean file
440 440
441 441 parentfiledata should be computed from memory (for mode,
442 442 size), at or as close as possible to the point where we
443 443 determined the file was clean, to limit the risk of the
444 444 file having been changed by an external process between the
445 445 moment where the file was determined to be clean and now."""
446 446 if parentfiledata:
447 447 (mode, size, mtime) = parentfiledata
448 448 else:
449 449 s = os.lstat(self._join(f))
450 450 mode = s.st_mode
451 451 size = s.st_size
452 452 mtime = s[stat.ST_MTIME]
453 453 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
454 454 self._map.copymap.pop(f, None)
455 455 if f in self._map.nonnormalset:
456 456 self._map.nonnormalset.remove(f)
457 457 if mtime > self._lastnormaltime:
458 458 # Remember the most recent modification timeslot for status(),
459 459 # to make sure we won't miss future size-preserving file content
460 460 # modifications that happen within the same timeslot.
461 461 self._lastnormaltime = mtime
462 462
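# A sketch of normal()'s parentfiledata fast path (assuming the caller already
# holds a fresh lstat result for the file, e.g. from a status walk):
#
#     st = os.lstat(repo.wjoin(f))
#     repo.dirstate.normal(
#         f, parentfiledata=(st.st_mode, st.st_size, st[stat.ST_MTIME])
#     )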
463 463 def normallookup(self, f):
464 464 '''Mark a file normal, but possibly dirty.'''
465 465 if self._pl[1] != self._nodeconstants.nullid:
466 466 # if there is a merge going on and the file was either
467 467 # in state 'm' (-1) or coming from other parent (-2) before
468 468 # being removed, restore that state.
469 469 entry = self._map.get(f)
470 470 if entry is not None:
471 471 if entry[0] == b'r' and entry[2] in (-1, -2):
472 472 source = self._map.copymap.get(f)
473 473 if entry[2] == -1:
474 474 self.merge(f)
475 475 elif entry[2] == -2:
476 476 self.otherparent(f)
477 477 if source:
478 478 self.copy(source, f)
479 479 return
480 480 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
481 481 return
482 482 self._addpath(f, b'n', 0, -1, -1)
483 483 self._map.copymap.pop(f, None)
484 484
485 485 def otherparent(self, f):
486 486 '''Mark as coming from the other parent, always dirty.'''
487 487 if self._pl[1] == self._nodeconstants.nullid:
488 488 raise error.Abort(
489 489 _(b"setting %r to other parent only allowed in merges") % f
490 490 )
491 491 if f in self and self[f] == b'n':
492 492 # merge-like
493 493 self._addpath(f, b'm', 0, -2, -1)
494 494 else:
495 495 # add-like
496 496 self._addpath(f, b'n', 0, -2, -1)
497 497 self._map.copymap.pop(f, None)
498 498
499 499 def add(self, f):
500 500 '''Mark a file added.'''
501 501 self._addpath(f, b'a', 0, -1, -1)
502 502 self._map.copymap.pop(f, None)
503 503
504 504 def remove(self, f):
505 505 '''Mark a file removed.'''
506 506 self._dirty = True
507 507 oldstate = self[f]
508 508 size = 0
509 509 if self._pl[1] != self._nodeconstants.nullid:
510 510 entry = self._map.get(f)
511 511 if entry is not None:
512 512 # backup the previous state
513 513 if entry[0] == b'm': # merge
514 514 size = -1
515 515 elif entry[0] == b'n' and entry[2] == -2: # other parent
516 516 size = -2
517 517 self._map.otherparentset.add(f)
518 518 self._updatedfiles.add(f)
519 519 self._map.removefile(f, oldstate, size)
520 520 if size == 0:
521 521 self._map.copymap.pop(f, None)
522 522
523 523 def merge(self, f):
524 524 '''Mark a file merged.'''
525 525 if self._pl[1] == self._nodeconstants.nullid:
526 526 return self.normallookup(f)
527 527 return self.otherparent(f)
528 528
529 529 def drop(self, f):
530 530 '''Drop a file from the dirstate'''
531 531 oldstate = self[f]
532 532 if self._map.dropfile(f, oldstate):
533 533 self._dirty = True
534 534 self._updatedfiles.add(f)
535 535 self._map.copymap.pop(f, None)
536 536
537 537 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
538 538 if exists is None:
539 539 exists = os.path.lexists(os.path.join(self._root, path))
540 540 if not exists:
541 541 # Maybe a path component exists
542 542 if not ignoremissing and b'/' in path:
543 543 d, f = path.rsplit(b'/', 1)
544 544 d = self._normalize(d, False, ignoremissing, None)
545 545 folded = d + b"/" + f
546 546 else:
547 547 # No path components, preserve original case
548 548 folded = path
549 549 else:
550 550 # recursively normalize leading directory components
551 551 # against dirstate
552 552 if b'/' in normed:
553 553 d, f = normed.rsplit(b'/', 1)
554 554 d = self._normalize(d, False, ignoremissing, True)
555 555 r = self._root + b"/" + d
556 556 folded = d + b"/" + util.fspath(f, r)
557 557 else:
558 558 folded = util.fspath(normed, self._root)
559 559 storemap[normed] = folded
560 560
561 561 return folded
562 562
563 563 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
564 564 normed = util.normcase(path)
565 565 folded = self._map.filefoldmap.get(normed, None)
566 566 if folded is None:
567 567 if isknown:
568 568 folded = path
569 569 else:
570 570 folded = self._discoverpath(
571 571 path, normed, ignoremissing, exists, self._map.filefoldmap
572 572 )
573 573 return folded
574 574
575 575 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
576 576 normed = util.normcase(path)
577 577 folded = self._map.filefoldmap.get(normed, None)
578 578 if folded is None:
579 579 folded = self._map.dirfoldmap.get(normed, None)
580 580 if folded is None:
581 581 if isknown:
582 582 folded = path
583 583 else:
584 584 # store discovered result in dirfoldmap so that future
585 585 # normalizefile calls don't start matching directories
586 586 folded = self._discoverpath(
587 587 path, normed, ignoremissing, exists, self._map.dirfoldmap
588 588 )
589 589 return folded
590 590
591 591 def normalize(self, path, isknown=False, ignoremissing=False):
592 592 """
593 593 normalize the case of a pathname when on a casefolding filesystem
594 594
595 595 isknown specifies whether the filename came from walking the
596 596 disk, to avoid extra filesystem access.
597 597
598 598 If ignoremissing is True, missing paths are returned
599 599 unchanged. Otherwise, we try harder to normalize possibly
600 600 existing path components.
601 601
602 602 The normalized case is determined based on the following precedence:
603 603
604 604 - version of name already stored in the dirstate
605 605 - version of name stored on disk
606 606 - version provided via command arguments
607 607 """
608 608
609 609 if self._checkcase:
610 610 return self._normalize(path, isknown, ignoremissing)
611 611 return path
612 612
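# On a case-insensitive filesystem normalize() lets commands accept user-typed
# case; a sketch, assuming b'Foo.txt' is the spelling recorded in the dirstate:
#
#     repo.dirstate.normalize(b'foo.txt')                        # -> b'Foo.txt'
#     repo.dirstate.normalize(b'foo.txt', ignoremissing=True)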
613 613 def clear(self):
614 614 self._map.clear()
615 615 self._lastnormaltime = 0
616 616 self._updatedfiles.clear()
617 617 self._dirty = True
618 618
619 619 def rebuild(self, parent, allfiles, changedfiles=None):
620 620 if changedfiles is None:
621 621 # Rebuild entire dirstate
622 622 to_lookup = allfiles
623 623 to_drop = []
624 624 lastnormaltime = self._lastnormaltime
625 625 self.clear()
626 626 self._lastnormaltime = lastnormaltime
627 627 elif len(changedfiles) < 10:
628 628 # Avoid turning allfiles into a set, which can be expensive if it's
629 629 # large.
630 630 to_lookup = []
631 631 to_drop = []
632 632 for f in changedfiles:
633 633 if f in allfiles:
634 634 to_lookup.append(f)
635 635 else:
636 636 to_drop.append(f)
637 637 else:
638 638 changedfilesset = set(changedfiles)
639 639 to_lookup = changedfilesset & set(allfiles)
640 640 to_drop = changedfilesset - to_lookup
641 641
642 642 if self._origpl is None:
643 643 self._origpl = self._pl
644 644 self._map.setparents(parent, self._nodeconstants.nullid)
645 645
646 646 for f in to_lookup:
647 647 self.normallookup(f)
648 648 for f in to_drop:
649 649 self.drop(f)
650 650
651 651 self._dirty = True
652 652
653 653 def identity(self):
654 654 """Return identity of dirstate itself to detect changing in storage
655 655
656 656 If identity of previous dirstate is equal to this, writing
657 657 changes based on the former dirstate out can keep consistency.
658 658 """
659 659 return self._map.identity
660 660
661 661 def write(self, tr):
662 662 if not self._dirty:
663 663 return
664 664
665 665 filename = self._filename
666 666 if tr:
667 667 # 'dirstate.write()' is not only for writing in-memory
668 668 # changes out, but also for dropping ambiguous timestamps.
669 669 # Delayed writing would re-raise the "ambiguous timestamp issue".
670 670 # See also the wiki page below for details:
671 671 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
672 672
673 673 # emulate dropping timestamp in 'parsers.pack_dirstate'
674 674 now = _getfsnow(self._opener)
675 675 self._map.clearambiguoustimes(self._updatedfiles, now)
676 676
677 677 # emulate that all 'dirstate.normal' results are written out
678 678 self._lastnormaltime = 0
679 679 self._updatedfiles.clear()
680 680
681 681 # delay writing in-memory changes out
682 682 tr.addfilegenerator(
683 683 b'dirstate',
684 684 (self._filename,),
685 685 self._writedirstate,
686 686 location=b'plain',
687 687 )
688 688 return
689 689
690 690 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
691 691 self._writedirstate(st)
692 692
693 693 def addparentchangecallback(self, category, callback):
694 694 """add a callback to be called when the wd parents are changed
695 695
696 696 Callback will be called with the following arguments:
697 697 dirstate, (oldp1, oldp2), (newp1, newp2)
698 698
699 699 Category is a unique identifier to allow overwriting an old callback
700 700 with a newer callback.
701 701 """
702 702 self._plchangecallbacks[category] = callback
703 703
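# A sketch of registering such a callback (the b'myext' category name and the
# logging body are purely illustrative):
#
#     def logmove(dirstate, oldparents, newparents):
#         dirstate._ui.debug(b'parents moved: %r -> %r\n' % (oldparents, newparents))
#
#     repo.dirstate.addparentchangecallback(b'myext', logmove)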
704 704 def _writedirstate(self, st):
705 705 # notify callbacks about parents change
706 706 if self._origpl is not None and self._origpl != self._pl:
707 707 for c, callback in sorted(
708 708 pycompat.iteritems(self._plchangecallbacks)
709 709 ):
710 710 callback(self, self._origpl, self._pl)
711 711 self._origpl = None
712 712 # use the modification time of the newly created temporary file as the
713 713 # filesystem's notion of 'now'
714 714 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
715 715
716 716 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
717 717 # the timestamp of each entry in the dirstate, because of 'now > mtime'
718 718 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
719 719 if delaywrite > 0:
720 720 # do we have any files to delay for?
721 721 for f, e in pycompat.iteritems(self._map):
722 722 if e[0] == b'n' and e[3] == now:
723 723 import time # to avoid useless import
724 724
725 725 # rather than sleep n seconds, sleep until the next
726 726 # multiple of n seconds
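# (for example, with delaywrite=2 and clock=13.4 the window is
# start=12, end=14, so we sleep 0.6s and then treat 14 as "now")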
727 727 clock = time.time()
728 728 start = int(clock) - (int(clock) % delaywrite)
729 729 end = start + delaywrite
730 730 time.sleep(end - clock)
731 731 now = end # trust our estimate that the end is near now
732 732 break
733 733
734 734 self._map.write(st, now)
735 735 self._lastnormaltime = 0
736 736 self._dirty = False
737 737
738 738 def _dirignore(self, f):
739 739 if self._ignore(f):
740 740 return True
741 741 for p in pathutil.finddirs(f):
742 742 if self._ignore(p):
743 743 return True
744 744 return False
745 745
746 746 def _ignorefiles(self):
747 747 files = []
748 748 if os.path.exists(self._join(b'.hgignore')):
749 749 files.append(self._join(b'.hgignore'))
750 750 for name, path in self._ui.configitems(b"ui"):
751 751 if name == b'ignore' or name.startswith(b'ignore.'):
752 752 # we need to use os.path.join here rather than self._join
753 753 # because path is arbitrary and user-specified
754 754 files.append(os.path.join(self._rootdir, util.expandpath(path)))
755 755 return files
756 756
757 757 def _ignorefileandline(self, f):
758 758 files = collections.deque(self._ignorefiles())
759 759 visited = set()
760 760 while files:
761 761 i = files.popleft()
762 762 patterns = matchmod.readpatternfile(
763 763 i, self._ui.warn, sourceinfo=True
764 764 )
765 765 for pattern, lineno, line in patterns:
766 766 kind, p = matchmod._patsplit(pattern, b'glob')
767 767 if kind == b"subinclude":
768 768 if p not in visited:
769 769 files.append(p)
770 770 continue
771 771 m = matchmod.match(
772 772 self._root, b'', [], [pattern], warn=self._ui.warn
773 773 )
774 774 if m(f):
775 775 return (i, lineno, line)
776 776 visited.add(i)
777 777 return (None, -1, b"")
778 778
779 779 def _walkexplicit(self, match, subrepos):
780 780 """Get stat data about the files explicitly specified by match.
781 781
782 782 Return a triple (results, dirsfound, dirsnotfound).
783 783 - results is a mapping from filename to stat result. It also contains
784 784 listings mapping subrepos and .hg to None.
785 785 - dirsfound is a list of files found to be directories.
786 786 - dirsnotfound is a list of files that the dirstate thinks are
787 787 directories and that were not found."""
788 788
789 789 def badtype(mode):
790 790 kind = _(b'unknown')
791 791 if stat.S_ISCHR(mode):
792 792 kind = _(b'character device')
793 793 elif stat.S_ISBLK(mode):
794 794 kind = _(b'block device')
795 795 elif stat.S_ISFIFO(mode):
796 796 kind = _(b'fifo')
797 797 elif stat.S_ISSOCK(mode):
798 798 kind = _(b'socket')
799 799 elif stat.S_ISDIR(mode):
800 800 kind = _(b'directory')
801 801 return _(b'unsupported file type (type is %s)') % kind
802 802
803 803 badfn = match.bad
804 804 dmap = self._map
805 805 lstat = os.lstat
806 806 getkind = stat.S_IFMT
807 807 dirkind = stat.S_IFDIR
808 808 regkind = stat.S_IFREG
809 809 lnkkind = stat.S_IFLNK
810 810 join = self._join
811 811 dirsfound = []
812 812 foundadd = dirsfound.append
813 813 dirsnotfound = []
814 814 notfoundadd = dirsnotfound.append
815 815
816 816 if not match.isexact() and self._checkcase:
817 817 normalize = self._normalize
818 818 else:
819 819 normalize = None
820 820
821 821 files = sorted(match.files())
822 822 subrepos.sort()
823 823 i, j = 0, 0
824 824 while i < len(files) and j < len(subrepos):
825 825 subpath = subrepos[j] + b"/"
826 826 if files[i] < subpath:
827 827 i += 1
828 828 continue
829 829 while i < len(files) and files[i].startswith(subpath):
830 830 del files[i]
831 831 j += 1
832 832
833 833 if not files or b'' in files:
834 834 files = [b'']
835 835 # constructing the foldmap is expensive, so don't do it for the
836 836 # common case where files is ['']
837 837 normalize = None
838 838 results = dict.fromkeys(subrepos)
839 839 results[b'.hg'] = None
840 840
841 841 for ff in files:
842 842 if normalize:
843 843 nf = normalize(ff, False, True)
844 844 else:
845 845 nf = ff
846 846 if nf in results:
847 847 continue
848 848
849 849 try:
850 850 st = lstat(join(nf))
851 851 kind = getkind(st.st_mode)
852 852 if kind == dirkind:
853 853 if nf in dmap:
854 854 # file replaced by dir on disk but still in dirstate
855 855 results[nf] = None
856 856 foundadd((nf, ff))
857 857 elif kind == regkind or kind == lnkkind:
858 858 results[nf] = st
859 859 else:
860 860 badfn(ff, badtype(kind))
861 861 if nf in dmap:
862 862 results[nf] = None
863 863 except OSError as inst: # nf not found on disk - it is dirstate only
864 864 if nf in dmap: # does it exactly match a missing file?
865 865 results[nf] = None
866 866 else: # does it match a missing directory?
867 867 if self._map.hasdir(nf):
868 868 notfoundadd(nf)
869 869 else:
870 870 badfn(ff, encoding.strtolocal(inst.strerror))
871 871
872 872 # match.files() may contain explicitly-specified paths that shouldn't
873 873 # be taken; drop them from the list of files found. dirsfound/notfound
874 874 # aren't filtered here because they will be tested later.
875 875 if match.anypats():
876 876 for f in list(results):
877 877 if f == b'.hg' or f in subrepos:
878 878 # keep sentinel to disable further out-of-repo walks
879 879 continue
880 880 if not match(f):
881 881 del results[f]
882 882
883 883 # Case insensitive filesystems cannot rely on lstat() failing to detect
884 884 # a case-only rename. Prune the stat object for any file that does not
885 885 # match the case in the filesystem, if there are multiple files that
886 886 # normalize to the same path.
887 887 if match.isexact() and self._checkcase:
888 888 normed = {}
889 889
890 890 for f, st in pycompat.iteritems(results):
891 891 if st is None:
892 892 continue
893 893
894 894 nc = util.normcase(f)
895 895 paths = normed.get(nc)
896 896
897 897 if paths is None:
898 898 paths = set()
899 899 normed[nc] = paths
900 900
901 901 paths.add(f)
902 902
903 903 for norm, paths in pycompat.iteritems(normed):
904 904 if len(paths) > 1:
905 905 for path in paths:
906 906 folded = self._discoverpath(
907 907 path, norm, True, None, self._map.dirfoldmap
908 908 )
909 909 if path != folded:
910 910 results[path] = None
911 911
912 912 return results, dirsfound, dirsnotfound
913 913
914 914 def walk(self, match, subrepos, unknown, ignored, full=True):
915 915 """
916 916 Walk recursively through the directory tree, finding all files
917 917 matched by match.
918 918
919 919 If full is False, maybe skip some known-clean files.
920 920
921 921 Return a dict mapping filename to stat-like object (either
922 922 mercurial.osutil.stat instance or return value of os.stat()).
923 923
924 924 """
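# The result is keyed by repo-relative filename; a sketch of its shape,
# assuming matchmod.always() and no subrepos:
#
#     results = repo.dirstate.walk(matchmod.always(), [], True, False)
#     # {b'modified.txt': <stat-like object>, b'tracked-but-missing.txt': None, ...}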
925 925 # full is a flag that extensions that hook into walk can use -- this
926 926 # implementation doesn't use it at all. This satisfies the contract
927 927 # because we only guarantee a "maybe".
928 928
929 929 if ignored:
930 930 ignore = util.never
931 931 dirignore = util.never
932 932 elif unknown:
933 933 ignore = self._ignore
934 934 dirignore = self._dirignore
935 935 else:
936 936 # if not unknown and not ignored, drop dir recursion and step 2
937 937 ignore = util.always
938 938 dirignore = util.always
939 939
940 940 matchfn = match.matchfn
941 941 matchalways = match.always()
942 942 matchtdir = match.traversedir
943 943 dmap = self._map
944 944 listdir = util.listdir
945 945 lstat = os.lstat
946 946 dirkind = stat.S_IFDIR
947 947 regkind = stat.S_IFREG
948 948 lnkkind = stat.S_IFLNK
949 949 join = self._join
950 950
951 951 exact = skipstep3 = False
952 952 if match.isexact(): # match.exact
953 953 exact = True
954 954 dirignore = util.always # skip step 2
955 955 elif match.prefix(): # match.match, no patterns
956 956 skipstep3 = True
957 957
958 958 if not exact and self._checkcase:
959 959 normalize = self._normalize
960 960 normalizefile = self._normalizefile
961 961 skipstep3 = False
962 962 else:
963 963 normalize = self._normalize
964 964 normalizefile = None
965 965
966 966 # step 1: find all explicit files
967 967 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
968 968 if matchtdir:
969 969 for d in work:
970 970 matchtdir(d[0])
971 971 for d in dirsnotfound:
972 972 matchtdir(d)
973 973
974 974 skipstep3 = skipstep3 and not (work or dirsnotfound)
975 975 work = [d for d in work if not dirignore(d[0])]
976 976
977 977 # step 2: visit subdirectories
978 978 def traverse(work, alreadynormed):
979 979 wadd = work.append
980 980 while work:
981 981 tracing.counter('dirstate.walk work', len(work))
982 982 nd = work.pop()
983 983 visitentries = match.visitchildrenset(nd)
984 984 if not visitentries:
985 985 continue
986 986 if visitentries == b'this' or visitentries == b'all':
987 987 visitentries = None
988 988 skip = None
989 989 if nd != b'':
990 990 skip = b'.hg'
991 991 try:
992 992 with tracing.log('dirstate.walk.traverse listdir %s', nd):
993 993 entries = listdir(join(nd), stat=True, skip=skip)
994 994 except OSError as inst:
995 995 if inst.errno in (errno.EACCES, errno.ENOENT):
996 996 match.bad(
997 997 self.pathto(nd), encoding.strtolocal(inst.strerror)
998 998 )
999 999 continue
1000 1000 raise
1001 1001 for f, kind, st in entries:
1002 1002 # Some matchers may return files in the visitentries set,
1003 1003 # instead of 'this', if the matcher explicitly mentions them
1004 1004 # and is not an exactmatcher. This is acceptable; we do not
1005 1005 # make any hard assumptions about file-or-directory below
1006 1006 # based on the presence of `f` in visitentries. If
1007 1007 # visitchildrenset returned a set, we can always skip the
1008 1008 # entries *not* in the set it provided regardless of whether
1009 1009 # they're actually a file or a directory.
1010 1010 if visitentries and f not in visitentries:
1011 1011 continue
1012 1012 if normalizefile:
1013 1013 # even though f might be a directory, we're only
1014 1014 # interested in comparing it to files currently in the
1015 1015 # dmap -- therefore normalizefile is enough
1016 1016 nf = normalizefile(
1017 1017 nd and (nd + b"/" + f) or f, True, True
1018 1018 )
1019 1019 else:
1020 1020 nf = nd and (nd + b"/" + f) or f
1021 1021 if nf not in results:
1022 1022 if kind == dirkind:
1023 1023 if not ignore(nf):
1024 1024 if matchtdir:
1025 1025 matchtdir(nf)
1026 1026 wadd(nf)
1027 1027 if nf in dmap and (matchalways or matchfn(nf)):
1028 1028 results[nf] = None
1029 1029 elif kind == regkind or kind == lnkkind:
1030 1030 if nf in dmap:
1031 1031 if matchalways or matchfn(nf):
1032 1032 results[nf] = st
1033 1033 elif (matchalways or matchfn(nf)) and not ignore(
1034 1034 nf
1035 1035 ):
1036 1036 # unknown file -- normalize if necessary
1037 1037 if not alreadynormed:
1038 1038 nf = normalize(nf, False, True)
1039 1039 results[nf] = st
1040 1040 elif nf in dmap and (matchalways or matchfn(nf)):
1041 1041 results[nf] = None
1042 1042
1043 1043 for nd, d in work:
1044 1044 # alreadynormed means that processwork doesn't have to do any
1045 1045 # expensive directory normalization
1046 1046 alreadynormed = not normalize or nd == d
1047 1047 traverse([d], alreadynormed)
1048 1048
1049 1049 for s in subrepos:
1050 1050 del results[s]
1051 1051 del results[b'.hg']
1052 1052
1053 1053 # step 3: visit remaining files from dmap
1054 1054 if not skipstep3 and not exact:
1055 1055 # If a dmap file is not in results yet, it was either
1056 1056 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1057 1057 # symlink directory.
1058 1058 if not results and matchalways:
1059 1059 visit = [f for f in dmap]
1060 1060 else:
1061 1061 visit = [f for f in dmap if f not in results and matchfn(f)]
1062 1062 visit.sort()
1063 1063
1064 1064 if unknown:
1065 1065 # unknown == True means we walked all dirs under the roots
1066 1066 # that weren't ignored, and everything that matched was stat'ed
1067 1067 # and is already in results.
1068 1068 # The rest must thus be ignored or under a symlink.
1069 1069 audit_path = pathutil.pathauditor(self._root, cached=True)
1070 1070
1071 1071 for nf in iter(visit):
1072 1072 # If a stat for the same file was already added with a
1073 1073 # different case, don't add one for this, since that would
1074 1074 # make it appear as if the file exists under both names
1075 1075 # on disk.
1076 1076 if (
1077 1077 normalizefile
1078 1078 and normalizefile(nf, True, True) in results
1079 1079 ):
1080 1080 results[nf] = None
1081 1081 # Report ignored items in the dmap as long as they are not
1082 1082 # under a symlink directory.
1083 1083 elif audit_path.check(nf):
1084 1084 try:
1085 1085 results[nf] = lstat(join(nf))
1086 1086 # file was just ignored, no links, and exists
1087 1087 except OSError:
1088 1088 # file doesn't exist
1089 1089 results[nf] = None
1090 1090 else:
1091 1091 # It's either missing or under a symlink directory
1092 1092 # which we in this case report as missing
1093 1093 results[nf] = None
1094 1094 else:
1095 1095 # We may not have walked the full directory tree above,
1096 1096 # so stat and check everything we missed.
1097 1097 iv = iter(visit)
1098 1098 for st in util.statfiles([join(i) for i in visit]):
1099 1099 results[next(iv)] = st
1100 1100 return results
1101 1101
1102 1102 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1103 1103 # Force Rayon (Rust parallelism library) to respect the number of
1104 1104 # workers. This is a temporary workaround until Rust code knows
1105 1105 # how to read the config file.
1106 1106 numcpus = self._ui.configint(b"worker", b"numcpus")
1107 1107 if numcpus is not None:
1108 1108 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1109 1109
1110 1110 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1111 1111 if not workers_enabled:
1112 1112 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1113 1113
1114 1114 (
1115 1115 lookup,
1116 1116 modified,
1117 1117 added,
1118 1118 removed,
1119 1119 deleted,
1120 1120 clean,
1121 1121 ignored,
1122 1122 unknown,
1123 1123 warnings,
1124 1124 bad,
1125 1125 traversed,
1126 1126 ) = rustmod.status(
1127 1127 self._map._rustmap,
1128 1128 matcher,
1129 1129 self._rootdir,
1130 1130 self._ignorefiles(),
1131 1131 self._checkexec,
1132 1132 self._lastnormaltime,
1133 1133 bool(list_clean),
1134 1134 bool(list_ignored),
1135 1135 bool(list_unknown),
1136 1136 bool(matcher.traversedir),
1137 1137 )
1138 1138
1139 1139 if matcher.traversedir:
1140 1140 for dir in traversed:
1141 1141 matcher.traversedir(dir)
1142 1142
1143 1143 if self._ui.warn:
1144 1144 for item in warnings:
1145 1145 if isinstance(item, tuple):
1146 1146 file_path, syntax = item
1147 1147 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1148 1148 file_path,
1149 1149 syntax,
1150 1150 )
1151 1151 self._ui.warn(msg)
1152 1152 else:
1153 1153 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1154 1154 self._ui.warn(
1155 1155 msg
1156 1156 % (
1157 1157 pathutil.canonpath(
1158 1158 self._rootdir, self._rootdir, item
1159 1159 ),
1160 1160 b"No such file or directory",
1161 1161 )
1162 1162 )
1163 1163
1164 1164 for (fn, message) in bad:
1165 1165 matcher.bad(fn, encoding.strtolocal(message))
1166 1166
1167 1167 status = scmutil.status(
1168 1168 modified=modified,
1169 1169 added=added,
1170 1170 removed=removed,
1171 1171 deleted=deleted,
1172 1172 unknown=unknown,
1173 1173 ignored=ignored,
1174 1174 clean=clean,
1175 1175 )
1176 1176 return (lookup, status)
1177 1177
1178 1178 def status(self, match, subrepos, ignored, clean, unknown):
1179 1179 """Determine the status of the working copy relative to the
1180 1180 dirstate and return a pair of (unsure, status), where status is of type
1181 1181 scmutil.status and:
1182 1182
1183 1183 unsure:
1184 1184 files that might have been modified since the dirstate was
1185 1185 written, but need to be read to be sure (size is the same
1186 1186 but mtime differs)
1187 1187 status.modified:
1188 1188 files that have definitely been modified since the dirstate
1189 1189 was written (different size or mode)
1190 1190 status.clean:
1191 1191 files that have definitely not been modified since the
1192 1192 dirstate was written
1193 1193 """
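# A sketch of a typical call (unknown files requested, clean ones skipped):
#
#     unsure, st = repo.dirstate.status(
#         matchmod.always(), [], ignored=False, clean=False, unknown=True
#     )
#     # st.modified, st.added, st.removed, st.deleted, st.unknown, ...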
1194 1194 listignored, listclean, listunknown = ignored, clean, unknown
1195 1195 lookup, modified, added, unknown, ignored = [], [], [], [], []
1196 1196 removed, deleted, clean = [], [], []
1197 1197
1198 1198 dmap = self._map
1199 1199 dmap.preload()
1200 1200
1201 1201 use_rust = True
1202 1202
1203 1203 allowed_matchers = (
1204 1204 matchmod.alwaysmatcher,
1205 1205 matchmod.exactmatcher,
1206 1206 matchmod.includematcher,
1207 1207 )
1208 1208
1209 1209 if rustmod is None:
1210 1210 use_rust = False
1211 1211 elif self._checkcase:
1212 1212 # Case-insensitive filesystems are not handled yet
1213 1213 use_rust = False
1214 1214 elif subrepos:
1215 1215 use_rust = False
1216 1216 elif sparse.enabled:
1217 1217 use_rust = False
1218 1218 elif not isinstance(match, allowed_matchers):
1219 1219 # Some matchers have yet to be implemented
1220 1220 use_rust = False
1221 1221
1222 1222 if use_rust:
1223 1223 try:
1224 1224 return self._rust_status(
1225 1225 match, listclean, listignored, listunknown
1226 1226 )
1227 1227 except rustmod.FallbackError:
1228 1228 pass
1229 1229
1230 1230 def noop(f):
1231 1231 pass
1232 1232
1233 1233 dcontains = dmap.__contains__
1234 1234 dget = dmap.__getitem__
1235 1235 ladd = lookup.append # aka "unsure"
1236 1236 madd = modified.append
1237 1237 aadd = added.append
1238 1238 uadd = unknown.append if listunknown else noop
1239 1239 iadd = ignored.append if listignored else noop
1240 1240 radd = removed.append
1241 1241 dadd = deleted.append
1242 1242 cadd = clean.append if listclean else noop
1243 1243 mexact = match.exact
1244 1244 dirignore = self._dirignore
1245 1245 checkexec = self._checkexec
1246 1246 copymap = self._map.copymap
1247 1247 lastnormaltime = self._lastnormaltime
1248 1248
1249 1249 # We need to do full walks when either
1250 1250 # - we're listing all clean files, or
1251 1251 # - match.traversedir does something, because match.traversedir should
1252 1252 # be called for every dir in the working dir
1253 1253 full = listclean or match.traversedir is not None
1254 1254 for fn, st in pycompat.iteritems(
1255 1255 self.walk(match, subrepos, listunknown, listignored, full=full)
1256 1256 ):
1257 1257 if not dcontains(fn):
1258 1258 if (listignored or mexact(fn)) and dirignore(fn):
1259 1259 if listignored:
1260 1260 iadd(fn)
1261 1261 else:
1262 1262 uadd(fn)
1263 1263 continue
1264 1264
1265 1265 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1266 1266 # written like that for performance reasons. dmap[fn] is not a
1267 1267 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1268 1268 # opcode has fast paths when the value to be unpacked is a tuple or
1269 1269 # a list, but falls back to creating a full-fledged iterator in
1270 1270 # general. That is much slower than simply accessing and storing the
1271 1271 # tuple members one by one.
1272 1272 t = dget(fn)
1273 1273 state = t[0]
1274 1274 mode = t[1]
1275 1275 size = t[2]
1276 1276 time = t[3]
1277 1277
1278 1278 if not st and state in b"nma":
1279 1279 dadd(fn)
1280 1280 elif state == b'n':
1281 1281 if (
1282 1282 size >= 0
1283 1283 and (
1284 1284 (size != st.st_size and size != st.st_size & _rangemask)
1285 1285 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1286 1286 )
1287 1287 or size == -2 # other parent
1288 1288 or fn in copymap
1289 1289 ):
1290 1290 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1291 1291 # issue6456: Size returned may be longer due to
1292 1292 # encryption on EXT-4 fscrypt, undecided.
1293 1293 ladd(fn)
1294 1294 else:
1295 1295 madd(fn)
1296 1296 elif (
1297 1297 time != st[stat.ST_MTIME]
1298 1298 and time != st[stat.ST_MTIME] & _rangemask
1299 1299 ):
1300 1300 ladd(fn)
1301 1301 elif st[stat.ST_MTIME] == lastnormaltime:
1302 1302 # fn may have just been marked as normal and it may have
1303 1303 # changed in the same second without changing its size.
1304 1304 # This can happen if we quickly do multiple commits.
1305 1305 # Force lookup, so we don't miss such a racy file change.
1306 1306 ladd(fn)
1307 1307 elif listclean:
1308 1308 cadd(fn)
1309 1309 elif state == b'm':
1310 1310 madd(fn)
1311 1311 elif state == b'a':
1312 1312 aadd(fn)
1313 1313 elif state == b'r':
1314 1314 radd(fn)
1315 1315 status = scmutil.status(
1316 1316 modified, added, removed, deleted, unknown, ignored, clean
1317 1317 )
1318 1318 return (lookup, status)
1319 1319
1320 1320 def matches(self, match):
1321 1321 """
1322 1322 return files in the dirstate (in whatever state) filtered by match
1323 1323 """
1324 1324 dmap = self._map
1325 1325 if rustmod is not None:
1326 1326 dmap = self._map._rustmap
1327 1327
1328 1328 if match.always():
1329 1329 return dmap.keys()
1330 1330 files = match.files()
1331 1331 if match.isexact():
1332 1332 # fast path -- filter the other way around, since typically files is
1333 1333 # much smaller than dmap
1334 1334 return [f for f in files if f in dmap]
1335 1335 if match.prefix() and all(fn in dmap for fn in files):
1336 1336 # fast path -- all the values are known to be files, so just return
1337 1337 # that
1338 1338 return list(files)
1339 1339 return [f for f in dmap if match(f)]
1340 1340
1341 1341 def _actualfilename(self, tr):
1342 1342 if tr:
1343 1343 return self._pendingfilename
1344 1344 else:
1345 1345 return self._filename
1346 1346
1347 1347 def savebackup(self, tr, backupname):
1348 1348 '''Save current dirstate into backup file'''
1349 1349 filename = self._actualfilename(tr)
1350 1350 assert backupname != filename
1351 1351
1352 1352 # use '_writedirstate' instead of 'write' to write changes certainly,
1353 1353 # because the latter omits writing out if transaction is running.
1354 1354 # output file will be used to create backup of dirstate at this point.
1355 1355 if self._dirty or not self._opener.exists(filename):
1356 1356 self._writedirstate(
1357 1357 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1358 1358 )
1359 1359
1360 1360 if tr:
1361 1361 # ensure that subsequent tr.writepending returns True for
1362 1362 # changes written out above, even if dirstate is never
1363 1363 # changed after this
1364 1364 tr.addfilegenerator(
1365 1365 b'dirstate',
1366 1366 (self._filename,),
1367 1367 self._writedirstate,
1368 1368 location=b'plain',
1369 1369 )
1370 1370
1371 1371 # ensure that pending file written above is unlinked at
1372 1372 # failure, even if tr.writepending isn't invoked until the
1373 1373 # end of this transaction
1374 1374 tr.registertmp(filename, location=b'plain')
1375 1375
1376 1376 self._opener.tryunlink(backupname)
1377 1377 # hardlink backup is okay because _writedirstate is always called
1378 1378 # with an "atomictemp=True" file.
1379 1379 util.copyfile(
1380 1380 self._opener.join(filename),
1381 1381 self._opener.join(backupname),
1382 1382 hardlink=True,
1383 1383 )
1384 1384
1385 1385 def restorebackup(self, tr, backupname):
1386 1386 '''Restore dirstate by backup file'''
1387 1387 # this "invalidate()" prevents "wlock.release()" from writing
1388 1388 # changes of dirstate out after restoring from backup file
1389 1389 self.invalidate()
1390 1390 filename = self._actualfilename(tr)
1391 1391 o = self._opener
1392 1392 if util.samefile(o.join(backupname), o.join(filename)):
1393 1393 o.unlink(backupname)
1394 1394 else:
1395 1395 o.rename(backupname, filename, checkambig=True)
1396 1396
1397 1397 def clearbackup(self, tr, backupname):
1398 1398 '''Clear backup file'''
1399 1399 self._opener.unlink(backupname)
1400 1400
1401 1401
1402 1402 class dirstatemap(object):
1403 1403 """Map encapsulating the dirstate's contents.
1404 1404
1405 1405 The dirstate contains the following state:
1406 1406
1407 1407 - `identity` is the identity of the dirstate file, which can be used to
1408 1408 detect when changes have occurred to the dirstate file.
1409 1409
1410 1410 - `parents` is a pair containing the parents of the working copy. The
1411 1411 parents are updated by calling `setparents`.
1412 1412
1413 1413 - the state map maps filenames to tuples of (state, mode, size, mtime),
1414 1414 where state is a single character representing 'normal', 'added',
1415 1415 'removed', or 'merged'. It is read by treating the dirstate as a
1416 1416 dict. File state is updated by calling the `addfile`, `removefile` and
1417 1417 `dropfile` methods.
1418 1418
1419 1419 - `copymap` maps destination filenames to their source filename.
1420 1420
1421 1421 The dirstate also provides the following views onto the state:
1422 1422
1423 1423 - `nonnormalset` is a set of the filenames that have state other
1424 1424 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1425 1425
1426 1426 - `otherparentset` is a set of the filenames that are marked as coming
1427 1427 from the second parent when the dirstate is currently being merged.
1428 1428
1429 1429 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1430 1430 form that they appear as in the dirstate.
1431 1431
1432 1432 - `dirfoldmap` is a dict mapping normalized directory names to the
1433 1433 denormalized form that they appear as in the dirstate.
1434 1434 """
1435 1435
1436 1436 def __init__(self, ui, opener, root, nodeconstants):
1437 1437 self._ui = ui
1438 1438 self._opener = opener
1439 1439 self._root = root
1440 1440 self._filename = b'dirstate'
1441 1441 self._nodelen = 20
1442 1442 self._nodeconstants = nodeconstants
1443 1443
1444 1444 self._parents = None
1445 1445 self._dirtyparents = False
1446 1446
1447 1447 # for consistent view between _pl() and _read() invocations
1448 1448 self._pendingmode = None
1449 1449
1450 1450 @propertycache
1451 1451 def _map(self):
1452 1452 self._map = {}
1453 1453 self.read()
1454 1454 return self._map
1455 1455
1456 1456 @propertycache
1457 1457 def copymap(self):
1458 1458 self.copymap = {}
1459 1459 self._map
1460 1460 return self.copymap
1461 1461
1462 1462 def clear(self):
1463 1463 self._map.clear()
1464 1464 self.copymap.clear()
1465 1465 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1466 1466 util.clearcachedproperty(self, b"_dirs")
1467 1467 util.clearcachedproperty(self, b"_alldirs")
1468 1468 util.clearcachedproperty(self, b"filefoldmap")
1469 1469 util.clearcachedproperty(self, b"dirfoldmap")
1470 1470 util.clearcachedproperty(self, b"nonnormalset")
1471 1471 util.clearcachedproperty(self, b"otherparentset")
1472 1472
1473 1473 def items(self):
1474 1474 return pycompat.iteritems(self._map)
1475 1475
1476 1476 # forward for python2,3 compat
1477 1477 iteritems = items
1478 1478
1479 1479 def __len__(self):
1480 1480 return len(self._map)
1481 1481
1482 1482 def __iter__(self):
1483 1483 return iter(self._map)
1484 1484
1485 1485 def get(self, key, default=None):
1486 1486 return self._map.get(key, default)
1487 1487
1488 1488 def __contains__(self, key):
1489 1489 return key in self._map
1490 1490
1491 1491 def __getitem__(self, key):
1492 1492 return self._map[key]
1493 1493
1494 1494 def keys(self):
1495 1495 return self._map.keys()
1496 1496
1497 1497 def preload(self):
1498 1498 """Loads the underlying data, if it's not already loaded"""
1499 1499 self._map
1500 1500
1501 1501 def addfile(self, f, oldstate, state, mode, size, mtime):
1502 1502 """Add a tracked file to the dirstate."""
1503 1503 if oldstate in b"?r" and "_dirs" in self.__dict__:
1504 1504 self._dirs.addpath(f)
1505 1505 if oldstate == b"?" and "_alldirs" in self.__dict__:
1506 1506 self._alldirs.addpath(f)
1507 1507 self._map[f] = dirstatetuple(state, mode, size, mtime)
1508 1508 if state != b'n' or mtime == -1:
1509 1509 self.nonnormalset.add(f)
1510 1510 if size == -2:
1511 1511 self.otherparentset.add(f)
1512 1512
1513 1513 def removefile(self, f, oldstate, size):
1514 1514 """
1515 1515 Mark a file as removed in the dirstate.
1516 1516
1517 1517 The `size` parameter is used to store sentinel values that indicate
1518 1518 the file's previous state. In the future, we should refactor this
1519 1519 to be more explicit about what that state is.
1520 1520 """
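# The sentinel sizes mirror what dirstate.remove() passes in:
#   size == -1 -> the file was in merge state ('m') before removal
#   size == -2 -> the file came from the other parent before removal
#   size == 0  -> a plain removal; the caller also drops any copy record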
1521 1521 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1522 1522 self._dirs.delpath(f)
1523 1523 if oldstate == b"?" and "_alldirs" in self.__dict__:
1524 1524 self._alldirs.addpath(f)
1525 1525 if "filefoldmap" in self.__dict__:
1526 1526 normed = util.normcase(f)
1527 1527 self.filefoldmap.pop(normed, None)
1528 1528 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1529 1529 self.nonnormalset.add(f)
1530 1530
1531 1531 def dropfile(self, f, oldstate):
1532 1532 """
1533 1533 Remove a file from the dirstate. Returns True if the file was
1534 1534 previously recorded.
1535 1535 """
1536 1536 exists = self._map.pop(f, None) is not None
1537 1537 if exists:
1538 1538 if oldstate != b"r" and "_dirs" in self.__dict__:
1539 1539 self._dirs.delpath(f)
1540 1540 if "_alldirs" in self.__dict__:
1541 1541 self._alldirs.delpath(f)
1542 1542 if "filefoldmap" in self.__dict__:
1543 1543 normed = util.normcase(f)
1544 1544 self.filefoldmap.pop(normed, None)
1545 1545 self.nonnormalset.discard(f)
1546 1546 return exists
1547 1547
1548 1548 def clearambiguoustimes(self, files, now):
1549 1549 for f in files:
1550 1550 e = self.get(f)
1551 1551 if e is not None and e[0] == b'n' and e[3] == now:
1552 1552 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1553 1553 self.nonnormalset.add(f)
1554 1554
1555 1555 def nonnormalentries(self):
1556 1556 '''Compute the nonnormal dirstate entries from the dmap'''
1557 1557 try:
1558 1558 return parsers.nonnormalotherparententries(self._map)
1559 1559 except AttributeError:
1560 1560 nonnorm = set()
1561 1561 otherparent = set()
1562 1562 for fname, e in pycompat.iteritems(self._map):
1563 1563 if e[0] != b'n' or e[3] == -1:
1564 1564 nonnorm.add(fname)
1565 1565 if e[0] == b'n' and e[2] == -2:
1566 1566 otherparent.add(fname)
1567 1567 return nonnorm, otherparent
1568 1568
1569 1569 @propertycache
1570 1570 def filefoldmap(self):
1571 1571 """Returns a dictionary mapping normalized case paths to their
1572 1572 non-normalized versions.
1573 1573 """
1574 1574 try:
1575 1575 makefilefoldmap = parsers.make_file_foldmap
1576 1576 except AttributeError:
1577 1577 pass
1578 1578 else:
1579 1579 return makefilefoldmap(
1580 1580 self._map, util.normcasespec, util.normcasefallback
1581 1581 )
1582 1582
1583 1583 f = {}
1584 1584 normcase = util.normcase
1585 1585 for name, s in pycompat.iteritems(self._map):
1586 1586 if s[0] != b'r':
1587 1587 f[normcase(name)] = name
1588 1588 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1589 1589 return f
1590 1590
1591 1591 def hastrackeddir(self, d):
1592 1592 """
1593 1593 Returns True if the dirstate contains a tracked (not removed) file
1594 1594 in this directory.
1595 1595 """
1596 1596 return d in self._dirs
1597 1597
1598 1598 def hasdir(self, d):
1599 1599 """
1600 1600 Returns True if the dirstate contains a file (tracked or removed)
1601 1601 in this directory.
1602 1602 """
1603 1603 return d in self._alldirs
1604 1604
1605 1605 @propertycache
1606 1606 def _dirs(self):
1607 1607 return pathutil.dirs(self._map, b'r')
1608 1608
1609 1609 @propertycache
1610 1610 def _alldirs(self):
1611 1611 return pathutil.dirs(self._map)
1612 1612
1613 1613 def _opendirstatefile(self):
1614 1614 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1615 1615 if self._pendingmode is not None and self._pendingmode != mode:
1616 1616 fp.close()
1617 1617 raise error.Abort(
1618 1618 _(b'working directory state may be changed parallelly')
1619 1619 )
1620 1620 self._pendingmode = mode
1621 1621 return fp
1622 1622
1623 1623 def parents(self):
1624 1624 if not self._parents:
1625 1625 try:
1626 1626 fp = self._opendirstatefile()
1627 1627 st = fp.read(2 * self._nodelen)
1628 1628 fp.close()
1629 1629 except IOError as err:
1630 1630 if err.errno != errno.ENOENT:
1631 1631 raise
1632 1632 # File doesn't exist, so the current state is empty
1633 1633 st = b''
1634 1634
1635 1635 l = len(st)
1636 1636 if l == self._nodelen * 2:
1637 1637 self._parents = (
1638 1638 st[: self._nodelen],
1639 1639 st[self._nodelen : 2 * self._nodelen],
1640 1640 )
1641 1641 elif l == 0:
1642 1642 self._parents = (
1643 1643 self._nodeconstants.nullid,
1644 1644 self._nodeconstants.nullid,
1645 1645 )
1646 1646 else:
1647 1647 raise error.Abort(
1648 1648 _(b'working directory state appears damaged!')
1649 1649 )
1650 1650
1651 1651 return self._parents
1652 1652
1653 1653 def setparents(self, p1, p2):
1654 1654 self._parents = (p1, p2)
1655 1655 self._dirtyparents = True
1656 1656
1657 1657 def read(self):
1658 1658 # ignore HG_PENDING because identity is used only for writing
1659 1659 self.identity = util.filestat.frompath(
1660 1660 self._opener.join(self._filename)
1661 1661 )
1662 1662
1663 1663 try:
1664 1664 fp = self._opendirstatefile()
1665 1665 try:
1666 1666 st = fp.read()
1667 1667 finally:
1668 1668 fp.close()
1669 1669 except IOError as err:
1670 1670 if err.errno != errno.ENOENT:
1671 1671 raise
1672 1672 return
1673 1673 if not st:
1674 1674 return
1675 1675
1676 1676 if util.safehasattr(parsers, b'dict_new_presized'):
1677 1677 # Make an estimate of the number of files in the dirstate based on
1678 1678 # its size. This trades wasting some memory for avoiding costly
1679 1679         # resizes. Each entry has a prefix of 17 bytes followed by one or
1680 1680 # two path names. Studies on various large-scale real-world repositories
1681 1681 # found 54 bytes a reasonable upper limit for the average path names.
1682 1682 # Copy entries are ignored for the sake of this estimate.
1683 1683 self._map = parsers.dict_new_presized(len(st) // 71)
1684 1684
1685 1685 # Python's garbage collector triggers a GC each time a certain number
1686 1686 # of container objects (the number being defined by
1687 1687 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1688 1688 # for each file in the dirstate. The C version then immediately marks
1689 1689 # them as not to be tracked by the collector. However, this has no
1690 1690 # effect on when GCs are triggered, only on what objects the GC looks
1691 1691 # into. This means that O(number of files) GCs are unavoidable.
1692 1692 # Depending on when in the process's lifetime the dirstate is parsed,
1693 1693 # this can get very expensive. As a workaround, disable GC while
1694 1694 # parsing the dirstate.
1695 1695 #
1696 1696 # (we cannot decorate the function directly since it is in a C module)
1697 1697 parse_dirstate = util.nogc(parsers.parse_dirstate)
1698 1698 p = parse_dirstate(self._map, self.copymap, st)
1699 1699 if not self._dirtyparents:
1700 1700 self.setparents(*p)
1701 1701
1702 1702 # Avoid excess attribute lookups by fast pathing certain checks
1703 1703 self.__contains__ = self._map.__contains__
1704 1704 self.__getitem__ = self._map.__getitem__
1705 1705 self.get = self._map.get
1706 1706
1707 1707 def write(self, st, now):
1708 1708 st.write(
1709 1709 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1710 1710 )
1711 1711 st.close()
1712 1712 self._dirtyparents = False
1713 1713 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1714 1714
1715 1715 @propertycache
1716 1716 def nonnormalset(self):
1717 1717 nonnorm, otherparents = self.nonnormalentries()
1718 1718 self.otherparentset = otherparents
1719 1719 return nonnorm
1720 1720
1721 1721 @propertycache
1722 1722 def otherparentset(self):
1723 1723 nonnorm, otherparents = self.nonnormalentries()
1724 1724 self.nonnormalset = nonnorm
1725 1725 return otherparents
1726 1726
1727 1727 def non_normal_or_other_parent_paths(self):
1728 1728 return self.nonnormalset.union(self.otherparentset)
1729 1729
1730 1730 @propertycache
1731 1731 def identity(self):
1732 1732 self._map
1733 1733 return self.identity
1734 1734
1735 1735 @propertycache
1736 1736 def dirfoldmap(self):
1737 1737 f = {}
1738 1738 normcase = util.normcase
1739 1739 for name in self._dirs:
1740 1740 f[normcase(name)] = name
1741 1741 return f
1742 1742
1743 1743
1744 1744 if rustmod is not None:
1745 1745
1746 1746 class dirstatemap(object):
1747 1747 def __init__(self, ui, opener, root, nodeconstants):
1748 1748 self._nodeconstants = nodeconstants
1749 1749 self._ui = ui
1750 1750 self._opener = opener
1751 1751 self._root = root
1752 1752 self._filename = b'dirstate'
1753 self._nodelen = 20
1753 1754 self._parents = None
1754 1755 self._dirtyparents = False
1755 1756
1756 1757 # for consistent view between _pl() and _read() invocations
1757 1758 self._pendingmode = None
1758 1759
1759 1760 def addfile(self, *args, **kwargs):
1760 1761 return self._rustmap.addfile(*args, **kwargs)
1761 1762
1762 1763 def removefile(self, *args, **kwargs):
1763 1764 return self._rustmap.removefile(*args, **kwargs)
1764 1765
1765 1766 def dropfile(self, *args, **kwargs):
1766 1767 return self._rustmap.dropfile(*args, **kwargs)
1767 1768
1768 1769 def clearambiguoustimes(self, *args, **kwargs):
1769 1770 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1770 1771
1771 1772 def nonnormalentries(self):
1772 1773 return self._rustmap.nonnormalentries()
1773 1774
1774 1775 def get(self, *args, **kwargs):
1775 1776 return self._rustmap.get(*args, **kwargs)
1776 1777
1777 1778 @propertycache
1778 1779 def _rustmap(self):
1779 1780 """
1780 1781         Fills the DirstateMap when called.
1781 Use `self._inner_rustmap` if reading the dirstate is not necessary.
1782 """
1783 self._rustmap = self._inner_rustmap
1784 self.read()
1785 return self._rustmap
1786
1787 @propertycache
1788 def _inner_rustmap(self):
1789 """
1790 Does not fill the Dirstatemap when called. This allows for
1791 optimizations where only setting/getting the parents is needed.
1792 1782 """
1793 1783 use_dirstate_tree = self._ui.configbool(
1794 1784 b"experimental",
1795 1785 b"dirstate-tree.in-memory",
1796 1786 False,
1797 1787 )
1798 self._inner_rustmap = rustmod.DirstateMap(use_dirstate_tree)
1799 return self._inner_rustmap
1788 self._rustmap = rustmod.DirstateMap(use_dirstate_tree)
1789 self.read()
1790 return self._rustmap
1800 1791
1801 1792 @property
1802 1793 def copymap(self):
1803 1794 return self._rustmap.copymap()
1804 1795
1805 1796 def preload(self):
1806 1797 self._rustmap
1807 1798
1808 1799 def clear(self):
1809 1800 self._rustmap.clear()
1810 self._inner_rustmap.clear()
1811 1801 self.setparents(
1812 1802 self._nodeconstants.nullid, self._nodeconstants.nullid
1813 1803 )
1814 1804 util.clearcachedproperty(self, b"_dirs")
1815 1805 util.clearcachedproperty(self, b"_alldirs")
1816 1806 util.clearcachedproperty(self, b"dirfoldmap")
1817 1807
1818 1808 def items(self):
1819 1809 return self._rustmap.items()
1820 1810
1821 1811 def keys(self):
1822 1812 return iter(self._rustmap)
1823 1813
1824 1814 def __contains__(self, key):
1825 1815 return key in self._rustmap
1826 1816
1827 1817 def __getitem__(self, item):
1828 1818 return self._rustmap[item]
1829 1819
1830 1820 def __len__(self):
1831 1821 return len(self._rustmap)
1832 1822
1833 1823 def __iter__(self):
1834 1824 return iter(self._rustmap)
1835 1825
1836 1826 # forward for python2,3 compat
1837 1827 iteritems = items
1838 1828
1839 1829 def _opendirstatefile(self):
1840 1830 fp, mode = txnutil.trypending(
1841 1831 self._root, self._opener, self._filename
1842 1832 )
1843 1833 if self._pendingmode is not None and self._pendingmode != mode:
1844 1834 fp.close()
1845 1835 raise error.Abort(
1846 1836 _(b'working directory state may be changed parallelly')
1847 1837 )
1848 1838 self._pendingmode = mode
1849 1839 return fp
1850 1840
1851 1841 def setparents(self, p1, p2):
1852 self._rustmap.setparents(p1, p2)
1853 1842 self._parents = (p1, p2)
1854 1843 self._dirtyparents = True
1855 1844
1856 1845 def parents(self):
1857 1846 if not self._parents:
1858 1847 try:
1859 1848 fp = self._opendirstatefile()
1860 1849 st = fp.read(40)
1861 1850 fp.close()
1862 1851 except IOError as err:
1863 1852 if err.errno != errno.ENOENT:
1864 1853 raise
1865 1854 # File doesn't exist, so the current state is empty
1866 1855 st = b''
1867 1856
1868 try:
1869 self._parents = self._inner_rustmap.parents(st)
1870 except ValueError:
1857 l = len(st)
1858 if l == self._nodelen * 2:
1859 self._parents = (
1860 st[: self._nodelen],
1861 st[self._nodelen : 2 * self._nodelen],
1862 )
1863 elif l == 0:
1864 self._parents = (
1865 self._nodeconstants.nullid,
1866 self._nodeconstants.nullid,
1867 )
1868 else:
1871 1869 raise error.Abort(
1872 1870 _(b'working directory state appears damaged!')
1873 1871 )
1874 1872
1875 1873 return self._parents
1876 1874
1877 1875 def read(self):
1878 1876 # ignore HG_PENDING because identity is used only for writing
1879 1877 self.identity = util.filestat.frompath(
1880 1878 self._opener.join(self._filename)
1881 1879 )
1882 1880
1883 1881 try:
1884 1882 fp = self._opendirstatefile()
1885 1883 try:
1886 1884 st = fp.read()
1887 1885 finally:
1888 1886 fp.close()
1889 1887 except IOError as err:
1890 1888 if err.errno != errno.ENOENT:
1891 1889 raise
1892 1890 return
1893 1891 if not st:
1894 1892 return
1895 1893
1896 1894 parse_dirstate = util.nogc(self._rustmap.read)
1897 1895 parents = parse_dirstate(st)
1898 1896 if parents and not self._dirtyparents:
1899 1897 self.setparents(*parents)
1900 1898
1901 1899 self.__contains__ = self._rustmap.__contains__
1902 1900 self.__getitem__ = self._rustmap.__getitem__
1903 1901 self.get = self._rustmap.get
1904 1902
1905 1903 def write(self, st, now):
1906 1904 parents = self.parents()
1907 1905 st.write(self._rustmap.write(parents[0], parents[1], now))
1908 1906 st.close()
1909 1907 self._dirtyparents = False
1910 1908
1911 1909 @propertycache
1912 1910 def filefoldmap(self):
1913 1911 """Returns a dictionary mapping normalized case paths to their
1914 1912 non-normalized versions.
1915 1913 """
1916 1914 return self._rustmap.filefoldmapasdict()
1917 1915
1918 1916 def hastrackeddir(self, d):
1919 1917 self._dirs # Trigger Python's propertycache
1920 1918 return self._rustmap.hastrackeddir(d)
1921 1919
1922 1920 def hasdir(self, d):
1923 1921 self._dirs # Trigger Python's propertycache
1924 1922 return self._rustmap.hasdir(d)
1925 1923
1926 1924 @propertycache
1927 1925 def _dirs(self):
1928 1926 return self._rustmap.getdirs()
1929 1927
1930 1928 @propertycache
1931 1929 def _alldirs(self):
1932 1930 return self._rustmap.getalldirs()
1933 1931
1934 1932 @propertycache
1935 1933 def identity(self):
1936 1934 self._rustmap
1937 1935 return self.identity
1938 1936
1939 1937 @property
1940 1938 def nonnormalset(self):
1941 1939 nonnorm = self._rustmap.non_normal_entries()
1942 1940 return nonnorm
1943 1941
1944 1942 @propertycache
1945 1943 def otherparentset(self):
1946 1944 otherparents = self._rustmap.other_parent_entries()
1947 1945 return otherparents
1948 1946
1949 1947 def non_normal_or_other_parent_paths(self):
1950 1948 return self._rustmap.non_normal_or_other_parent_paths()
1951 1949
1952 1950 @propertycache
1953 1951 def dirfoldmap(self):
1954 1952 f = {}
1955 1953 normcase = util.normcase
1956 1954 for name in self._dirs:
1957 1955 f[normcase(name)] = name
1958 1956 return f
@@ -1,459 +1,408 b''
1 1 // dirstate_map.rs
2 2 //
3 3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 4 //
5 5 // This software may be used and distributed according to the terms of the
6 6 // GNU General Public License version 2 or any later version.
7 7
8 8 use crate::dirstate::parsers::clear_ambiguous_mtime;
9 9 use crate::dirstate::parsers::Timestamp;
10 use crate::errors::HgError;
11 use crate::revlog::node::NULL_NODE;
12 10 use crate::{
13 dirstate::{parsers::PARENT_SIZE, EntryState},
11 dirstate::EntryState,
14 12 pack_dirstate, parse_dirstate,
15 13 utils::hg_path::{HgPath, HgPathBuf},
16 14 CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateMapError,
17 15 DirstateParents, StateMap,
18 16 };
19 17 use micro_timer::timed;
20 18 use std::collections::HashSet;
21 use std::convert::TryInto;
22 19 use std::iter::FromIterator;
23 20 use std::ops::Deref;
24 21
25 22 #[derive(Default)]
26 23 pub struct DirstateMap {
27 24 state_map: StateMap,
28 25 pub copy_map: CopyMap,
29 26 pub dirs: Option<DirsMultiset>,
30 27 pub all_dirs: Option<DirsMultiset>,
31 28 non_normal_set: Option<HashSet<HgPathBuf>>,
32 29 other_parent_set: Option<HashSet<HgPathBuf>>,
33 parents: Option<DirstateParents>,
34 dirty_parents: bool,
35 30 }
36 31
37 32 /// Should only really be used in python interface code, for clarity
38 33 impl Deref for DirstateMap {
39 34 type Target = StateMap;
40 35
41 36 fn deref(&self) -> &Self::Target {
42 37 &self.state_map
43 38 }
44 39 }
45 40
46 41 impl FromIterator<(HgPathBuf, DirstateEntry)> for DirstateMap {
47 42 fn from_iter<I: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>(
48 43 iter: I,
49 44 ) -> Self {
50 45 Self {
51 46 state_map: iter.into_iter().collect(),
52 47 ..Self::default()
53 48 }
54 49 }
55 50 }
56 51
57 52 impl DirstateMap {
58 53 pub fn new() -> Self {
59 54 Self::default()
60 55 }
61 56
62 57 pub fn clear(&mut self) {
63 58 self.state_map = StateMap::default();
64 59 self.copy_map.clear();
65 60 self.non_normal_set = None;
66 61 self.other_parent_set = None;
67 self.set_parents(&DirstateParents {
68 p1: NULL_NODE,
69 p2: NULL_NODE,
70 })
71 62 }
72 63
73 64 /// Add a tracked file to the dirstate
74 65 pub fn add_file(
75 66 &mut self,
76 67 filename: &HgPath,
77 68 old_state: EntryState,
78 69 entry: DirstateEntry,
79 70 ) -> Result<(), DirstateMapError> {
80 71 if old_state == EntryState::Unknown || old_state == EntryState::Removed
81 72 {
82 73 if let Some(ref mut dirs) = self.dirs {
83 74 dirs.add_path(filename)?;
84 75 }
85 76 }
86 77 if old_state == EntryState::Unknown {
87 78 if let Some(ref mut all_dirs) = self.all_dirs {
88 79 all_dirs.add_path(filename)?;
89 80 }
90 81 }
91 82 self.state_map.insert(filename.to_owned(), entry.to_owned());
92 83
93 84 if entry.is_non_normal() {
94 85 self.get_non_normal_other_parent_entries()
95 86 .0
96 87 .insert(filename.to_owned());
97 88 }
98 89
99 90 if entry.is_from_other_parent() {
100 91 self.get_non_normal_other_parent_entries()
101 92 .1
102 93 .insert(filename.to_owned());
103 94 }
104 95 Ok(())
105 96 }
106 97
107 98 /// Mark a file as removed in the dirstate.
108 99 ///
109 100 /// The `size` parameter is used to store sentinel values that indicate
110 101 /// the file's previous state. In the future, we should refactor this
111 102 /// to be more explicit about what that state is.
112 103 pub fn remove_file(
113 104 &mut self,
114 105 filename: &HgPath,
115 106 old_state: EntryState,
116 107 size: i32,
117 108 ) -> Result<(), DirstateMapError> {
118 109 if old_state != EntryState::Unknown && old_state != EntryState::Removed
119 110 {
120 111 if let Some(ref mut dirs) = self.dirs {
121 112 dirs.delete_path(filename)?;
122 113 }
123 114 }
124 115 if old_state == EntryState::Unknown {
125 116 if let Some(ref mut all_dirs) = self.all_dirs {
126 117 all_dirs.add_path(filename)?;
127 118 }
128 119 }
129 120
130 121 self.state_map.insert(
131 122 filename.to_owned(),
132 123 DirstateEntry {
133 124 state: EntryState::Removed,
134 125 mode: 0,
135 126 size,
136 127 mtime: 0,
137 128 },
138 129 );
139 130 self.get_non_normal_other_parent_entries()
140 131 .0
141 132 .insert(filename.to_owned());
142 133 Ok(())
143 134 }
144 135
145 136 /// Remove a file from the dirstate.
146 137 /// Returns `true` if the file was previously recorded.
147 138 pub fn drop_file(
148 139 &mut self,
149 140 filename: &HgPath,
150 141 old_state: EntryState,
151 142 ) -> Result<bool, DirstateMapError> {
152 143 let exists = self.state_map.remove(filename).is_some();
153 144
154 145 if exists {
155 146 if old_state != EntryState::Removed {
156 147 if let Some(ref mut dirs) = self.dirs {
157 148 dirs.delete_path(filename)?;
158 149 }
159 150 }
160 151 if let Some(ref mut all_dirs) = self.all_dirs {
161 152 all_dirs.delete_path(filename)?;
162 153 }
163 154 }
164 155 self.get_non_normal_other_parent_entries()
165 156 .0
166 157 .remove(filename);
167 158
168 159 Ok(exists)
169 160 }
170 161
171 162 pub fn clear_ambiguous_times(
172 163 &mut self,
173 164 filenames: Vec<HgPathBuf>,
174 165 now: i32,
175 166 ) {
176 167 for filename in filenames {
177 168 if let Some(entry) = self.state_map.get_mut(&filename) {
178 169 if clear_ambiguous_mtime(entry, now) {
179 170 self.get_non_normal_other_parent_entries()
180 171 .0
181 172 .insert(filename.to_owned());
182 173 }
183 174 }
184 175 }
185 176 }
186 177
187 178 pub fn non_normal_entries_remove(&mut self, key: impl AsRef<HgPath>) {
188 179 self.get_non_normal_other_parent_entries()
189 180 .0
190 181 .remove(key.as_ref());
191 182 }
192 183
193 184 pub fn non_normal_entries_union(
194 185 &mut self,
195 186 other: HashSet<HgPathBuf>,
196 187 ) -> Vec<HgPathBuf> {
197 188 self.get_non_normal_other_parent_entries()
198 189 .0
199 190 .union(&other)
200 191 .map(ToOwned::to_owned)
201 192 .collect()
202 193 }
203 194
204 195 pub fn get_non_normal_other_parent_entries(
205 196 &mut self,
206 197 ) -> (&mut HashSet<HgPathBuf>, &mut HashSet<HgPathBuf>) {
207 198 self.set_non_normal_other_parent_entries(false);
208 199 (
209 200 self.non_normal_set.as_mut().unwrap(),
210 201 self.other_parent_set.as_mut().unwrap(),
211 202 )
212 203 }
213 204
214 205 /// Useful to get immutable references to those sets in contexts where
215 206 /// you only have an immutable reference to the `DirstateMap`, like when
216 207 /// sharing references with Python.
217 208 ///
218 209 /// TODO, get rid of this along with the other "setter/getter" stuff when
219 210 /// a nice typestate plan is defined.
220 211 ///
221 212 /// # Panics
222 213 ///
223 214 /// Will panic if either set is `None`.
224 215 pub fn get_non_normal_other_parent_entries_panic(
225 216 &self,
226 217 ) -> (&HashSet<HgPathBuf>, &HashSet<HgPathBuf>) {
227 218 (
228 219 self.non_normal_set.as_ref().unwrap(),
229 220 self.other_parent_set.as_ref().unwrap(),
230 221 )
231 222 }
232 223
233 224 pub fn set_non_normal_other_parent_entries(&mut self, force: bool) {
234 225 if !force
235 226 && self.non_normal_set.is_some()
236 227 && self.other_parent_set.is_some()
237 228 {
238 229 return;
239 230 }
240 231 let mut non_normal = HashSet::new();
241 232 let mut other_parent = HashSet::new();
242 233
243 234 for (filename, entry) in self.state_map.iter() {
244 235 if entry.is_non_normal() {
245 236 non_normal.insert(filename.to_owned());
246 237 }
247 238 if entry.is_from_other_parent() {
248 239 other_parent.insert(filename.to_owned());
249 240 }
250 241 }
251 242 self.non_normal_set = Some(non_normal);
252 243 self.other_parent_set = Some(other_parent);
253 244 }
254 245
255 246 /// Both of these setters and their uses appear to be the simplest way to
256 247 /// emulate a Python lazy property, but it is ugly and unidiomatic.
257 248 /// TODO One day, rewriting this struct using the typestate might be a
258 249 /// good idea.
259 250 pub fn set_all_dirs(&mut self) -> Result<(), DirstateMapError> {
260 251 if self.all_dirs.is_none() {
261 252 self.all_dirs = Some(DirsMultiset::from_dirstate(
262 253 self.state_map.iter(),
263 254 None,
264 255 )?);
265 256 }
266 257 Ok(())
267 258 }
268 259
269 260 pub fn set_dirs(&mut self) -> Result<(), DirstateMapError> {
270 261 if self.dirs.is_none() {
271 262 self.dirs = Some(DirsMultiset::from_dirstate(
272 263 &self.state_map,
273 264 Some(EntryState::Removed),
274 265 )?);
275 266 }
276 267 Ok(())
277 268 }
278 269
279 270 pub fn has_tracked_dir(
280 271 &mut self,
281 272 directory: &HgPath,
282 273 ) -> Result<bool, DirstateMapError> {
283 274 self.set_dirs()?;
284 275 Ok(self.dirs.as_ref().unwrap().contains(directory))
285 276 }
286 277
287 278 pub fn has_dir(
288 279 &mut self,
289 280 directory: &HgPath,
290 281 ) -> Result<bool, DirstateMapError> {
291 282 self.set_all_dirs()?;
292 283 Ok(self.all_dirs.as_ref().unwrap().contains(directory))
293 284 }
294 285
295 pub fn parents(
296 &mut self,
297 file_contents: &[u8],
298 ) -> Result<&DirstateParents, DirstateError> {
299 if let Some(ref parents) = self.parents {
300 return Ok(parents);
301 }
302 let parents;
303 if file_contents.len() == PARENT_SIZE * 2 {
304 parents = DirstateParents {
305 p1: file_contents[..PARENT_SIZE].try_into().unwrap(),
306 p2: file_contents[PARENT_SIZE..PARENT_SIZE * 2]
307 .try_into()
308 .unwrap(),
309 };
310 } else if file_contents.is_empty() {
311 parents = DirstateParents {
312 p1: NULL_NODE,
313 p2: NULL_NODE,
314 };
315 } else {
316 return Err(
317 HgError::corrupted("Dirstate appears to be damaged").into()
318 );
319 }
320
321 self.parents = Some(parents);
322 Ok(self.parents.as_ref().unwrap())
323 }
324
325 pub fn set_parents(&mut self, parents: &DirstateParents) {
326 self.parents = Some(parents.clone());
327 self.dirty_parents = true;
328 }
329
330 286 #[timed]
331 287 pub fn read<'a>(
332 288 &mut self,
333 289 file_contents: &'a [u8],
334 290 ) -> Result<Option<&'a DirstateParents>, DirstateError> {
335 291 if file_contents.is_empty() {
336 292 return Ok(None);
337 293 }
338 294
339 295 let (parents, entries, copies) = parse_dirstate(file_contents)?;
340 296 self.state_map.extend(
341 297 entries
342 298 .into_iter()
343 299 .map(|(path, entry)| (path.to_owned(), entry)),
344 300 );
345 301 self.copy_map.extend(
346 302 copies
347 303 .into_iter()
348 304 .map(|(path, copy)| (path.to_owned(), copy.to_owned())),
349 305 );
350
351 if !self.dirty_parents {
352 self.set_parents(&parents);
353 }
354
355 306 Ok(Some(parents))
356 307 }
357 308
358 309 pub fn pack(
359 310 &mut self,
360 311 parents: DirstateParents,
361 312 now: Timestamp,
362 313 ) -> Result<Vec<u8>, DirstateError> {
363 314 let packed =
364 315 pack_dirstate(&mut self.state_map, &self.copy_map, parents, now)?;
365 316
366 self.dirty_parents = false;
367
368 317 self.set_non_normal_other_parent_entries(true);
369 318 Ok(packed)
370 319 }
371 320 }
372 321
373 322 #[cfg(test)]
374 323 mod tests {
375 324 use super::*;
376 325
377 326 #[test]
378 327 fn test_dirs_multiset() {
379 328 let mut map = DirstateMap::new();
380 329 assert!(map.dirs.is_none());
381 330 assert!(map.all_dirs.is_none());
382 331
383 332 assert_eq!(map.has_dir(HgPath::new(b"nope")).unwrap(), false);
384 333 assert!(map.all_dirs.is_some());
385 334 assert!(map.dirs.is_none());
386 335
387 336 assert_eq!(map.has_tracked_dir(HgPath::new(b"nope")).unwrap(), false);
388 337 assert!(map.dirs.is_some());
389 338 }
390 339
391 340 #[test]
392 341 fn test_add_file() {
393 342 let mut map = DirstateMap::new();
394 343
395 344 assert_eq!(0, map.len());
396 345
397 346 map.add_file(
398 347 HgPath::new(b"meh"),
399 348 EntryState::Normal,
400 349 DirstateEntry {
401 350 state: EntryState::Normal,
402 351 mode: 1337,
403 352 mtime: 1337,
404 353 size: 1337,
405 354 },
406 355 )
407 356 .unwrap();
408 357
409 358 assert_eq!(1, map.len());
410 359 assert_eq!(0, map.get_non_normal_other_parent_entries().0.len());
411 360 assert_eq!(0, map.get_non_normal_other_parent_entries().1.len());
412 361 }
413 362
414 363 #[test]
415 364 fn test_non_normal_other_parent_entries() {
416 365 let mut map: DirstateMap = [
417 366 (b"f1", (EntryState::Removed, 1337, 1337, 1337)),
418 367 (b"f2", (EntryState::Normal, 1337, 1337, -1)),
419 368 (b"f3", (EntryState::Normal, 1337, 1337, 1337)),
420 369 (b"f4", (EntryState::Normal, 1337, -2, 1337)),
421 370 (b"f5", (EntryState::Added, 1337, 1337, 1337)),
422 371 (b"f6", (EntryState::Added, 1337, 1337, -1)),
423 372 (b"f7", (EntryState::Merged, 1337, 1337, -1)),
424 373 (b"f8", (EntryState::Merged, 1337, 1337, 1337)),
425 374 (b"f9", (EntryState::Merged, 1337, -2, 1337)),
426 375 (b"fa", (EntryState::Added, 1337, -2, 1337)),
427 376 (b"fb", (EntryState::Removed, 1337, -2, 1337)),
428 377 ]
429 378 .iter()
430 379 .map(|(fname, (state, mode, size, mtime))| {
431 380 (
432 381 HgPathBuf::from_bytes(fname.as_ref()),
433 382 DirstateEntry {
434 383 state: *state,
435 384 mode: *mode,
436 385 size: *size,
437 386 mtime: *mtime,
438 387 },
439 388 )
440 389 })
441 390 .collect();
442 391
443 392 let mut non_normal = [
444 393 b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb",
445 394 ]
446 395 .iter()
447 396 .map(|x| HgPathBuf::from_bytes(x.as_ref()))
448 397 .collect();
449 398
450 399 let mut other_parent = HashSet::new();
451 400 other_parent.insert(HgPathBuf::from_bytes(b"f4"));
452 401 let entries = map.get_non_normal_other_parent_entries();
453 402
454 403 assert_eq!(
455 404 (&mut non_normal, &mut other_parent),
456 405 (entries.0, entries.1)
457 406 );
458 407 }
459 408 }
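
A consequence of dropping the `parents`/`dirty_parents` fields above is that this flat `DirstateMap` no longer remembers parents at all: `read` merely reports what it found in the file, and `pack` takes the parents as an argument. The following is a minimal sketch of the new calling convention, not part of this change; it assumes `DirstateParents` implements `Clone` and re-imports `NULL_NODE`, which this patch removes from the file's own imports.

    use crate::revlog::node::NULL_NODE;

    fn roundtrip(
        map: &mut DirstateMap,
        on_disk: &[u8],
        now: Timestamp,
    ) -> Result<Vec<u8>, DirstateError> {
        // `read` still reports the parents stored in the file, but no longer
        // caches them on the map; the caller keeps track of them instead.
        let parents = map
            .read(on_disk)?
            .cloned()
            .unwrap_or(DirstateParents { p1: NULL_NODE, p2: NULL_NODE });
        // When writing, the caller passes the parents back in explicitly.
        map.pack(parents, now)
    }
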
@@ -1,647 +1,609 b''
1 1 use bytes_cast::BytesCast;
2 2 use micro_timer::timed;
3 3 use std::convert::TryInto;
4 4 use std::path::PathBuf;
5 5
6 6 use super::path_with_basename::WithBasename;
7 7 use crate::dirstate::parsers::clear_ambiguous_mtime;
8 8 use crate::dirstate::parsers::pack_entry;
9 9 use crate::dirstate::parsers::packed_entry_size;
10 10 use crate::dirstate::parsers::parse_dirstate_entries;
11 use crate::dirstate::parsers::parse_dirstate_parents;
12 11 use crate::dirstate::parsers::Timestamp;
13 12 use crate::matchers::Matcher;
14 use crate::revlog::node::NULL_NODE;
15 13 use crate::utils::hg_path::{HgPath, HgPathBuf};
16 14 use crate::CopyMapIter;
17 15 use crate::DirstateEntry;
18 16 use crate::DirstateError;
19 17 use crate::DirstateMapError;
20 18 use crate::DirstateParents;
21 19 use crate::DirstateStatus;
22 20 use crate::EntryState;
23 21 use crate::FastHashMap;
24 22 use crate::PatternFileWarning;
25 23 use crate::StateMapIter;
26 24 use crate::StatusError;
27 25 use crate::StatusOptions;
28 26
29 27 pub struct DirstateMap {
30 parents: Option<DirstateParents>,
31 dirty_parents: bool,
32 28 pub(super) root: ChildNodes,
33 29
34 30 /// Number of nodes anywhere in the tree that have `.entry.is_some()`.
35 31 nodes_with_entry_count: usize,
36 32
37 33 /// Number of nodes anywhere in the tree that have
38 34 /// `.copy_source.is_some()`.
39 35 nodes_with_copy_source_count: usize,
40 36 }
41 37
42 38 /// Using a plain `HgPathBuf` of the full path from the repository root as a
43 39 /// map key would also work: all paths in a given map have the same parent
44 40 /// path, so comparing full paths gives the same result as comparing base
45 41 /// names. However `BTreeMap` would waste time always re-comparing the same
46 42 /// string prefix.
47 43 pub(super) type ChildNodes = FastHashMap<WithBasename<HgPathBuf>, Node>;
48 44
49 45 /// Represents a file or a directory
50 46 #[derive(Default)]
51 47 pub(super) struct Node {
52 48 /// `None` for directories
53 49 pub(super) entry: Option<DirstateEntry>,
54 50
55 51 pub(super) copy_source: Option<HgPathBuf>,
56 52
57 53 pub(super) children: ChildNodes,
58 54
59 55 /// How many (non-inclusive) descendants of this node are tracked files
60 56 tracked_descendants_count: usize,
61 57 }
62 58
63 59 impl Node {
64 60 pub(super) fn state(&self) -> Option<EntryState> {
65 61 self.entry.as_ref().map(|entry| entry.state)
66 62 }
67 63 }
68 64
69 65 /// `(full_path, entry, copy_source)`
70 66 type NodeDataMut<'a> = (
71 67 &'a WithBasename<HgPathBuf>,
72 68 &'a mut Option<DirstateEntry>,
73 69 &'a mut Option<HgPathBuf>,
74 70 );
75 71
76 72 impl DirstateMap {
77 73 pub fn new() -> Self {
78 74 Self {
79 parents: None,
80 dirty_parents: false,
81 75 root: ChildNodes::default(),
82 76 nodes_with_entry_count: 0,
83 77 nodes_with_copy_source_count: 0,
84 78 }
85 79 }
86 80
87 81 fn get_node(&self, path: &HgPath) -> Option<&Node> {
88 82 let mut children = &self.root;
89 83 let mut components = path.components();
90 84 let mut component =
91 85 components.next().expect("expected at least one components");
92 86 loop {
93 87 let child = children.get(component)?;
94 88 if let Some(next_component) = components.next() {
95 89 component = next_component;
96 90 children = &child.children;
97 91 } else {
98 92 return Some(child);
99 93 }
100 94 }
101 95 }
102 96
103 97 /// Returns a mutable reference to the node at `path` if it exists
104 98 ///
105 99 /// This takes `root` instead of `&mut self` so that callers can mutate
106 100 /// other fields while the returned borrow is still valid
107 101 fn get_node_mut<'tree>(
108 102 root: &'tree mut ChildNodes,
109 103 path: &HgPath,
110 104 ) -> Option<&'tree mut Node> {
111 105 Self::get_node_mut_tracing_ancestors(root, path, |_| {})
112 106 }
113 107
114 108 /// Same as `get_node_mut`, and calls `each_ancestor` for each ancestor of
115 109 /// the node.
116 110 ///
117 111 /// Note that `each_ancestor` may be called (with what would be ancestors)
118 112 /// even if it turns out there is no node at `path`.
119 113 fn get_node_mut_tracing_ancestors<'tree>(
120 114 root: &'tree mut ChildNodes,
121 115 path: &HgPath,
122 116 mut each_ancestor: impl FnMut(&mut Node),
123 117 ) -> Option<&'tree mut Node> {
124 118 let mut children = root;
125 119 let mut components = path.components();
126 120 let mut component =
127 121             components.next().expect("expected at least one component");
128 122 loop {
129 123 let child = children.get_mut(component)?;
130 124 if let Some(next_component) = components.next() {
131 125 each_ancestor(child);
132 126 component = next_component;
133 127 children = &mut child.children;
134 128 } else {
135 129 return Some(child);
136 130 }
137 131 }
138 132 }
139 133
140 134 fn get_or_insert_node<'tree>(
141 135 root: &'tree mut ChildNodes,
142 136 path: &HgPath,
143 137 ) -> &'tree mut Node {
144 138 Self::get_or_insert_node_tracing_ancestors(root, path, |_| {})
145 139 }
146 140
147 141 fn get_or_insert_node_tracing_ancestors<'tree>(
148 142 root: &'tree mut ChildNodes,
149 143 path: &HgPath,
150 144 mut each_ancestor: impl FnMut(&mut Node),
151 145 ) -> &'tree mut Node {
152 146 let mut child_nodes = root;
153 147 let mut inclusive_ancestor_paths =
154 148 WithBasename::inclusive_ancestors_of(path);
155 149 let mut ancestor_path = inclusive_ancestor_paths
156 150 .next()
157 151 .expect("expected at least one inclusive ancestor");
158 152 loop {
159 153 // TODO: can we avoid allocating an owned key in cases where the
160 154 // map already contains that key, without introducing double
161 155 // lookup?
162 156 let child_node =
163 157 child_nodes.entry(ancestor_path.to_owned()).or_default();
164 158 if let Some(next) = inclusive_ancestor_paths.next() {
165 159 each_ancestor(child_node);
166 160 ancestor_path = next;
167 161 child_nodes = &mut child_node.children;
168 162 } else {
169 163 return child_node;
170 164 }
171 165 }
172 166 }
173 167
174 168 fn add_or_remove_file(
175 169 &mut self,
176 170 path: &HgPath,
177 171 old_state: EntryState,
178 172 new_entry: DirstateEntry,
179 173 ) {
180 174 let tracked_count_increment =
181 175 match (old_state.is_tracked(), new_entry.state.is_tracked()) {
182 176 (false, true) => 1,
183 177 (true, false) => -1,
184 178 _ => 0,
185 179 };
186 180
187 181 let node = Self::get_or_insert_node_tracing_ancestors(
188 182 &mut self.root,
189 183 path,
190 184 |ancestor| {
191 185 // We can’t use `+= increment` because the counter is unsigned,
192 186 // and we want debug builds to detect accidental underflow
193 187 // through zero
194 188 match tracked_count_increment {
195 189 1 => ancestor.tracked_descendants_count += 1,
196 190 -1 => ancestor.tracked_descendants_count -= 1,
197 191 _ => {}
198 192 }
199 193 },
200 194 );
201 195 if node.entry.is_none() {
202 196 self.nodes_with_entry_count += 1
203 197 }
204 198 node.entry = Some(new_entry)
205 199 }
206 200
207 201 fn iter_nodes<'a>(
208 202 &'a self,
209 203 ) -> impl Iterator<Item = (&'a WithBasename<HgPathBuf>, &'a Node)> + 'a
210 204 {
211 205 // Depth first tree traversal.
212 206 //
213 207 // If we could afford internal iteration and recursion,
214 208 // this would look like:
215 209 //
216 210 // ```
217 211 // fn traverse_children(
218 212 // children: &ChildNodes,
219 213 // each: &mut impl FnMut(&Node),
220 214 // ) {
221 215 // for child in children.values() {
222 216 // traverse_children(&child.children, each);
223 217 // each(child);
224 218 // }
225 219 // }
226 220 // ```
227 221 //
228 222 // However we want an external iterator and therefore can’t use the
229 223 // call stack. Use an explicit stack instead:
230 224 let mut stack = Vec::new();
231 225 let mut iter = self.root.iter();
232 226 std::iter::from_fn(move || {
233 227 while let Some((key, child_node)) = iter.next() {
234 228 // Pseudo-recursion
235 229 let new_iter = child_node.children.iter();
236 230 let old_iter = std::mem::replace(&mut iter, new_iter);
237 231 stack.push((key, child_node, old_iter));
238 232 }
239 233 // Found the end of a `children.iter()` iterator.
240 234 if let Some((key, child_node, next_iter)) = stack.pop() {
241 235 // "Return" from pseudo-recursion by restoring state from the
242 236 // explicit stack
243 237 iter = next_iter;
244 238
245 239 Some((key, child_node))
246 240 } else {
247 241 // Reached the bottom of the stack, we’re done
248 242 None
249 243 }
250 244 })
251 245 }
252 246
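
The explicit-stack traversal above can be illustrated on a self-contained toy tree. This sketch is purely illustrative, uses no type from this crate, and yields keys in the same children-before-parent order as `iter_nodes`.

    use std::collections::BTreeMap;

    struct Toy {
        children: BTreeMap<String, Toy>,
    }

    // Externally-driven traversal without recursion, mirroring `iter_nodes`:
    // descend while pushing the parent iterators, then pop to emit a node and
    // resume iterating over its siblings.
    fn iter_depth_first<'a>(
        root: &'a BTreeMap<String, Toy>,
    ) -> impl Iterator<Item = &'a str> + 'a {
        let mut stack = Vec::new();
        let mut iter = root.iter();
        std::iter::from_fn(move || {
            while let Some((key, child)) = iter.next() {
                let parent_iter =
                    std::mem::replace(&mut iter, child.children.iter());
                stack.push((key, parent_iter));
            }
            if let Some((key, parent_iter)) = stack.pop() {
                iter = parent_iter;
                Some(key.as_str())
            } else {
                None
            }
        })
    }
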
253 247 /// Mutable iterator for the `(entry, copy source)` of each node.
254 248 ///
255 249     /// It would not be safe to yield mutable references to nodes themselves
256 250 /// with `-> impl Iterator<Item = &mut Node>` since child nodes are
257 251 /// reachable from their ancestor nodes, potentially creating multiple
258 252 /// `&mut` references to a given node.
259 253 fn iter_node_data_mut<'a>(
260 254 &'a mut self,
261 255 ) -> impl Iterator<Item = NodeDataMut<'a>> + 'a {
262 256         // Explicit stack for pseudo-recursion, see `iter_nodes` above.
263 257 let mut stack = Vec::new();
264 258 let mut iter = self.root.iter_mut();
265 259 std::iter::from_fn(move || {
266 260 while let Some((key, child_node)) = iter.next() {
267 261 // Pseudo-recursion
268 262 let data =
269 263 (key, &mut child_node.entry, &mut child_node.copy_source);
270 264 let new_iter = child_node.children.iter_mut();
271 265 let old_iter = std::mem::replace(&mut iter, new_iter);
272 266 stack.push((data, old_iter));
273 267 }
274 268 // Found the end of a `children.values_mut()` iterator.
275 269 if let Some((data, next_iter)) = stack.pop() {
276 270 // "Return" from pseudo-recursion by restoring state from the
277 271 // explicit stack
278 272 iter = next_iter;
279 273
280 274 Some(data)
281 275 } else {
282 276 // Reached the bottom of the stack, we’re done
283 277 None
284 278 }
285 279 })
286 280 }
287 281 }
288 282
289 283 impl super::dispatch::DirstateMapMethods for DirstateMap {
290 284 fn clear(&mut self) {
291 self.set_parents(&DirstateParents {
292 p1: NULL_NODE,
293 p2: NULL_NODE,
294 });
295 285 self.root.clear();
296 286 self.nodes_with_entry_count = 0;
297 287 self.nodes_with_copy_source_count = 0;
298 288 }
299 289
300 290 fn add_file(
301 291 &mut self,
302 292 filename: &HgPath,
303 293 old_state: EntryState,
304 294 entry: DirstateEntry,
305 295 ) -> Result<(), DirstateMapError> {
306 296 self.add_or_remove_file(filename, old_state, entry);
307 297 Ok(())
308 298 }
309 299
310 300 fn remove_file(
311 301 &mut self,
312 302 filename: &HgPath,
313 303 old_state: EntryState,
314 304 size: i32,
315 305 ) -> Result<(), DirstateMapError> {
316 306 let entry = DirstateEntry {
317 307 state: EntryState::Removed,
318 308 mode: 0,
319 309 size,
320 310 mtime: 0,
321 311 };
322 312 self.add_or_remove_file(filename, old_state, entry);
323 313 Ok(())
324 314 }
325 315
326 316 fn drop_file(
327 317 &mut self,
328 318 filename: &HgPath,
329 319 old_state: EntryState,
330 320 ) -> Result<bool, DirstateMapError> {
331 321 let was_tracked = old_state.is_tracked();
332 322 if let Some(node) = Self::get_node_mut_tracing_ancestors(
333 323 &mut self.root,
334 324 filename,
335 325 |ancestor| {
336 326 if was_tracked {
337 327 ancestor.tracked_descendants_count -= 1
338 328 }
339 329 },
340 330 ) {
341 331 let had_entry = node.entry.is_some();
342 332 let had_copy_source = node.copy_source.is_some();
343 333
344 334 // TODO: this leaves in the tree a "non-file" node. Should we
345 335 // remove the node instead, together with ancestor nodes for
346 336 // directories that become empty?
347 337 node.entry = None;
348 338 node.copy_source = None;
349 339
350 340 if had_entry {
351 341 self.nodes_with_entry_count -= 1
352 342 }
353 343 if had_copy_source {
354 344 self.nodes_with_copy_source_count -= 1
355 345 }
356 346 Ok(had_entry)
357 347 } else {
358 348 assert!(!was_tracked);
359 349 Ok(false)
360 350 }
361 351 }
362 352
363 353 fn clear_ambiguous_times(&mut self, filenames: Vec<HgPathBuf>, now: i32) {
364 354 for filename in filenames {
365 355 if let Some(node) = Self::get_node_mut(&mut self.root, &filename) {
366 356 if let Some(entry) = node.entry.as_mut() {
367 357 clear_ambiguous_mtime(entry, now);
368 358 }
369 359 }
370 360 }
371 361 }
372 362
373 363 fn non_normal_entries_contains(&mut self, key: &HgPath) -> bool {
374 364 self.get_node(key)
375 365 .and_then(|node| node.entry.as_ref())
376 366 .map_or(false, DirstateEntry::is_non_normal)
377 367 }
378 368
379 369 fn non_normal_entries_remove(&mut self, _key: &HgPath) {
380 370 // Do nothing, this `DirstateMap` does not have a separate "non normal
381 371         // entries" set that needs to be kept up to date
382 372 }
383 373
384 374 fn non_normal_or_other_parent_paths(
385 375 &mut self,
386 376 ) -> Box<dyn Iterator<Item = &HgPathBuf> + '_> {
387 377 Box::new(self.iter_nodes().filter_map(|(path, node)| {
388 378 node.entry
389 379 .as_ref()
390 380 .filter(|entry| {
391 381 entry.is_non_normal() || entry.is_from_other_parent()
392 382 })
393 383 .map(|_| path.full_path())
394 384 }))
395 385 }
396 386
397 387 fn set_non_normal_other_parent_entries(&mut self, _force: bool) {
398 388 // Do nothing, this `DirstateMap` does not have a separate "non normal
399 389 // entries" and "from other parent" sets that need to be recomputed
400 390 }
401 391
402 392 fn iter_non_normal_paths(
403 393 &mut self,
404 394 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_> {
405 395 self.iter_non_normal_paths_panic()
406 396 }
407 397
408 398 fn iter_non_normal_paths_panic(
409 399 &self,
410 400 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_> {
411 401 Box::new(self.iter_nodes().filter_map(|(path, node)| {
412 402 node.entry
413 403 .as_ref()
414 404 .filter(|entry| entry.is_non_normal())
415 405 .map(|_| path.full_path())
416 406 }))
417 407 }
418 408
419 409 fn iter_other_parent_paths(
420 410 &mut self,
421 411 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_> {
422 412 Box::new(self.iter_nodes().filter_map(|(path, node)| {
423 413 node.entry
424 414 .as_ref()
425 415 .filter(|entry| entry.is_from_other_parent())
426 416 .map(|_| path.full_path())
427 417 }))
428 418 }
429 419
430 420 fn has_tracked_dir(
431 421 &mut self,
432 422 directory: &HgPath,
433 423 ) -> Result<bool, DirstateMapError> {
434 424 if let Some(node) = self.get_node(directory) {
435 425 // A node without a `DirstateEntry` was created to hold child
436 426 // nodes, and is therefore a directory.
437 427 Ok(node.entry.is_none() && node.tracked_descendants_count > 0)
438 428 } else {
439 429 Ok(false)
440 430 }
441 431 }
442 432
443 433 fn has_dir(
444 434 &mut self,
445 435 directory: &HgPath,
446 436 ) -> Result<bool, DirstateMapError> {
447 437 if let Some(node) = self.get_node(directory) {
448 438 // A node without a `DirstateEntry` was created to hold child
449 439 // nodes, and is therefore a directory.
450 440 Ok(node.entry.is_none())
451 441 } else {
452 442 Ok(false)
453 443 }
454 444 }
455 445
456 fn parents(
457 &mut self,
458 file_contents: &[u8],
459 ) -> Result<&DirstateParents, DirstateError> {
460 if self.parents.is_none() {
461 let parents = if !file_contents.is_empty() {
462 parse_dirstate_parents(file_contents)?.clone()
463 } else {
464 DirstateParents {
465 p1: NULL_NODE,
466 p2: NULL_NODE,
467 }
468 };
469 self.parents = Some(parents);
470 }
471 Ok(self.parents.as_ref().unwrap())
472 }
473
474 fn set_parents(&mut self, parents: &DirstateParents) {
475 self.parents = Some(parents.clone());
476 self.dirty_parents = true;
477 }
478
479 446 #[timed]
480 447 fn read<'a>(
481 448 &mut self,
482 449 file_contents: &'a [u8],
483 450 ) -> Result<Option<&'a DirstateParents>, DirstateError> {
484 451 if file_contents.is_empty() {
485 452 return Ok(None);
486 453 }
487 454
488 455 let parents = parse_dirstate_entries(
489 456 file_contents,
490 457 |path, entry, copy_source| {
491 458 let tracked = entry.state.is_tracked();
492 459 let node = Self::get_or_insert_node_tracing_ancestors(
493 460 &mut self.root,
494 461 path,
495 462 |ancestor| {
496 463 if tracked {
497 464 ancestor.tracked_descendants_count += 1
498 465 }
499 466 },
500 467 );
501 468 assert!(
502 469 node.entry.is_none(),
503 470 "duplicate dirstate entry in read"
504 471 );
505 472 assert!(
506 473 node.copy_source.is_none(),
507 474 "duplicate dirstate entry in read"
508 475 );
509 476 node.entry = Some(*entry);
510 477 node.copy_source = copy_source.map(HgPath::to_owned);
511 478 self.nodes_with_entry_count += 1;
512 479 if copy_source.is_some() {
513 480 self.nodes_with_copy_source_count += 1
514 481 }
515 482 },
516 483 )?;
517 484
518 if !self.dirty_parents {
519 self.set_parents(parents);
520 }
521
522 485 Ok(Some(parents))
523 486 }
524 487
525 488 fn pack(
526 489 &mut self,
527 490 parents: DirstateParents,
528 491 now: Timestamp,
529 492 ) -> Result<Vec<u8>, DirstateError> {
530 493         // Optimization (to be measured?): pre-compute size to avoid `Vec`
531 494 // reallocations
532 495 let mut size = parents.as_bytes().len();
533 496 for (path, node) in self.iter_nodes() {
534 497 if node.entry.is_some() {
535 498 size += packed_entry_size(
536 499 path.full_path(),
537 500 node.copy_source.as_ref(),
538 501 )
539 502 }
540 503 }
541 504
542 505 let mut packed = Vec::with_capacity(size);
543 506 packed.extend(parents.as_bytes());
544 507
545 508 let now: i32 = now.0.try_into().expect("time overflow");
546 509 for (path, opt_entry, copy_source) in self.iter_node_data_mut() {
547 510 if let Some(entry) = opt_entry {
548 511 clear_ambiguous_mtime(entry, now);
549 512 pack_entry(
550 513 path.full_path(),
551 514 entry,
552 515 copy_source.as_ref(),
553 516 &mut packed,
554 517 );
555 518 }
556 519 }
557 self.dirty_parents = false;
558 520 Ok(packed)
559 521 }
560 522
561 523 fn set_all_dirs(&mut self) -> Result<(), DirstateMapError> {
562 524         // Do nothing, this `DirstateMap` does not have a separate `all_dirs` that
563 525 // needs to be recomputed
564 526 Ok(())
565 527 }
566 528
567 529 fn set_dirs(&mut self) -> Result<(), DirstateMapError> {
568 530         // Do nothing, this `DirstateMap` does not have a separate `dirs` that needs
569 531 // to be recomputed
570 532 Ok(())
571 533 }
572 534
573 535 fn status<'a>(
574 536 &'a mut self,
575 537 matcher: &'a (dyn Matcher + Sync),
576 538 root_dir: PathBuf,
577 539 ignore_files: Vec<PathBuf>,
578 540 options: StatusOptions,
579 541 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
580 542 {
581 543 super::status::status(self, matcher, root_dir, ignore_files, options)
582 544 }
583 545
584 546 fn copy_map_len(&self) -> usize {
585 547 self.nodes_with_copy_source_count
586 548 }
587 549
588 550 fn copy_map_iter(&self) -> CopyMapIter<'_> {
589 551 Box::new(self.iter_nodes().filter_map(|(path, node)| {
590 552 node.copy_source
591 553 .as_ref()
592 554 .map(|copy_source| (path.full_path(), copy_source))
593 555 }))
594 556 }
595 557
596 558 fn copy_map_contains_key(&self, key: &HgPath) -> bool {
597 559 if let Some(node) = self.get_node(key) {
598 560 node.copy_source.is_some()
599 561 } else {
600 562 false
601 563 }
602 564 }
603 565
604 566 fn copy_map_get(&self, key: &HgPath) -> Option<&HgPathBuf> {
605 567 self.get_node(key)?.copy_source.as_ref()
606 568 }
607 569
608 570 fn copy_map_remove(&mut self, key: &HgPath) -> Option<HgPathBuf> {
609 571 let count = &mut self.nodes_with_copy_source_count;
610 572 Self::get_node_mut(&mut self.root, key).and_then(|node| {
611 573 if node.copy_source.is_some() {
612 574 *count -= 1
613 575 }
614 576 node.copy_source.take()
615 577 })
616 578 }
617 579
618 580 fn copy_map_insert(
619 581 &mut self,
620 582 key: HgPathBuf,
621 583 value: HgPathBuf,
622 584 ) -> Option<HgPathBuf> {
623 585 let node = Self::get_or_insert_node(&mut self.root, &key);
624 586 if node.copy_source.is_none() {
625 587 self.nodes_with_copy_source_count += 1
626 588 }
627 589 node.copy_source.replace(value)
628 590 }
629 591
630 592 fn len(&self) -> usize {
631 593 self.nodes_with_entry_count
632 594 }
633 595
634 596 fn contains_key(&self, key: &HgPath) -> bool {
635 597 self.get(key).is_some()
636 598 }
637 599
638 600 fn get(&self, key: &HgPath) -> Option<&DirstateEntry> {
639 601 self.get_node(key)?.entry.as_ref()
640 602 }
641 603
642 604 fn iter(&self) -> StateMapIter<'_> {
643 605 Box::new(self.iter_nodes().filter_map(|(path, node)| {
644 606 node.entry.as_ref().map(|entry| (path.full_path(), entry))
645 607 }))
646 608 }
647 609 }
@@ -1,314 +1,296 b''
1 1 use std::path::PathBuf;
2 2
3 3 use crate::dirstate::parsers::Timestamp;
4 4 use crate::matchers::Matcher;
5 5 use crate::utils::hg_path::{HgPath, HgPathBuf};
6 6 use crate::CopyMapIter;
7 7 use crate::DirstateEntry;
8 8 use crate::DirstateError;
9 9 use crate::DirstateMap;
10 10 use crate::DirstateMapError;
11 11 use crate::DirstateParents;
12 12 use crate::DirstateStatus;
13 13 use crate::EntryState;
14 14 use crate::PatternFileWarning;
15 15 use crate::StateMapIter;
16 16 use crate::StatusError;
17 17 use crate::StatusOptions;
18 18
19 19 pub trait DirstateMapMethods {
20 20 fn clear(&mut self);
21 21
22 22 fn add_file(
23 23 &mut self,
24 24 filename: &HgPath,
25 25 old_state: EntryState,
26 26 entry: DirstateEntry,
27 27 ) -> Result<(), DirstateMapError>;
28 28
29 29 fn remove_file(
30 30 &mut self,
31 31 filename: &HgPath,
32 32 old_state: EntryState,
33 33 size: i32,
34 34 ) -> Result<(), DirstateMapError>;
35 35
36 36 fn drop_file(
37 37 &mut self,
38 38 filename: &HgPath,
39 39 old_state: EntryState,
40 40 ) -> Result<bool, DirstateMapError>;
41 41
42 42 fn clear_ambiguous_times(&mut self, filenames: Vec<HgPathBuf>, now: i32);
43 43
44 44 fn non_normal_entries_contains(&mut self, key: &HgPath) -> bool;
45 45
46 46 fn non_normal_entries_remove(&mut self, key: &HgPath);
47 47
48 48 fn non_normal_or_other_parent_paths(
49 49 &mut self,
50 50 ) -> Box<dyn Iterator<Item = &HgPathBuf> + '_>;
51 51
52 52 fn set_non_normal_other_parent_entries(&mut self, force: bool);
53 53
54 54 fn iter_non_normal_paths(
55 55 &mut self,
56 56 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_>;
57 57
58 58 fn iter_non_normal_paths_panic(
59 59 &self,
60 60 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_>;
61 61
62 62 fn iter_other_parent_paths(
63 63 &mut self,
64 64 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_>;
65 65
66 66 fn has_tracked_dir(
67 67 &mut self,
68 68 directory: &HgPath,
69 69 ) -> Result<bool, DirstateMapError>;
70 70
71 71 fn has_dir(
72 72 &mut self,
73 73 directory: &HgPath,
74 74 ) -> Result<bool, DirstateMapError>;
75 75
76 fn parents(
77 &mut self,
78 file_contents: &[u8],
79 ) -> Result<&DirstateParents, DirstateError>;
80
81 fn set_parents(&mut self, parents: &DirstateParents);
82
83 76 fn read<'a>(
84 77 &mut self,
85 78 file_contents: &'a [u8],
86 79 ) -> Result<Option<&'a DirstateParents>, DirstateError>;
87 80
88 81 fn pack(
89 82 &mut self,
90 83 parents: DirstateParents,
91 84 now: Timestamp,
92 85 ) -> Result<Vec<u8>, DirstateError>;
93 86
94 87 fn set_all_dirs(&mut self) -> Result<(), DirstateMapError>;
95 88
96 89 fn set_dirs(&mut self) -> Result<(), DirstateMapError>;
97 90
98 91 fn status<'a>(
99 92 &'a mut self,
100 93 matcher: &'a (dyn Matcher + Sync),
101 94 root_dir: PathBuf,
102 95 ignore_files: Vec<PathBuf>,
103 96 options: StatusOptions,
104 97 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
105 98
106 99 fn copy_map_len(&self) -> usize;
107 100
108 101 fn copy_map_iter(&self) -> CopyMapIter<'_>;
109 102
110 103 fn copy_map_contains_key(&self, key: &HgPath) -> bool;
111 104
112 105 fn copy_map_get(&self, key: &HgPath) -> Option<&HgPathBuf>;
113 106
114 107 fn copy_map_remove(&mut self, key: &HgPath) -> Option<HgPathBuf>;
115 108
116 109 fn copy_map_insert(
117 110 &mut self,
118 111 key: HgPathBuf,
119 112 value: HgPathBuf,
120 113 ) -> Option<HgPathBuf>;
121 114
122 115 fn len(&self) -> usize;
123 116
124 117 fn contains_key(&self, key: &HgPath) -> bool;
125 118
126 119 fn get(&self, key: &HgPath) -> Option<&DirstateEntry>;
127 120
128 121 fn iter(&self) -> StateMapIter<'_>;
129 122 }
130 123
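Everything the Python bindings need now goes through this trait, so code written against it works with either map implementation behind a `Box<dyn DirstateMapMethods + Send>`. A small hypothetical helper (not part of this change) relying only on methods declared above and on `EntryState::is_tracked`, which both implementations in this patch already use:

    // Count tracked files through the trait object, without knowing whether
    // the flat or the tree-based `DirstateMap` is behind it.
    fn count_tracked(map: &dyn DirstateMapMethods) -> usize {
        map.iter()
            .filter(|(_path, entry)| entry.state.is_tracked())
            .count()
    }
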
131 124 impl DirstateMapMethods for DirstateMap {
132 125 fn clear(&mut self) {
133 126 self.clear()
134 127 }
135 128
136 129 fn add_file(
137 130 &mut self,
138 131 filename: &HgPath,
139 132 old_state: EntryState,
140 133 entry: DirstateEntry,
141 134 ) -> Result<(), DirstateMapError> {
142 135 self.add_file(filename, old_state, entry)
143 136 }
144 137
145 138 fn remove_file(
146 139 &mut self,
147 140 filename: &HgPath,
148 141 old_state: EntryState,
149 142 size: i32,
150 143 ) -> Result<(), DirstateMapError> {
151 144 self.remove_file(filename, old_state, size)
152 145 }
153 146
154 147 fn drop_file(
155 148 &mut self,
156 149 filename: &HgPath,
157 150 old_state: EntryState,
158 151 ) -> Result<bool, DirstateMapError> {
159 152 self.drop_file(filename, old_state)
160 153 }
161 154
162 155 fn clear_ambiguous_times(&mut self, filenames: Vec<HgPathBuf>, now: i32) {
163 156 self.clear_ambiguous_times(filenames, now)
164 157 }
165 158
166 159 fn non_normal_entries_contains(&mut self, key: &HgPath) -> bool {
167 160 let (non_normal, _other_parent) =
168 161 self.get_non_normal_other_parent_entries();
169 162 non_normal.contains(key)
170 163 }
171 164
172 165 fn non_normal_entries_remove(&mut self, key: &HgPath) {
173 166 self.non_normal_entries_remove(key)
174 167 }
175 168
176 169 fn non_normal_or_other_parent_paths(
177 170 &mut self,
178 171 ) -> Box<dyn Iterator<Item = &HgPathBuf> + '_> {
179 172 let (non_normal, other_parent) =
180 173 self.get_non_normal_other_parent_entries();
181 174 Box::new(non_normal.union(other_parent))
182 175 }
183 176
184 177 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
185 178 self.set_non_normal_other_parent_entries(force)
186 179 }
187 180
188 181 fn iter_non_normal_paths(
189 182 &mut self,
190 183 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_> {
191 184 let (non_normal, _other_parent) =
192 185 self.get_non_normal_other_parent_entries();
193 186 Box::new(non_normal.iter())
194 187 }
195 188
196 189 fn iter_non_normal_paths_panic(
197 190 &self,
198 191 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_> {
199 192 let (non_normal, _other_parent) =
200 193 self.get_non_normal_other_parent_entries_panic();
201 194 Box::new(non_normal.iter())
202 195 }
203 196
204 197 fn iter_other_parent_paths(
205 198 &mut self,
206 199 ) -> Box<dyn Iterator<Item = &HgPathBuf> + Send + '_> {
207 200 let (_non_normal, other_parent) =
208 201 self.get_non_normal_other_parent_entries();
209 202 Box::new(other_parent.iter())
210 203 }
211 204
212 205 fn has_tracked_dir(
213 206 &mut self,
214 207 directory: &HgPath,
215 208 ) -> Result<bool, DirstateMapError> {
216 209 self.has_tracked_dir(directory)
217 210 }
218 211
219 212 fn has_dir(
220 213 &mut self,
221 214 directory: &HgPath,
222 215 ) -> Result<bool, DirstateMapError> {
223 216 self.has_dir(directory)
224 217 }
225 218
226 fn parents(
227 &mut self,
228 file_contents: &[u8],
229 ) -> Result<&DirstateParents, DirstateError> {
230 self.parents(file_contents)
231 }
232
233 fn set_parents(&mut self, parents: &DirstateParents) {
234 self.set_parents(parents)
235 }
236
237 219 fn read<'a>(
238 220 &mut self,
239 221 file_contents: &'a [u8],
240 222 ) -> Result<Option<&'a DirstateParents>, DirstateError> {
241 223 self.read(file_contents)
242 224 }
243 225
244 226 fn pack(
245 227 &mut self,
246 228 parents: DirstateParents,
247 229 now: Timestamp,
248 230 ) -> Result<Vec<u8>, DirstateError> {
249 231 self.pack(parents, now)
250 232 }
251 233
252 234 fn set_all_dirs(&mut self) -> Result<(), DirstateMapError> {
253 235 self.set_all_dirs()
254 236 }
255 237
256 238 fn set_dirs(&mut self) -> Result<(), DirstateMapError> {
257 239 self.set_dirs()
258 240 }
259 241
260 242 fn status<'a>(
261 243 &'a mut self,
262 244 matcher: &'a (dyn Matcher + Sync),
263 245 root_dir: PathBuf,
264 246 ignore_files: Vec<PathBuf>,
265 247 options: StatusOptions,
266 248 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
267 249 {
268 250 crate::status(self, matcher, root_dir, ignore_files, options)
269 251 }
270 252
271 253 fn copy_map_len(&self) -> usize {
272 254 self.copy_map.len()
273 255 }
274 256
275 257 fn copy_map_iter(&self) -> CopyMapIter<'_> {
276 258 Box::new(self.copy_map.iter())
277 259 }
278 260
279 261 fn copy_map_contains_key(&self, key: &HgPath) -> bool {
280 262 self.copy_map.contains_key(key)
281 263 }
282 264
283 265 fn copy_map_get(&self, key: &HgPath) -> Option<&HgPathBuf> {
284 266 self.copy_map.get(key)
285 267 }
286 268
287 269 fn copy_map_remove(&mut self, key: &HgPath) -> Option<HgPathBuf> {
288 270 self.copy_map.remove(key)
289 271 }
290 272
291 273 fn copy_map_insert(
292 274 &mut self,
293 275 key: HgPathBuf,
294 276 value: HgPathBuf,
295 277 ) -> Option<HgPathBuf> {
296 278 self.copy_map.insert(key, value)
297 279 }
298 280
299 281 fn len(&self) -> usize {
300 282 (&**self).len()
301 283 }
302 284
303 285 fn contains_key(&self, key: &HgPath) -> bool {
304 286 (&**self).contains_key(key)
305 287 }
306 288
307 289 fn get(&self, key: &HgPath) -> Option<&DirstateEntry> {
308 290 (&**self).get(key)
309 291 }
310 292
311 293 fn iter(&self) -> StateMapIter<'_> {
312 294 Box::new((&**self).iter())
313 295 }
314 296 }
@@ -1,574 +1,553 b''
1 1 // dirstate_map.rs
2 2 //
3 3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 4 //
5 5 // This software may be used and distributed according to the terms of the
6 6 // GNU General Public License version 2 or any later version.
7 7
8 8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
9 9 //! `hg-core` package.
10 10
11 11 use std::cell::{RefCell, RefMut};
12 12 use std::convert::TryInto;
13 13
14 14 use cpython::{
15 15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
16 PyObject, PyResult, PySet, PyString, PyTuple, Python, PythonObject,
17 ToPyObject, UnsafePyLeaked,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
17 UnsafePyLeaked,
18 18 };
19 19
20 20 use crate::{
21 21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
22 22 dirstate::non_normal_entries::{
23 23 NonNormalEntries, NonNormalEntriesIterator,
24 24 },
25 25 dirstate::{dirs_multiset::Dirs, make_dirstate_tuple},
26 26 parsers::dirstate_parents_to_pytuple,
27 27 };
28 28 use hg::{
29 29 dirstate::parsers::Timestamp,
30 30 dirstate_tree::dispatch::DirstateMapMethods,
31 31 errors::HgError,
32 32 revlog::Node,
33 33 utils::files::normalize_case,
34 34 utils::hg_path::{HgPath, HgPathBuf},
35 35 DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap,
36 36 DirstateMapError, DirstateParents, EntryState, StateMapIter,
37 37 };
38 38
39 39 // TODO
40 40 // This object needs to share references to multiple members of its Rust
41 41 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
42 42 // Right now `CopyMap` is done, but it needs to have an explicit reference
43 43 // to `RustDirstateMap` which itself needs to have an encapsulation for
44 44 // every method in `CopyMap` (copymapcopy, etc.).
45 45 // This is ugly and hard to maintain.
46 46 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
47 47 // `py_class!` is already implemented and does not mention
48 48 // `RustDirstateMap`, rightfully so.
49 49 // All attributes also have to have a separate refcount data attribute for
50 50 // leaks, with all methods that go along for reference sharing.
51 51 py_class!(pub class DirstateMap |py| {
52 52 @shared data inner: Box<dyn DirstateMapMethods + Send>;
53 53
54 54 def __new__(_cls, use_dirstate_tree: bool) -> PyResult<Self> {
55 55 let inner = if use_dirstate_tree {
56 56 Box::new(hg::dirstate_tree::dirstate_map::DirstateMap::new()) as _
57 57 } else {
58 58 Box::new(RustDirstateMap::default()) as _
59 59 };
60 60 Self::create_instance(py, inner)
61 61 }
62 62
63 63 def clear(&self) -> PyResult<PyObject> {
64 64 self.inner(py).borrow_mut().clear();
65 65 Ok(py.None())
66 66 }
67 67
68 68 def get(
69 69 &self,
70 70 key: PyObject,
71 71 default: Option<PyObject> = None
72 72 ) -> PyResult<Option<PyObject>> {
73 73 let key = key.extract::<PyBytes>(py)?;
74 74 match self.inner(py).borrow().get(HgPath::new(key.data(py))) {
75 75 Some(entry) => {
76 76 Ok(Some(make_dirstate_tuple(py, entry)?))
77 77 },
78 78 None => Ok(default)
79 79 }
80 80 }
81 81
82 82 def addfile(
83 83 &self,
84 84 f: PyObject,
85 85 oldstate: PyObject,
86 86 state: PyObject,
87 87 mode: PyObject,
88 88 size: PyObject,
89 89 mtime: PyObject
90 90 ) -> PyResult<PyObject> {
91 91 self.inner(py).borrow_mut().add_file(
92 92 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
93 93 oldstate.extract::<PyBytes>(py)?.data(py)[0]
94 94 .try_into()
95 95 .map_err(|e: HgError| {
96 96 PyErr::new::<exc::ValueError, _>(py, e.to_string())
97 97 })?,
98 98 DirstateEntry {
99 99 state: state.extract::<PyBytes>(py)?.data(py)[0]
100 100 .try_into()
101 101 .map_err(|e: HgError| {
102 102 PyErr::new::<exc::ValueError, _>(py, e.to_string())
103 103 })?,
104 104 mode: mode.extract(py)?,
105 105 size: size.extract(py)?,
106 106 mtime: mtime.extract(py)?,
107 107 },
108 108 ).and(Ok(py.None())).or_else(|e: DirstateMapError| {
109 109 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
110 110 })
111 111 }
112 112
113 113 def removefile(
114 114 &self,
115 115 f: PyObject,
116 116 oldstate: PyObject,
117 117 size: PyObject
118 118 ) -> PyResult<PyObject> {
119 119 self.inner(py).borrow_mut()
120 120 .remove_file(
121 121 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
122 122 oldstate.extract::<PyBytes>(py)?.data(py)[0]
123 123 .try_into()
124 124 .map_err(|e: HgError| {
125 125 PyErr::new::<exc::ValueError, _>(py, e.to_string())
126 126 })?,
127 127 size.extract(py)?,
128 128 )
129 129 .or_else(|_| {
130 130 Err(PyErr::new::<exc::OSError, _>(
131 131 py,
132 132 "Dirstate error".to_string(),
133 133 ))
134 134 })?;
135 135 Ok(py.None())
136 136 }
137 137
138 138 def dropfile(
139 139 &self,
140 140 f: PyObject,
141 141 oldstate: PyObject
142 142 ) -> PyResult<PyBool> {
143 143 self.inner(py).borrow_mut()
144 144 .drop_file(
145 145 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
146 146 oldstate.extract::<PyBytes>(py)?.data(py)[0]
147 147 .try_into()
148 148 .map_err(|e: HgError| {
149 149 PyErr::new::<exc::ValueError, _>(py, e.to_string())
150 150 })?,
151 151 )
152 152 .and_then(|b| Ok(b.to_py_object(py)))
153 153 .or_else(|e| {
154 154 Err(PyErr::new::<exc::OSError, _>(
155 155 py,
156 156 format!("Dirstate error: {}", e.to_string()),
157 157 ))
158 158 })
159 159 }
160 160
161 161 def clearambiguoustimes(
162 162 &self,
163 163 files: PyObject,
164 164 now: PyObject
165 165 ) -> PyResult<PyObject> {
166 166 let files: PyResult<Vec<HgPathBuf>> = files
167 167 .iter(py)?
168 168 .map(|filename| {
169 169 Ok(HgPathBuf::from_bytes(
170 170 filename?.extract::<PyBytes>(py)?.data(py),
171 171 ))
172 172 })
173 173 .collect();
174 174 self.inner(py).borrow_mut()
175 175 .clear_ambiguous_times(files?, now.extract(py)?);
176 176 Ok(py.None())
177 177 }
178 178
179 179 def other_parent_entries(&self) -> PyResult<PyObject> {
180 180 let mut inner_shared = self.inner(py).borrow_mut();
181 181 let set = PySet::empty(py)?;
182 182 for path in inner_shared.iter_other_parent_paths() {
183 183 set.add(py, PyBytes::new(py, path.as_bytes()))?;
184 184 }
185 185 Ok(set.into_object())
186 186 }
187 187
188 188 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
189 189 NonNormalEntries::from_inner(py, self.clone_ref(py))
190 190 }
191 191
192 192 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
193 193 let key = key.extract::<PyBytes>(py)?;
194 194 Ok(self
195 195 .inner(py)
196 196 .borrow_mut()
197 197 .non_normal_entries_contains(HgPath::new(key.data(py))))
198 198 }
199 199
200 200 def non_normal_entries_display(&self) -> PyResult<PyString> {
201 201 Ok(
202 202 PyString::new(
203 203 py,
204 204 &format!(
205 205 "NonNormalEntries: {}",
206 206 hg::utils::join_display(
207 207 self
208 208 .inner(py)
209 209 .borrow_mut()
210 210 .iter_non_normal_paths(),
211 211 ", "
212 212 )
213 213 )
214 214 )
215 215 )
216 216 }
217 217
218 218 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
219 219 let key = key.extract::<PyBytes>(py)?;
220 220 self
221 221 .inner(py)
222 222 .borrow_mut()
223 223 .non_normal_entries_remove(HgPath::new(key.data(py)));
224 224 Ok(py.None())
225 225 }
226 226
227 227 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
228 228 let mut inner = self.inner(py).borrow_mut();
229 229
230 230 let ret = PyList::new(py, &[]);
231 231 for filename in inner.non_normal_or_other_parent_paths() {
232 232 let as_pystring = PyBytes::new(py, filename.as_bytes());
233 233 ret.append(py, as_pystring.into_object());
234 234 }
235 235 Ok(ret)
236 236 }
237 237
238 238 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
239 239 // Make sure the sets are defined before we no longer have a mutable
240 240 // reference to the dmap.
241 241 self.inner(py)
242 242 .borrow_mut()
243 243 .set_non_normal_other_parent_entries(false);
244 244
245 245 let leaked_ref = self.inner(py).leak_immutable();
246 246
247 247 NonNormalEntriesIterator::from_inner(py, unsafe {
248 248 leaked_ref.map(py, |o| {
249 249 o.iter_non_normal_paths_panic()
250 250 })
251 251 })
252 252 }
253 253
254 254 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
255 255 let d = d.extract::<PyBytes>(py)?;
256 256 Ok(self.inner(py).borrow_mut()
257 257 .has_tracked_dir(HgPath::new(d.data(py)))
258 258 .map_err(|e| {
259 259 PyErr::new::<exc::ValueError, _>(py, e.to_string())
260 260 })?
261 261 .to_py_object(py))
262 262 }
263 263
264 264 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
265 265 let d = d.extract::<PyBytes>(py)?;
266 266 Ok(self.inner(py).borrow_mut()
267 267 .has_dir(HgPath::new(d.data(py)))
268 268 .map_err(|e| {
269 269 PyErr::new::<exc::ValueError, _>(py, e.to_string())
270 270 })?
271 271 .to_py_object(py))
272 272 }
273 273
274 def parents(&self, st: PyObject) -> PyResult<PyTuple> {
275 self.inner(py).borrow_mut()
276 .parents(st.extract::<PyBytes>(py)?.data(py))
277 .map(|parents| dirstate_parents_to_pytuple(py, parents))
278 .or_else(|_| {
279 Err(PyErr::new::<exc::OSError, _>(
280 py,
281 "Dirstate error".to_string(),
282 ))
283 })
284 }
285
286 def setparents(&self, p1: PyObject, p2: PyObject) -> PyResult<PyObject> {
287 let p1 = extract_node_id(py, &p1)?;
288 let p2 = extract_node_id(py, &p2)?;
289
290 self.inner(py).borrow_mut()
291 .set_parents(&DirstateParents { p1, p2 });
292 Ok(py.None())
293 }
294
295 274 def read(&self, st: PyObject) -> PyResult<Option<PyObject>> {
296 275 match self.inner(py).borrow_mut()
297 276 .read(st.extract::<PyBytes>(py)?.data(py))
298 277 {
299 278 Ok(Some(parents)) => Ok(Some(
300 279 dirstate_parents_to_pytuple(py, parents)
301 280 .into_object()
302 281 )),
303 282 Ok(None) => Ok(Some(py.None())),
304 283 Err(_) => Err(PyErr::new::<exc::OSError, _>(
305 284 py,
306 285 "Dirstate error".to_string(),
307 286 )),
308 287 }
309 288 }
310 289 def write(
311 290 &self,
312 291 p1: PyObject,
313 292 p2: PyObject,
314 293 now: PyObject
315 294 ) -> PyResult<PyBytes> {
316 295 let now = Timestamp(now.extract(py)?);
317 296 let parents = DirstateParents {
318 297 p1: extract_node_id(py, &p1)?,
319 298 p2: extract_node_id(py, &p2)?,
320 299 };
321 300
322 301 match self.inner(py).borrow_mut().pack(parents, now) {
323 302 Ok(packed) => Ok(PyBytes::new(py, &packed)),
324 303 Err(_) => Err(PyErr::new::<exc::OSError, _>(
325 304 py,
326 305 "Dirstate error".to_string(),
327 306 )),
328 307 }
329 308 }
330 309
331 310 def filefoldmapasdict(&self) -> PyResult<PyDict> {
332 311 let dict = PyDict::new(py);
333 312 for (path, entry) in self.inner(py).borrow_mut().iter() {
334 313 if entry.state != EntryState::Removed {
335 314 let key = normalize_case(path);
336 315 let value = path;
337 316 dict.set_item(
338 317 py,
339 318 PyBytes::new(py, key.as_bytes()).into_object(),
340 319 PyBytes::new(py, value.as_bytes()).into_object(),
341 320 )?;
342 321 }
343 322 }
344 323 Ok(dict)
345 324 }
346 325
347 326 def __len__(&self) -> PyResult<usize> {
348 327 Ok(self.inner(py).borrow().len())
349 328 }
350 329
351 330 def __contains__(&self, key: PyObject) -> PyResult<bool> {
352 331 let key = key.extract::<PyBytes>(py)?;
353 332 Ok(self.inner(py).borrow().contains_key(HgPath::new(key.data(py))))
354 333 }
355 334
356 335 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
357 336 let key = key.extract::<PyBytes>(py)?;
358 337 let key = HgPath::new(key.data(py));
359 338 match self.inner(py).borrow().get(key) {
360 339 Some(entry) => {
361 340 Ok(make_dirstate_tuple(py, entry)?)
362 341 },
363 342 None => Err(PyErr::new::<exc::KeyError, _>(
364 343 py,
365 344 String::from_utf8_lossy(key.as_bytes()),
366 345 )),
367 346 }
368 347 }
369 348
370 349 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
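        // A plain `borrow()` would not outlive this call, so obtain a leaked
        // reference tied to this Python object (tracked by the shared-data
        // leak count) and map it to the underlying iterator; the `unsafe`
        // below reflects that leaked `'static` lifetime.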
371 350 let leaked_ref = self.inner(py).leak_immutable();
372 351 DirstateMapKeysIterator::from_inner(
373 352 py,
374 353 unsafe { leaked_ref.map(py, |o| o.iter()) },
375 354 )
376 355 }
377 356
378 357 def items(&self) -> PyResult<DirstateMapItemsIterator> {
379 358 let leaked_ref = self.inner(py).leak_immutable();
380 359 DirstateMapItemsIterator::from_inner(
381 360 py,
382 361 unsafe { leaked_ref.map(py, |o| o.iter()) },
383 362 )
384 363 }
385 364
386 365 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
387 366 let leaked_ref = self.inner(py).leak_immutable();
388 367 DirstateMapKeysIterator::from_inner(
389 368 py,
390 369 unsafe { leaked_ref.map(py, |o| o.iter()) },
391 370 )
392 371 }
393 372
394 373 def getdirs(&self) -> PyResult<Dirs> {
395 374 // TODO don't copy, share the reference
396 375 self.inner(py).borrow_mut().set_dirs()
397 376 .map_err(|e| {
398 377 PyErr::new::<exc::ValueError, _>(py, e.to_string())
399 378 })?;
400 379 Dirs::from_inner(
401 380 py,
402 381 DirsMultiset::from_dirstate(
403 382 self.inner(py).borrow().iter(),
404 383 Some(EntryState::Removed),
405 384 )
406 385 .map_err(|e| {
407 386 PyErr::new::<exc::ValueError, _>(py, e.to_string())
408 387 })?,
409 388 )
410 389 }
411 390 def getalldirs(&self) -> PyResult<Dirs> {
412 391 // TODO don't copy, share the reference
413 392 self.inner(py).borrow_mut().set_all_dirs()
414 393 .map_err(|e| {
415 394 PyErr::new::<exc::ValueError, _>(py, e.to_string())
416 395 })?;
417 396 Dirs::from_inner(
418 397 py,
419 398 DirsMultiset::from_dirstate(
420 399 self.inner(py).borrow().iter(),
421 400 None,
422 401 ).map_err(|e| {
423 402 PyErr::new::<exc::ValueError, _>(py, e.to_string())
424 403 })?,
425 404 )
426 405 }
427 406
428 407 // TODO all copymap* methods, see docstring above
429 408 def copymapcopy(&self) -> PyResult<PyDict> {
430 409 let dict = PyDict::new(py);
431 410 for (key, value) in self.inner(py).borrow().copy_map_iter() {
432 411 dict.set_item(
433 412 py,
434 413 PyBytes::new(py, key.as_bytes()),
435 414 PyBytes::new(py, value.as_bytes()),
436 415 )?;
437 416 }
438 417 Ok(dict)
439 418 }
440 419
441 420 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
442 421 let key = key.extract::<PyBytes>(py)?;
443 422 match self.inner(py).borrow().copy_map_get(HgPath::new(key.data(py))) {
444 423 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
445 424 None => Err(PyErr::new::<exc::KeyError, _>(
446 425 py,
447 426 String::from_utf8_lossy(key.data(py)),
448 427 )),
449 428 }
450 429 }
451 430 def copymap(&self) -> PyResult<CopyMap> {
452 431 CopyMap::from_inner(py, self.clone_ref(py))
453 432 }
454 433
455 434 def copymaplen(&self) -> PyResult<usize> {
456 435 Ok(self.inner(py).borrow().copy_map_len())
457 436 }
458 437 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
459 438 let key = key.extract::<PyBytes>(py)?;
460 439 Ok(self
461 440 .inner(py)
462 441 .borrow()
463 442 .copy_map_contains_key(HgPath::new(key.data(py))))
464 443 }
465 444 def copymapget(
466 445 &self,
467 446 key: PyObject,
468 447 default: Option<PyObject>
469 448 ) -> PyResult<Option<PyObject>> {
470 449 let key = key.extract::<PyBytes>(py)?;
471 450 match self
472 451 .inner(py)
473 452 .borrow()
474 453 .copy_map_get(HgPath::new(key.data(py)))
475 454 {
476 455 Some(copy) => Ok(Some(
477 456 PyBytes::new(py, copy.as_bytes()).into_object(),
478 457 )),
479 458 None => Ok(default),
480 459 }
481 460 }
482 461 def copymapsetitem(
483 462 &self,
484 463 key: PyObject,
485 464 value: PyObject
486 465 ) -> PyResult<PyObject> {
487 466 let key = key.extract::<PyBytes>(py)?;
488 467 let value = value.extract::<PyBytes>(py)?;
489 468 self.inner(py).borrow_mut().copy_map_insert(
490 469 HgPathBuf::from_bytes(key.data(py)),
491 470 HgPathBuf::from_bytes(value.data(py)),
492 471 );
493 472 Ok(py.None())
494 473 }
495 474 def copymappop(
496 475 &self,
497 476 key: PyObject,
498 477 default: Option<PyObject>
499 478 ) -> PyResult<Option<PyObject>> {
500 479 let key = key.extract::<PyBytes>(py)?;
501 480 match self
502 481 .inner(py)
503 482 .borrow_mut()
504 483 .copy_map_remove(HgPath::new(key.data(py)))
505 484 {
506 485 Some(_) => Ok(None),
507 486 None => Ok(default),
508 487 }
509 488 }
510 489
511 490 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
512 491 let leaked_ref = self.inner(py).leak_immutable();
513 492 CopyMapKeysIterator::from_inner(
514 493 py,
515 494 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
516 495 )
517 496 }
518 497
519 498 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
520 499 let leaked_ref = self.inner(py).leak_immutable();
521 500 CopyMapItemsIterator::from_inner(
522 501 py,
523 502 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
524 503 )
525 504 }
526 505
527 506 });
528 507
529 508 impl DirstateMap {
530 509 pub fn get_inner_mut<'a>(
531 510 &'a self,
532 511 py: Python<'a>,
533 512 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
534 513 self.inner(py).borrow_mut()
535 514 }
536 515 fn translate_key(
537 516 py: Python,
538 517 res: (&HgPathBuf, &DirstateEntry),
539 518 ) -> PyResult<Option<PyBytes>> {
540 519 Ok(Some(PyBytes::new(py, res.0.as_bytes())))
541 520 }
542 521 fn translate_key_value(
543 522 py: Python,
544 523 res: (&HgPathBuf, &DirstateEntry),
545 524 ) -> PyResult<Option<(PyBytes, PyObject)>> {
546 525 let (f, entry) = res;
547 526 Ok(Some((
548 527 PyBytes::new(py, f.as_bytes()),
549 528 make_dirstate_tuple(py, &entry)?,
550 529 )))
551 530 }
552 531 }
553 532
554 533 py_shared_iterator!(
555 534 DirstateMapKeysIterator,
556 535 UnsafePyLeaked<StateMapIter<'static>>,
557 536 DirstateMap::translate_key,
558 537 Option<PyBytes>
559 538 );
560 539
561 540 py_shared_iterator!(
562 541 DirstateMapItemsIterator,
563 542 UnsafePyLeaked<StateMapIter<'static>>,
564 543 DirstateMap::translate_key_value,
565 544 Option<(PyBytes, PyObject)>
566 545 );
567 546
568 547 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
569 548 let bytes = obj.extract::<PyBytes>(py)?;
570 549 match bytes.data(py).try_into() {
571 550 Ok(s) => Ok(s),
572 551 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
573 552 }
574 553 }
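For orientation, here is a minimal sketch of how the class defined above can be exercised from Python. The method names and argument order follow the bindings in this file; the import path via `policy.importrust` and the concrete values are illustrative assumptions, not part of this change.

    # Sketch only: drives the Rust-backed DirstateMap directly.
    from mercurial import policy

    rustmod = policy.importrust('dirstate')  # None when the Rust extension is unavailable
    if rustmod is not None:
        dmap = rustmod.DirstateMap(False)  # use_dirstate_tree=False -> flat RustDirstateMap
        # addfile(f, oldstate, state, mode, size, mtime)
        dmap.addfile(b'foo.txt', b'?', b'a', 0o644, 12, 0)
        assert b'foo.txt' in dmap
        print(dmap.get(b'foo.txt'))  # dirstate tuple: (state, mode, size, mtime)
        dmap.copymapsetitem(b'foo.txt', b'bar.txt')
        print(dmap.copymapcopy())  # {b'foo.txt': b'bar.txt'}

In the tree itself these methods are normally reached through the Python `dirstatemap` wrapper rather than called directly; the snippet above only illustrates the surface exposed by this file.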