dirstate: remove deprecated API...
Raphaël Gomès
r49362:934a6213 default
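This change drops the deprecated `dirstate[file]` accessor (`__getitem__`) from both the `dirstate` class and the `idirstate` interface in the diff below; callers go through `get_entry()` instead. A minimal caller-side sketch of that migration (the helper name and arguments are placeholders, only `get_entry()` comes from the diff):

    def file_state(dirstate, f):
        # Before this change, dirstate[f] returned one of b'n', b'a', b'r',
        # b'm' or b'?' and had emitted a deprecation warning since 6.1.
        # Now the caller asks for the DirstateItem; a path unknown to the
        # dirstate yields an empty item, whose .state is expected to be b'?'.
        entry = dirstate.get_entry(f)
        return entry.state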
@@ -1,1450 +1,1429 b''
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import os
14 14 import stat
15 15
16 16 from .i18n import _
17 17 from .pycompat import delattr
18 18
19 19 from hgdemandimport import tracing
20 20
21 21 from . import (
22 22 dirstatemap,
23 23 encoding,
24 24 error,
25 25 match as matchmod,
26 26 pathutil,
27 27 policy,
28 28 pycompat,
29 29 scmutil,
30 30 sparse,
31 31 util,
32 32 )
33 33
34 34 from .dirstateutils import (
35 35 timestamp,
36 36 )
37 37
38 38 from .interfaces import (
39 39 dirstate as intdirstate,
40 40 util as interfaceutil,
41 41 )
42 42
43 43 parsers = policy.importmod('parsers')
44 44 rustmod = policy.importrust('dirstate')
45 45
46 46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
47 47
48 48 propertycache = util.propertycache
49 49 filecache = scmutil.filecache
50 50 _rangemask = dirstatemap.rangemask
51 51
52 52 DirstateItem = dirstatemap.DirstateItem
53 53
54 54
55 55 class repocache(filecache):
56 56 """filecache for files in .hg/"""
57 57
58 58 def join(self, obj, fname):
59 59 return obj._opener.join(fname)
60 60
61 61
62 62 class rootcache(filecache):
63 63 """filecache for files in the repository root"""
64 64
65 65 def join(self, obj, fname):
66 66 return obj._join(fname)
67 67
68 68
69 69 def requires_parents_change(func):
70 70 def wrap(self, *args, **kwargs):
71 71 if not self.pendingparentchange():
72 72 msg = 'calling `%s` outside of a parentchange context'
73 73 msg %= func.__name__
74 74 raise error.ProgrammingError(msg)
75 75 return func(self, *args, **kwargs)
76 76
77 77 return wrap
78 78
79 79
80 80 def requires_no_parents_change(func):
81 81 def wrap(self, *args, **kwargs):
82 82 if self.pendingparentchange():
83 83 msg = 'calling `%s` inside of a parentchange context'
84 84 msg %= func.__name__
85 85 raise error.ProgrammingError(msg)
86 86 return func(self, *args, **kwargs)
87 87
88 88 return wrap
89 89
90 90
91 91 @interfaceutil.implementer(intdirstate.idirstate)
92 92 class dirstate(object):
93 93 def __init__(
94 94 self,
95 95 opener,
96 96 ui,
97 97 root,
98 98 validate,
99 99 sparsematchfn,
100 100 nodeconstants,
101 101 use_dirstate_v2,
102 102 ):
103 103 """Create a new dirstate object.
104 104
105 105 opener is an open()-like callable that can be used to open the
106 106 dirstate file; root is the root of the directory tracked by
107 107 the dirstate.
108 108 """
109 109 self._use_dirstate_v2 = use_dirstate_v2
110 110 self._nodeconstants = nodeconstants
111 111 self._opener = opener
112 112 self._validate = validate
113 113 self._root = root
114 114 self._sparsematchfn = sparsematchfn
115 115 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
116 116 # UNC path pointing to root share (issue4557)
117 117 self._rootdir = pathutil.normasprefix(root)
118 118 self._dirty = False
119 119 self._ui = ui
120 120 self._filecache = {}
121 121 self._parentwriters = 0
122 122 self._filename = b'dirstate'
123 123 self._pendingfilename = b'%s.pending' % self._filename
124 124 self._plchangecallbacks = {}
125 125 self._origpl = None
126 126 self._mapcls = dirstatemap.dirstatemap
127 127 # Access and cache cwd early, so we don't access it for the first time
128 128 # after a working-copy update caused it to not exist (accessing it then
129 129 # raises an exception).
130 130 self._cwd
131 131
132 132 def prefetch_parents(self):
133 133 """make sure the parents are loaded
134 134
135 135 Used to avoid a race condition.
136 136 """
137 137 self._pl
138 138
139 139 @contextlib.contextmanager
140 140 def parentchange(self):
141 141 """Context manager for handling dirstate parents.
142 142
143 143 If an exception occurs in the scope of the context manager,
144 144 the incoherent dirstate won't be written when wlock is
145 145 released.
146 146 """
147 147 self._parentwriters += 1
148 148 yield
149 149 # Typically we want the "undo" step of a context manager in a
150 150 # finally block so it happens even when an exception
151 151 # occurs. In this case, however, we only want to decrement
152 152 # parentwriters if the code in the with statement exits
153 153 # normally, so we don't have a try/finally here on purpose.
154 154 self._parentwriters -= 1
155 155
156 156 def pendingparentchange(self):
157 157 """Returns true if the dirstate is in the middle of a set of changes
158 158 that modify the dirstate parent.
159 159 """
160 160 return self._parentwriters > 0
161 161
162 162 @propertycache
163 163 def _map(self):
164 164 """Return the dirstate contents (see documentation for dirstatemap)."""
165 165 self._map = self._mapcls(
166 166 self._ui,
167 167 self._opener,
168 168 self._root,
169 169 self._nodeconstants,
170 170 self._use_dirstate_v2,
171 171 )
172 172 return self._map
173 173
174 174 @property
175 175 def _sparsematcher(self):
176 176 """The matcher for the sparse checkout.
177 177
178 178 The working directory may not include every file from a manifest. The
179 179 matcher obtained by this property will match a path if it is to be
180 180 included in the working directory.
181 181 """
182 182 # TODO there is potential to cache this property. For now, the matcher
183 183 # is resolved on every access. (But the called function does use a
184 184 # cache to keep the lookup fast.)
185 185 return self._sparsematchfn()
186 186
187 187 @repocache(b'branch')
188 188 def _branch(self):
189 189 try:
190 190 return self._opener.read(b"branch").strip() or b"default"
191 191 except IOError as inst:
192 192 if inst.errno != errno.ENOENT:
193 193 raise
194 194 return b"default"
195 195
196 196 @property
197 197 def _pl(self):
198 198 return self._map.parents()
199 199
200 200 def hasdir(self, d):
201 201 return self._map.hastrackeddir(d)
202 202
203 203 @rootcache(b'.hgignore')
204 204 def _ignore(self):
205 205 files = self._ignorefiles()
206 206 if not files:
207 207 return matchmod.never()
208 208
209 209 pats = [b'include:%s' % f for f in files]
210 210 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
211 211
212 212 @propertycache
213 213 def _slash(self):
214 214 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
215 215
216 216 @propertycache
217 217 def _checklink(self):
218 218 return util.checklink(self._root)
219 219
220 220 @propertycache
221 221 def _checkexec(self):
222 222 return bool(util.checkexec(self._root))
223 223
224 224 @propertycache
225 225 def _checkcase(self):
226 226 return not util.fscasesensitive(self._join(b'.hg'))
227 227
228 228 def _join(self, f):
229 229 # much faster than os.path.join()
230 230 # it's safe because f is always a relative path
231 231 return self._rootdir + f
232 232
233 233 def flagfunc(self, buildfallback):
234 234 """build a callable that returns flags associated with a filename
235 235
236 236 The information is extracted from three possible layers:
237 237 1. the file system if it supports the information
238 238 2. the "fallback" information stored in the dirstate if any
239 239 3. a more expensive mechanism inferring the flags from the parents.
240 240 """
241 241
242 242 # small hack to cache the result of buildfallback()
243 243 fallback_func = []
244 244
245 245 def get_flags(x):
246 246 entry = None
247 247 fallback_value = None
248 248 try:
249 249 st = os.lstat(self._join(x))
250 250 except OSError:
251 251 return b''
252 252
253 253 if self._checklink:
254 254 if util.statislink(st):
255 255 return b'l'
256 256 else:
257 257 entry = self.get_entry(x)
258 258 if entry.has_fallback_symlink:
259 259 if entry.fallback_symlink:
260 260 return b'l'
261 261 else:
262 262 if not fallback_func:
263 263 fallback_func.append(buildfallback())
264 264 fallback_value = fallback_func[0](x)
265 265 if b'l' in fallback_value:
266 266 return b'l'
267 267
268 268 if self._checkexec:
269 269 if util.statisexec(st):
270 270 return b'x'
271 271 else:
272 272 if entry is None:
273 273 entry = self.get_entry(x)
274 274 if entry.has_fallback_exec:
275 275 if entry.fallback_exec:
276 276 return b'x'
277 277 else:
278 278 if fallback_value is None:
279 279 if not fallback_func:
280 280 fallback_func.append(buildfallback())
281 281 fallback_value = fallback_func[0](x)
282 282 if b'x' in fallback_value:
283 283 return b'x'
284 284 return b''
285 285
286 286 return get_flags
287 287
288 288 @propertycache
289 289 def _cwd(self):
290 290 # internal config: ui.forcecwd
291 291 forcecwd = self._ui.config(b'ui', b'forcecwd')
292 292 if forcecwd:
293 293 return forcecwd
294 294 return encoding.getcwd()
295 295
296 296 def getcwd(self):
297 297 """Return the path from which a canonical path is calculated.
298 298
299 299 This path should be used to resolve file patterns or to convert
300 300 canonical paths back to file paths for display. It shouldn't be
301 301 used to get real file paths. Use vfs functions instead.
302 302 """
303 303 cwd = self._cwd
304 304 if cwd == self._root:
305 305 return b''
306 306 # self._root ends with a path separator if self._root is '/' or 'C:\'
307 307 rootsep = self._root
308 308 if not util.endswithsep(rootsep):
309 309 rootsep += pycompat.ossep
310 310 if cwd.startswith(rootsep):
311 311 return cwd[len(rootsep) :]
312 312 else:
313 313 # we're outside the repo. return an absolute path.
314 314 return cwd
315 315
316 316 def pathto(self, f, cwd=None):
317 317 if cwd is None:
318 318 cwd = self.getcwd()
319 319 path = util.pathto(self._root, cwd, f)
320 320 if self._slash:
321 321 return util.pconvert(path)
322 322 return path
323 323
324 def __getitem__(self, key):
325 """Return the current state of key (a filename) in the dirstate.
326
327 States are:
328 n normal
329 m needs merging
330 r marked for removal
331 a marked for addition
332 ? not tracked
333
334 XXX The "state" is a bit obscure to be in the "public" API. we should
335 consider migrating all users of this to going through the dirstate entry
336 instead.
337 """
338 msg = b"don't use dirstate[file], use dirstate.get_entry(file)"
339 util.nouideprecwarn(msg, b'6.1', stacklevel=2)
340 entry = self._map.get(key)
341 if entry is not None:
342 return entry.state
343 return b'?'
344
345 324 def get_entry(self, path):
346 325 """return a DirstateItem for the associated path"""
347 326 entry = self._map.get(path)
348 327 if entry is None:
349 328 return DirstateItem()
350 329 return entry
351 330
352 331 def __contains__(self, key):
353 332 return key in self._map
354 333
355 334 def __iter__(self):
356 335 return iter(sorted(self._map))
357 336
358 337 def items(self):
359 338 return pycompat.iteritems(self._map)
360 339
361 340 iteritems = items
362 341
363 342 def parents(self):
364 343 return [self._validate(p) for p in self._pl]
365 344
366 345 def p1(self):
367 346 return self._validate(self._pl[0])
368 347
369 348 def p2(self):
370 349 return self._validate(self._pl[1])
371 350
372 351 @property
373 352 def in_merge(self):
374 353 """True if a merge is in progress"""
375 354 return self._pl[1] != self._nodeconstants.nullid
376 355
377 356 def branch(self):
378 357 return encoding.tolocal(self._branch)
379 358
380 359 def setparents(self, p1, p2=None):
381 360 """Set dirstate parents to p1 and p2.
382 361
383 362 When moving from two parents to one, "merged" entries are
384 363 adjusted to normal and previous copy records are discarded and
385 364 returned by the call.
386 365
387 366 See localrepo.setparents()
388 367 """
389 368 if p2 is None:
390 369 p2 = self._nodeconstants.nullid
391 370 if self._parentwriters == 0:
392 371 raise ValueError(
393 372 b"cannot set dirstate parent outside of "
394 373 b"dirstate.parentchange context manager"
395 374 )
396 375
397 376 self._dirty = True
398 377 oldp2 = self._pl[1]
399 378 if self._origpl is None:
400 379 self._origpl = self._pl
401 380 nullid = self._nodeconstants.nullid
402 381 # True if we need to fold p2 related state back to a linear case
403 382 fold_p2 = oldp2 != nullid and p2 == nullid
404 383 return self._map.setparents(p1, p2, fold_p2=fold_p2)
405 384
406 385 def setbranch(self, branch):
407 386 self.__class__._branch.set(self, encoding.fromlocal(branch))
408 387 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
409 388 try:
410 389 f.write(self._branch + b'\n')
411 390 f.close()
412 391
413 392 # make sure filecache has the correct stat info for _branch after
414 393 # replacing the underlying file
415 394 ce = self._filecache[b'_branch']
416 395 if ce:
417 396 ce.refresh()
418 397 except: # re-raises
419 398 f.discard()
420 399 raise
421 400
422 401 def invalidate(self):
423 402 """Causes the next access to reread the dirstate.
424 403
425 404 This is different from localrepo.invalidatedirstate() because it always
426 405 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
427 406 check whether the dirstate has changed before rereading it."""
428 407
429 408 for a in ("_map", "_branch", "_ignore"):
430 409 if a in self.__dict__:
431 410 delattr(self, a)
432 411 self._dirty = False
433 412 self._parentwriters = 0
434 413 self._origpl = None
435 414
436 415 def copy(self, source, dest):
437 416 """Mark dest as a copy of source. Unmark dest if source is None."""
438 417 if source == dest:
439 418 return
440 419 self._dirty = True
441 420 if source is not None:
442 421 self._map.copymap[dest] = source
443 422 else:
444 423 self._map.copymap.pop(dest, None)
445 424
446 425 def copied(self, file):
447 426 return self._map.copymap.get(file, None)
448 427
449 428 def copies(self):
450 429 return self._map.copymap
451 430
452 431 @requires_no_parents_change
453 432 def set_tracked(self, filename, reset_copy=False):
454 433 """a "public" method for generic code to mark a file as tracked
455 434
456 435 This function is to be called outside of the "update/merge" case. For
457 436 example by a command like `hg add X`.
458 437
459 438 if reset_copy is set, any existing copy information will be dropped.
460 439
461 440 return True if the file was previously untracked, False otherwise.
462 441 """
463 442 self._dirty = True
464 443 entry = self._map.get(filename)
465 444 if entry is None or not entry.tracked:
466 445 self._check_new_tracked_filename(filename)
467 446 pre_tracked = self._map.set_tracked(filename)
468 447 if reset_copy:
469 448 self._map.copymap.pop(filename, None)
470 449 return pre_tracked
471 450
472 451 @requires_no_parents_change
473 452 def set_untracked(self, filename):
474 453 """a "public" method for generic code to mark a file as untracked
475 454
476 455 This function is to be called outside of the "update/merge" case. For
477 456 example by a command like `hg remove X`.
478 457
479 458 return True if the file was previously tracked, False otherwise.
480 459 """
481 460 ret = self._map.set_untracked(filename)
482 461 if ret:
483 462 self._dirty = True
484 463 return ret
485 464
486 465 @requires_no_parents_change
487 466 def set_clean(self, filename, parentfiledata):
488 467 """record that the current state of the file on disk is known to be clean"""
489 468 self._dirty = True
490 469 if not self._map[filename].tracked:
491 470 self._check_new_tracked_filename(filename)
492 471 (mode, size, mtime) = parentfiledata
493 472 self._map.set_clean(filename, mode, size, mtime)
494 473
495 474 @requires_no_parents_change
496 475 def set_possibly_dirty(self, filename):
497 476 """record that the current state of the file on disk is unknown"""
498 477 self._dirty = True
499 478 self._map.set_possibly_dirty(filename)
500 479
501 480 @requires_parents_change
502 481 def update_file_p1(
503 482 self,
504 483 filename,
505 484 p1_tracked,
506 485 ):
507 486 """Set a file as tracked in the parent (or not)
508 487
509 488 This is to be called when adjusting the dirstate to a new parent after a history
510 489 rewriting operation.
511 490
512 491 It should not be called during a merge (p2 != nullid) and only within
513 492 a `with dirstate.parentchange():` context.
514 493 """
515 494 if self.in_merge:
516 495 msg = b'update_file_reference should not be called when merging'
517 496 raise error.ProgrammingError(msg)
518 497 entry = self._map.get(filename)
519 498 if entry is None:
520 499 wc_tracked = False
521 500 else:
522 501 wc_tracked = entry.tracked
523 502 if not (p1_tracked or wc_tracked):
524 503 # the file is no longer relevant to anyone
525 504 if self._map.get(filename) is not None:
526 505 self._map.reset_state(filename)
527 506 self._dirty = True
528 507 elif (not p1_tracked) and wc_tracked:
529 508 if entry is not None and entry.added:
530 509 return # avoid dropping copy information (maybe?)
531 510
532 511 self._map.reset_state(
533 512 filename,
534 513 wc_tracked,
535 514 p1_tracked,
536 515 # the underlying reference might have changed, we will have to
537 516 # check it.
538 517 has_meaningful_mtime=False,
539 518 )
540 519
541 520 @requires_parents_change
542 521 def update_file(
543 522 self,
544 523 filename,
545 524 wc_tracked,
546 525 p1_tracked,
547 526 p2_info=False,
548 527 possibly_dirty=False,
549 528 parentfiledata=None,
550 529 ):
551 530 """update the information about a file in the dirstate
552 531
553 532 This is to be called when the dirstate's parent changes to keep track
554 533 of what the file situation is with regard to the working copy and its parent.
555 534
556 535 This function must be called within a `dirstate.parentchange` context.
557 536
558 537 note: the API is at an early stage and we might need to adjust it
559 538 depending on what information ends up being relevant and useful to
560 539 other processing.
561 540 """
562 541
563 542 # note: I do not think we need to double check name clash here since we
564 543 # are in an update/merge case that should already have taken care of
565 544 # this. The test agrees
566 545
567 546 self._dirty = True
568 547
569 548 self._map.reset_state(
570 549 filename,
571 550 wc_tracked,
572 551 p1_tracked,
573 552 p2_info=p2_info,
574 553 has_meaningful_mtime=not possibly_dirty,
575 554 parentfiledata=parentfiledata,
576 555 )
577 556
578 557 def _check_new_tracked_filename(self, filename):
579 558 scmutil.checkfilename(filename)
580 559 if self._map.hastrackeddir(filename):
581 560 msg = _(b'directory %r already in dirstate')
582 561 msg %= pycompat.bytestr(filename)
583 562 raise error.Abort(msg)
584 563 # shadows
585 564 for d in pathutil.finddirs(filename):
586 565 if self._map.hastrackeddir(d):
587 566 break
588 567 entry = self._map.get(d)
589 568 if entry is not None and not entry.removed:
590 569 msg = _(b'file %r in dirstate clashes with %r')
591 570 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
592 571 raise error.Abort(msg)
593 572
594 573 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
595 574 if exists is None:
596 575 exists = os.path.lexists(os.path.join(self._root, path))
597 576 if not exists:
598 577 # Maybe a path component exists
599 578 if not ignoremissing and b'/' in path:
600 579 d, f = path.rsplit(b'/', 1)
601 580 d = self._normalize(d, False, ignoremissing, None)
602 581 folded = d + b"/" + f
603 582 else:
604 583 # No path components, preserve original case
605 584 folded = path
606 585 else:
607 586 # recursively normalize leading directory components
608 587 # against dirstate
609 588 if b'/' in normed:
610 589 d, f = normed.rsplit(b'/', 1)
611 590 d = self._normalize(d, False, ignoremissing, True)
612 591 r = self._root + b"/" + d
613 592 folded = d + b"/" + util.fspath(f, r)
614 593 else:
615 594 folded = util.fspath(normed, self._root)
616 595 storemap[normed] = folded
617 596
618 597 return folded
619 598
620 599 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
621 600 normed = util.normcase(path)
622 601 folded = self._map.filefoldmap.get(normed, None)
623 602 if folded is None:
624 603 if isknown:
625 604 folded = path
626 605 else:
627 606 folded = self._discoverpath(
628 607 path, normed, ignoremissing, exists, self._map.filefoldmap
629 608 )
630 609 return folded
631 610
632 611 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
633 612 normed = util.normcase(path)
634 613 folded = self._map.filefoldmap.get(normed, None)
635 614 if folded is None:
636 615 folded = self._map.dirfoldmap.get(normed, None)
637 616 if folded is None:
638 617 if isknown:
639 618 folded = path
640 619 else:
641 620 # store discovered result in dirfoldmap so that future
642 621 # normalizefile calls don't start matching directories
643 622 folded = self._discoverpath(
644 623 path, normed, ignoremissing, exists, self._map.dirfoldmap
645 624 )
646 625 return folded
647 626
648 627 def normalize(self, path, isknown=False, ignoremissing=False):
649 628 """
650 629 normalize the case of a pathname when on a casefolding filesystem
651 630
652 631 isknown specifies whether the filename came from walking the
653 632 disk, to avoid extra filesystem access.
654 633
655 634 If ignoremissing is True, missing paths are returned
656 635 unchanged. Otherwise, we try harder to normalize possibly
657 636 existing path components.
658 637
659 638 The normalized case is determined based on the following precedence:
660 639
661 640 - version of name already stored in the dirstate
662 641 - version of name stored on disk
663 642 - version provided via command arguments
664 643 """
665 644
666 645 if self._checkcase:
667 646 return self._normalize(path, isknown, ignoremissing)
668 647 return path
669 648
670 649 def clear(self):
671 650 self._map.clear()
672 651 self._dirty = True
673 652
674 653 def rebuild(self, parent, allfiles, changedfiles=None):
675 654 if changedfiles is None:
676 655 # Rebuild entire dirstate
677 656 to_lookup = allfiles
678 657 to_drop = []
679 658 self.clear()
680 659 elif len(changedfiles) < 10:
681 660 # Avoid turning allfiles into a set, which can be expensive if it's
682 661 # large.
683 662 to_lookup = []
684 663 to_drop = []
685 664 for f in changedfiles:
686 665 if f in allfiles:
687 666 to_lookup.append(f)
688 667 else:
689 668 to_drop.append(f)
690 669 else:
691 670 changedfilesset = set(changedfiles)
692 671 to_lookup = changedfilesset & set(allfiles)
693 672 to_drop = changedfilesset - to_lookup
694 673
695 674 if self._origpl is None:
696 675 self._origpl = self._pl
697 676 self._map.setparents(parent, self._nodeconstants.nullid)
698 677
699 678 for f in to_lookup:
700 679
701 680 if self.in_merge:
702 681 self.set_tracked(f)
703 682 else:
704 683 self._map.reset_state(
705 684 f,
706 685 wc_tracked=True,
707 686 p1_tracked=True,
708 687 )
709 688 for f in to_drop:
710 689 self._map.reset_state(f)
711 690
712 691 self._dirty = True
713 692
714 693 def identity(self):
715 694 """Return identity of dirstate itself to detect changing in storage
716 695
717 696 If identity of previous dirstate is equal to this, writing
718 697 changes based on the former dirstate out can keep consistency.
719 698 """
720 699 return self._map.identity
721 700
722 701 def write(self, tr):
723 702 if not self._dirty:
724 703 return
725 704
726 705 filename = self._filename
727 706 if tr:
728 707 # delay writing in-memory changes out
729 708 tr.addfilegenerator(
730 709 b'dirstate',
731 710 (self._filename,),
732 711 lambda f: self._writedirstate(tr, f),
733 712 location=b'plain',
734 713 )
735 714 return
736 715
737 716 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
738 717 self._writedirstate(tr, st)
739 718
740 719 def addparentchangecallback(self, category, callback):
741 720 """add a callback to be called when the wd parents are changed
742 721
743 722 Callback will be called with the following arguments:
744 723 dirstate, (oldp1, oldp2), (newp1, newp2)
745 724
746 725 Category is a unique identifier to allow overwriting an old callback
747 726 with a newer callback.
748 727 """
749 728 self._plchangecallbacks[category] = callback
750 729
751 730 def _writedirstate(self, tr, st):
752 731 # notify callbacks about parents change
753 732 if self._origpl is not None and self._origpl != self._pl:
754 733 for c, callback in sorted(
755 734 pycompat.iteritems(self._plchangecallbacks)
756 735 ):
757 736 callback(self, self._origpl, self._pl)
758 737 self._origpl = None
759 738
760 739 self._map.write(tr, st)
761 740 self._dirty = False
762 741
763 742 def _dirignore(self, f):
764 743 if self._ignore(f):
765 744 return True
766 745 for p in pathutil.finddirs(f):
767 746 if self._ignore(p):
768 747 return True
769 748 return False
770 749
771 750 def _ignorefiles(self):
772 751 files = []
773 752 if os.path.exists(self._join(b'.hgignore')):
774 753 files.append(self._join(b'.hgignore'))
775 754 for name, path in self._ui.configitems(b"ui"):
776 755 if name == b'ignore' or name.startswith(b'ignore.'):
777 756 # we need to use os.path.join here rather than self._join
778 757 # because path is arbitrary and user-specified
779 758 files.append(os.path.join(self._rootdir, util.expandpath(path)))
780 759 return files
781 760
782 761 def _ignorefileandline(self, f):
783 762 files = collections.deque(self._ignorefiles())
784 763 visited = set()
785 764 while files:
786 765 i = files.popleft()
787 766 patterns = matchmod.readpatternfile(
788 767 i, self._ui.warn, sourceinfo=True
789 768 )
790 769 for pattern, lineno, line in patterns:
791 770 kind, p = matchmod._patsplit(pattern, b'glob')
792 771 if kind == b"subinclude":
793 772 if p not in visited:
794 773 files.append(p)
795 774 continue
796 775 m = matchmod.match(
797 776 self._root, b'', [], [pattern], warn=self._ui.warn
798 777 )
799 778 if m(f):
800 779 return (i, lineno, line)
801 780 visited.add(i)
802 781 return (None, -1, b"")
803 782
804 783 def _walkexplicit(self, match, subrepos):
805 784 """Get stat data about the files explicitly specified by match.
806 785
807 786 Return a triple (results, dirsfound, dirsnotfound).
808 787 - results is a mapping from filename to stat result. It also contains
809 788 listings mapping subrepos and .hg to None.
810 789 - dirsfound is a list of files found to be directories.
811 790 - dirsnotfound is a list of files that the dirstate thinks are
812 791 directories and that were not found."""
813 792
814 793 def badtype(mode):
815 794 kind = _(b'unknown')
816 795 if stat.S_ISCHR(mode):
817 796 kind = _(b'character device')
818 797 elif stat.S_ISBLK(mode):
819 798 kind = _(b'block device')
820 799 elif stat.S_ISFIFO(mode):
821 800 kind = _(b'fifo')
822 801 elif stat.S_ISSOCK(mode):
823 802 kind = _(b'socket')
824 803 elif stat.S_ISDIR(mode):
825 804 kind = _(b'directory')
826 805 return _(b'unsupported file type (type is %s)') % kind
827 806
828 807 badfn = match.bad
829 808 dmap = self._map
830 809 lstat = os.lstat
831 810 getkind = stat.S_IFMT
832 811 dirkind = stat.S_IFDIR
833 812 regkind = stat.S_IFREG
834 813 lnkkind = stat.S_IFLNK
835 814 join = self._join
836 815 dirsfound = []
837 816 foundadd = dirsfound.append
838 817 dirsnotfound = []
839 818 notfoundadd = dirsnotfound.append
840 819
841 820 if not match.isexact() and self._checkcase:
842 821 normalize = self._normalize
843 822 else:
844 823 normalize = None
845 824
846 825 files = sorted(match.files())
847 826 subrepos.sort()
848 827 i, j = 0, 0
849 828 while i < len(files) and j < len(subrepos):
850 829 subpath = subrepos[j] + b"/"
851 830 if files[i] < subpath:
852 831 i += 1
853 832 continue
854 833 while i < len(files) and files[i].startswith(subpath):
855 834 del files[i]
856 835 j += 1
857 836
858 837 if not files or b'' in files:
859 838 files = [b'']
860 839 # constructing the foldmap is expensive, so don't do it for the
861 840 # common case where files is ['']
862 841 normalize = None
863 842 results = dict.fromkeys(subrepos)
864 843 results[b'.hg'] = None
865 844
866 845 for ff in files:
867 846 if normalize:
868 847 nf = normalize(ff, False, True)
869 848 else:
870 849 nf = ff
871 850 if nf in results:
872 851 continue
873 852
874 853 try:
875 854 st = lstat(join(nf))
876 855 kind = getkind(st.st_mode)
877 856 if kind == dirkind:
878 857 if nf in dmap:
879 858 # file replaced by dir on disk but still in dirstate
880 859 results[nf] = None
881 860 foundadd((nf, ff))
882 861 elif kind == regkind or kind == lnkkind:
883 862 results[nf] = st
884 863 else:
885 864 badfn(ff, badtype(kind))
886 865 if nf in dmap:
887 866 results[nf] = None
888 867 except OSError as inst: # nf not found on disk - it is dirstate only
889 868 if nf in dmap: # does it exactly match a missing file?
890 869 results[nf] = None
891 870 else: # does it match a missing directory?
892 871 if self._map.hasdir(nf):
893 872 notfoundadd(nf)
894 873 else:
895 874 badfn(ff, encoding.strtolocal(inst.strerror))
896 875
897 876 # match.files() may contain explicitly-specified paths that shouldn't
898 877 # be taken; drop them from the list of files found. dirsfound/notfound
899 878 # aren't filtered here because they will be tested later.
900 879 if match.anypats():
901 880 for f in list(results):
902 881 if f == b'.hg' or f in subrepos:
903 882 # keep sentinel to disable further out-of-repo walks
904 883 continue
905 884 if not match(f):
906 885 del results[f]
907 886
908 887 # Case insensitive filesystems cannot rely on lstat() failing to detect
909 888 # a case-only rename. Prune the stat object for any file that does not
910 889 # match the case in the filesystem, if there are multiple files that
911 890 # normalize to the same path.
912 891 if match.isexact() and self._checkcase:
913 892 normed = {}
914 893
915 894 for f, st in pycompat.iteritems(results):
916 895 if st is None:
917 896 continue
918 897
919 898 nc = util.normcase(f)
920 899 paths = normed.get(nc)
921 900
922 901 if paths is None:
923 902 paths = set()
924 903 normed[nc] = paths
925 904
926 905 paths.add(f)
927 906
928 907 for norm, paths in pycompat.iteritems(normed):
929 908 if len(paths) > 1:
930 909 for path in paths:
931 910 folded = self._discoverpath(
932 911 path, norm, True, None, self._map.dirfoldmap
933 912 )
934 913 if path != folded:
935 914 results[path] = None
936 915
937 916 return results, dirsfound, dirsnotfound
938 917
939 918 def walk(self, match, subrepos, unknown, ignored, full=True):
940 919 """
941 920 Walk recursively through the directory tree, finding all files
942 921 matched by match.
943 922
944 923 If full is False, maybe skip some known-clean files.
945 924
946 925 Return a dict mapping filename to stat-like object (either
947 926 mercurial.osutil.stat instance or return value of os.stat()).
948 927
949 928 """
950 929 # full is a flag that extensions that hook into walk can use -- this
951 930 # implementation doesn't use it at all. This satisfies the contract
952 931 # because we only guarantee a "maybe".
953 932
954 933 if ignored:
955 934 ignore = util.never
956 935 dirignore = util.never
957 936 elif unknown:
958 937 ignore = self._ignore
959 938 dirignore = self._dirignore
960 939 else:
961 940 # if not unknown and not ignored, drop dir recursion and step 2
962 941 ignore = util.always
963 942 dirignore = util.always
964 943
965 944 matchfn = match.matchfn
966 945 matchalways = match.always()
967 946 matchtdir = match.traversedir
968 947 dmap = self._map
969 948 listdir = util.listdir
970 949 lstat = os.lstat
971 950 dirkind = stat.S_IFDIR
972 951 regkind = stat.S_IFREG
973 952 lnkkind = stat.S_IFLNK
974 953 join = self._join
975 954
976 955 exact = skipstep3 = False
977 956 if match.isexact(): # match.exact
978 957 exact = True
979 958 dirignore = util.always # skip step 2
980 959 elif match.prefix(): # match.match, no patterns
981 960 skipstep3 = True
982 961
983 962 if not exact and self._checkcase:
984 963 normalize = self._normalize
985 964 normalizefile = self._normalizefile
986 965 skipstep3 = False
987 966 else:
988 967 normalize = self._normalize
989 968 normalizefile = None
990 969
991 970 # step 1: find all explicit files
992 971 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
993 972 if matchtdir:
994 973 for d in work:
995 974 matchtdir(d[0])
996 975 for d in dirsnotfound:
997 976 matchtdir(d)
998 977
999 978 skipstep3 = skipstep3 and not (work or dirsnotfound)
1000 979 work = [d for d in work if not dirignore(d[0])]
1001 980
1002 981 # step 2: visit subdirectories
1003 982 def traverse(work, alreadynormed):
1004 983 wadd = work.append
1005 984 while work:
1006 985 tracing.counter('dirstate.walk work', len(work))
1007 986 nd = work.pop()
1008 987 visitentries = match.visitchildrenset(nd)
1009 988 if not visitentries:
1010 989 continue
1011 990 if visitentries == b'this' or visitentries == b'all':
1012 991 visitentries = None
1013 992 skip = None
1014 993 if nd != b'':
1015 994 skip = b'.hg'
1016 995 try:
1017 996 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1018 997 entries = listdir(join(nd), stat=True, skip=skip)
1019 998 except OSError as inst:
1020 999 if inst.errno in (errno.EACCES, errno.ENOENT):
1021 1000 match.bad(
1022 1001 self.pathto(nd), encoding.strtolocal(inst.strerror)
1023 1002 )
1024 1003 continue
1025 1004 raise
1026 1005 for f, kind, st in entries:
1027 1006 # Some matchers may return files in the visitentries set,
1028 1007 # instead of 'this', if the matcher explicitly mentions them
1029 1008 # and is not an exactmatcher. This is acceptable; we do not
1030 1009 # make any hard assumptions about file-or-directory below
1031 1010 # based on the presence of `f` in visitentries. If
1032 1011 # visitchildrenset returned a set, we can always skip the
1033 1012 # entries *not* in the set it provided regardless of whether
1034 1013 # they're actually a file or a directory.
1035 1014 if visitentries and f not in visitentries:
1036 1015 continue
1037 1016 if normalizefile:
1038 1017 # even though f might be a directory, we're only
1039 1018 # interested in comparing it to files currently in the
1040 1019 # dmap -- therefore normalizefile is enough
1041 1020 nf = normalizefile(
1042 1021 nd and (nd + b"/" + f) or f, True, True
1043 1022 )
1044 1023 else:
1045 1024 nf = nd and (nd + b"/" + f) or f
1046 1025 if nf not in results:
1047 1026 if kind == dirkind:
1048 1027 if not ignore(nf):
1049 1028 if matchtdir:
1050 1029 matchtdir(nf)
1051 1030 wadd(nf)
1052 1031 if nf in dmap and (matchalways or matchfn(nf)):
1053 1032 results[nf] = None
1054 1033 elif kind == regkind or kind == lnkkind:
1055 1034 if nf in dmap:
1056 1035 if matchalways or matchfn(nf):
1057 1036 results[nf] = st
1058 1037 elif (matchalways or matchfn(nf)) and not ignore(
1059 1038 nf
1060 1039 ):
1061 1040 # unknown file -- normalize if necessary
1062 1041 if not alreadynormed:
1063 1042 nf = normalize(nf, False, True)
1064 1043 results[nf] = st
1065 1044 elif nf in dmap and (matchalways or matchfn(nf)):
1066 1045 results[nf] = None
1067 1046
1068 1047 for nd, d in work:
1069 1048 # alreadynormed means that processwork doesn't have to do any
1070 1049 # expensive directory normalization
1071 1050 alreadynormed = not normalize or nd == d
1072 1051 traverse([d], alreadynormed)
1073 1052
1074 1053 for s in subrepos:
1075 1054 del results[s]
1076 1055 del results[b'.hg']
1077 1056
1078 1057 # step 3: visit remaining files from dmap
1079 1058 if not skipstep3 and not exact:
1080 1059 # If a dmap file is not in results yet, it was either
1081 1060 # a) not matching matchfn b) ignored, c) missing, or d) under a
1082 1061 # symlink directory.
1083 1062 if not results and matchalways:
1084 1063 visit = [f for f in dmap]
1085 1064 else:
1086 1065 visit = [f for f in dmap if f not in results and matchfn(f)]
1087 1066 visit.sort()
1088 1067
1089 1068 if unknown:
1090 1069 # unknown == True means we walked all dirs under the roots
1091 1070 # that weren't ignored, and everything that matched was stat'ed
1092 1071 # and is already in results.
1093 1072 # The rest must thus be ignored or under a symlink.
1094 1073 audit_path = pathutil.pathauditor(self._root, cached=True)
1095 1074
1096 1075 for nf in iter(visit):
1097 1076 # If a stat for the same file was already added with a
1098 1077 # different case, don't add one for this, since that would
1099 1078 # make it appear as if the file exists under both names
1100 1079 # on disk.
1101 1080 if (
1102 1081 normalizefile
1103 1082 and normalizefile(nf, True, True) in results
1104 1083 ):
1105 1084 results[nf] = None
1106 1085 # Report ignored items in the dmap as long as they are not
1107 1086 # under a symlink directory.
1108 1087 elif audit_path.check(nf):
1109 1088 try:
1110 1089 results[nf] = lstat(join(nf))
1111 1090 # file was just ignored, no links, and exists
1112 1091 except OSError:
1113 1092 # file doesn't exist
1114 1093 results[nf] = None
1115 1094 else:
1116 1095 # It's either missing or under a symlink directory
1117 1096 # which we in this case report as missing
1118 1097 results[nf] = None
1119 1098 else:
1120 1099 # We may not have walked the full directory tree above,
1121 1100 # so stat and check everything we missed.
1122 1101 iv = iter(visit)
1123 1102 for st in util.statfiles([join(i) for i in visit]):
1124 1103 results[next(iv)] = st
1125 1104 return results
1126 1105
1127 1106 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1128 1107 # Force Rayon (Rust parallelism library) to respect the number of
1129 1108 # workers. This is a temporary workaround until Rust code knows
1130 1109 # how to read the config file.
1131 1110 numcpus = self._ui.configint(b"worker", b"numcpus")
1132 1111 if numcpus is not None:
1133 1112 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1134 1113
1135 1114 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1136 1115 if not workers_enabled:
1137 1116 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1138 1117
1139 1118 (
1140 1119 lookup,
1141 1120 modified,
1142 1121 added,
1143 1122 removed,
1144 1123 deleted,
1145 1124 clean,
1146 1125 ignored,
1147 1126 unknown,
1148 1127 warnings,
1149 1128 bad,
1150 1129 traversed,
1151 1130 dirty,
1152 1131 ) = rustmod.status(
1153 1132 self._map._map,
1154 1133 matcher,
1155 1134 self._rootdir,
1156 1135 self._ignorefiles(),
1157 1136 self._checkexec,
1158 1137 bool(list_clean),
1159 1138 bool(list_ignored),
1160 1139 bool(list_unknown),
1161 1140 bool(matcher.traversedir),
1162 1141 )
1163 1142
1164 1143 self._dirty |= dirty
1165 1144
1166 1145 if matcher.traversedir:
1167 1146 for dir in traversed:
1168 1147 matcher.traversedir(dir)
1169 1148
1170 1149 if self._ui.warn:
1171 1150 for item in warnings:
1172 1151 if isinstance(item, tuple):
1173 1152 file_path, syntax = item
1174 1153 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1175 1154 file_path,
1176 1155 syntax,
1177 1156 )
1178 1157 self._ui.warn(msg)
1179 1158 else:
1180 1159 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1181 1160 self._ui.warn(
1182 1161 msg
1183 1162 % (
1184 1163 pathutil.canonpath(
1185 1164 self._rootdir, self._rootdir, item
1186 1165 ),
1187 1166 b"No such file or directory",
1188 1167 )
1189 1168 )
1190 1169
1191 1170 for (fn, message) in bad:
1192 1171 matcher.bad(fn, encoding.strtolocal(message))
1193 1172
1194 1173 status = scmutil.status(
1195 1174 modified=modified,
1196 1175 added=added,
1197 1176 removed=removed,
1198 1177 deleted=deleted,
1199 1178 unknown=unknown,
1200 1179 ignored=ignored,
1201 1180 clean=clean,
1202 1181 )
1203 1182 return (lookup, status)
1204 1183
1205 1184 def status(self, match, subrepos, ignored, clean, unknown):
1206 1185 """Determine the status of the working copy relative to the
1207 1186 dirstate and return a pair of (unsure, status), where status is of type
1208 1187 scmutil.status and:
1209 1188
1210 1189 unsure:
1211 1190 files that might have been modified since the dirstate was
1212 1191 written, but need to be read to be sure (size is the same
1213 1192 but mtime differs)
1214 1193 status.modified:
1215 1194 files that have definitely been modified since the dirstate
1216 1195 was written (different size or mode)
1217 1196 status.clean:
1218 1197 files that have definitely not been modified since the
1219 1198 dirstate was written
1220 1199 """
1221 1200 listignored, listclean, listunknown = ignored, clean, unknown
1222 1201 lookup, modified, added, unknown, ignored = [], [], [], [], []
1223 1202 removed, deleted, clean = [], [], []
1224 1203
1225 1204 dmap = self._map
1226 1205 dmap.preload()
1227 1206
1228 1207 use_rust = True
1229 1208
1230 1209 allowed_matchers = (
1231 1210 matchmod.alwaysmatcher,
1232 1211 matchmod.exactmatcher,
1233 1212 matchmod.includematcher,
1234 1213 )
1235 1214
1236 1215 if rustmod is None:
1237 1216 use_rust = False
1238 1217 elif self._checkcase:
1239 1218 # Case-insensitive filesystems are not handled yet
1240 1219 use_rust = False
1241 1220 elif subrepos:
1242 1221 use_rust = False
1243 1222 elif sparse.enabled:
1244 1223 use_rust = False
1245 1224 elif not isinstance(match, allowed_matchers):
1246 1225 # Some matchers have yet to be implemented
1247 1226 use_rust = False
1248 1227
1249 1228 # Get the time from the filesystem so we can disambiguate files that
1250 1229 # appear modified in the present or future.
1251 1230 try:
1252 1231 mtime_boundary = timestamp.get_fs_now(self._opener)
1253 1232 except OSError:
1254 1233 # In largefiles or readonly context
1255 1234 mtime_boundary = None
1256 1235
1257 1236 if use_rust:
1258 1237 try:
1259 1238 res = self._rust_status(
1260 1239 match, listclean, listignored, listunknown
1261 1240 )
1262 1241 return res + (mtime_boundary,)
1263 1242 except rustmod.FallbackError:
1264 1243 pass
1265 1244
1266 1245 def noop(f):
1267 1246 pass
1268 1247
1269 1248 dcontains = dmap.__contains__
1270 1249 dget = dmap.__getitem__
1271 1250 ladd = lookup.append # aka "unsure"
1272 1251 madd = modified.append
1273 1252 aadd = added.append
1274 1253 uadd = unknown.append if listunknown else noop
1275 1254 iadd = ignored.append if listignored else noop
1276 1255 radd = removed.append
1277 1256 dadd = deleted.append
1278 1257 cadd = clean.append if listclean else noop
1279 1258 mexact = match.exact
1280 1259 dirignore = self._dirignore
1281 1260 checkexec = self._checkexec
1282 1261 checklink = self._checklink
1283 1262 copymap = self._map.copymap
1284 1263
1285 1264 # We need to do full walks when either
1286 1265 # - we're listing all clean files, or
1287 1266 # - match.traversedir does something, because match.traversedir should
1288 1267 # be called for every dir in the working dir
1289 1268 full = listclean or match.traversedir is not None
1290 1269 for fn, st in pycompat.iteritems(
1291 1270 self.walk(match, subrepos, listunknown, listignored, full=full)
1292 1271 ):
1293 1272 if not dcontains(fn):
1294 1273 if (listignored or mexact(fn)) and dirignore(fn):
1295 1274 if listignored:
1296 1275 iadd(fn)
1297 1276 else:
1298 1277 uadd(fn)
1299 1278 continue
1300 1279
1301 1280 t = dget(fn)
1302 1281 mode = t.mode
1303 1282 size = t.size
1304 1283
1305 1284 if not st and t.tracked:
1306 1285 dadd(fn)
1307 1286 elif t.p2_info:
1308 1287 madd(fn)
1309 1288 elif t.added:
1310 1289 aadd(fn)
1311 1290 elif t.removed:
1312 1291 radd(fn)
1313 1292 elif t.tracked:
1314 1293 if not checklink and t.has_fallback_symlink:
1315 1294 # If the file system does not support symlink, the mode
1316 1295 # might not be correctly stored in the dirstate, so do not
1317 1296 # trust it.
1318 1297 ladd(fn)
1319 1298 elif not checkexec and t.has_fallback_exec:
1320 1299 # If the file system does not support exec bits, the mode
1321 1300 # might not be correctly stored in the dirstate, so do not
1322 1301 # trust it.
1323 1302 ladd(fn)
1324 1303 elif (
1325 1304 size >= 0
1326 1305 and (
1327 1306 (size != st.st_size and size != st.st_size & _rangemask)
1328 1307 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1329 1308 )
1330 1309 or fn in copymap
1331 1310 ):
1332 1311 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1333 1312 # issue6456: Size returned may be longer due to
1334 1313 # encryption on EXT-4 fscrypt, undecided.
1335 1314 ladd(fn)
1336 1315 else:
1337 1316 madd(fn)
1338 1317 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1339 1318 # There might be a change in the future if for example the
1340 1319 # internal clock is off, but this is a case where the issues
1341 1320 # the user would face would be a lot worse and there is
1342 1321 # nothing we can really do.
1343 1322 ladd(fn)
1344 1323 elif listclean:
1345 1324 cadd(fn)
1346 1325 status = scmutil.status(
1347 1326 modified, added, removed, deleted, unknown, ignored, clean
1348 1327 )
1349 1328 return (lookup, status, mtime_boundary)
1350 1329
1351 1330 def matches(self, match):
1352 1331 """
1353 1332 return files in the dirstate (in whatever state) filtered by match
1354 1333 """
1355 1334 dmap = self._map
1356 1335 if rustmod is not None:
1357 1336 dmap = self._map._map
1358 1337
1359 1338 if match.always():
1360 1339 return dmap.keys()
1361 1340 files = match.files()
1362 1341 if match.isexact():
1363 1342 # fast path -- filter the other way around, since typically files is
1364 1343 # much smaller than dmap
1365 1344 return [f for f in files if f in dmap]
1366 1345 if match.prefix() and all(fn in dmap for fn in files):
1367 1346 # fast path -- all the values are known to be files, so just return
1368 1347 # that
1369 1348 return list(files)
1370 1349 return [f for f in dmap if match(f)]
1371 1350
1372 1351 def _actualfilename(self, tr):
1373 1352 if tr:
1374 1353 return self._pendingfilename
1375 1354 else:
1376 1355 return self._filename
1377 1356
1378 1357 def savebackup(self, tr, backupname):
1379 1358 '''Save current dirstate into backup file'''
1380 1359 filename = self._actualfilename(tr)
1381 1360 assert backupname != filename
1382 1361
1383 1362 # use '_writedirstate' instead of 'write' to write changes certainly,
1384 1363 # because the latter omits writing out if transaction is running.
1385 1364 # output file will be used to create backup of dirstate at this point.
1386 1365 if self._dirty or not self._opener.exists(filename):
1387 1366 self._writedirstate(
1388 1367 tr,
1389 1368 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1390 1369 )
1391 1370
1392 1371 if tr:
1393 1372 # ensure that subsequent tr.writepending returns True for
1394 1373 # changes written out above, even if dirstate is never
1395 1374 # changed after this
1396 1375 tr.addfilegenerator(
1397 1376 b'dirstate',
1398 1377 (self._filename,),
1399 1378 lambda f: self._writedirstate(tr, f),
1400 1379 location=b'plain',
1401 1380 )
1402 1381
1403 1382 # ensure that pending file written above is unlinked at
1404 1383 # failure, even if tr.writepending isn't invoked until the
1405 1384 # end of this transaction
1406 1385 tr.registertmp(filename, location=b'plain')
1407 1386
1408 1387 self._opener.tryunlink(backupname)
1409 1388 # hardlink backup is okay because _writedirstate is always called
1410 1389 # with an "atomictemp=True" file.
1411 1390 util.copyfile(
1412 1391 self._opener.join(filename),
1413 1392 self._opener.join(backupname),
1414 1393 hardlink=True,
1415 1394 )
1416 1395
1417 1396 def restorebackup(self, tr, backupname):
1418 1397 '''Restore dirstate by backup file'''
1419 1398 # this "invalidate()" prevents "wlock.release()" from writing
1420 1399 # changes of dirstate out after restoring from backup file
1421 1400 self.invalidate()
1422 1401 filename = self._actualfilename(tr)
1423 1402 o = self._opener
1424 1403 if util.samefile(o.join(backupname), o.join(filename)):
1425 1404 o.unlink(backupname)
1426 1405 else:
1427 1406 o.rename(backupname, filename, checkambig=True)
1428 1407
1429 1408 def clearbackup(self, tr, backupname):
1430 1409 '''Clear backup file'''
1431 1410 self._opener.unlink(backupname)
1432 1411
1433 1412 def verify(self, m1, m2):
1434 1413 """check the dirstate content again the parent manifest and yield errors"""
1435 1414 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1436 1415 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1437 1416 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1438 1417 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1439 1418 for f, entry in self.items():
1440 1419 state = entry.state
1441 1420 if state in b"nr" and f not in m1:
1442 1421 yield (missing_from_p1, f, state)
1443 1422 if state in b"a" and f in m1:
1444 1423 yield (unexpected_in_p1, f, state)
1445 1424 if state in b"m" and f not in m1 and f not in m2:
1446 1425 yield (missing_from_ps, f, state)
1447 1426 for f in m1:
1448 1427 state = self.get_entry(f).state
1449 1428 if state not in b"nrm":
1450 1429 yield (missing_from_ds, f, state)
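The dirstate.py hunk above gates its mutators with the `requires_parents_change` / `requires_no_parents_change` decorators, and `setparents()` raises unless a `parentchange()` context is active. A rough sketch of how a caller is expected to sequence those calls, assuming `repo`, `tr`, `p1`, `p2` and the working-copy lock handling come from surrounding command code:

    def record_new_parent(repo, tr, p1, p2, filename):
        # sketch only: the wlock is assumed to be held by the caller
        dirstate = repo.dirstate
        dirstate.set_tracked(filename)   # must run outside a parent change
        with dirstate.parentchange():
            dirstate.setparents(p1, p2)  # only legal inside this context
            dirstate.update_file(
                filename,
                wc_tracked=True,
                p1_tracked=True,
            )
        dirstate.write(tr)               # deferred to the transaction when tr is given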
@@ -1,220 +1,209 b''
1 1 from __future__ import absolute_import, print_function
2 2
3 3 import contextlib
4 4
5 5 from . import util as interfaceutil
6 6
7 7
8 8 class idirstate(interfaceutil.Interface):
9 9 def __init__(
10 10 opener,
11 11 ui,
12 12 root,
13 13 validate,
14 14 sparsematchfn,
15 15 nodeconstants,
16 16 use_dirstate_v2,
17 17 ):
18 18 """Create a new dirstate object.
19 19
20 20 opener is an open()-like callable that can be used to open the
21 21 dirstate file; root is the root of the directory tracked by
22 22 the dirstate.
23 23 """
24 24
25 25 # TODO: all these private methods and attributes should be made
26 26 # public or removed from the interface.
27 27 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
28 28
29 29 def _ignorefiles():
30 30 """Return a list of files containing patterns to ignore."""
31 31
32 32 def _ignorefileandline(f):
33 33 """Given a file `f`, return the ignore file and line that ignores it."""
34 34
35 35 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
36 36 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
37 37
38 38 @contextlib.contextmanager
39 39 def parentchange():
40 40 """Context manager for handling dirstate parents.
41 41
42 42 If an exception occurs in the scope of the context manager,
43 43 the incoherent dirstate won't be written when wlock is
44 44 released.
45 45 """
46 46
47 47 def pendingparentchange():
48 48 """Returns true if the dirstate is in the middle of a set of changes
49 49 that modify the dirstate parent.
50 50 """
51 51
52 52 def hasdir(d):
53 53 pass
54 54
55 55 def flagfunc(buildfallback):
56 56 pass
57 57
58 58 def getcwd():
59 59 """Return the path from which a canonical path is calculated.
60 60
61 61 This path should be used to resolve file patterns or to convert
62 62 canonical paths back to file paths for display. It shouldn't be
63 63 used to get real file paths. Use vfs functions instead.
64 64 """
65 65
66 66 def pathto(f, cwd=None):
67 67 pass
68 68
69 def __getitem__(key):
70 """Return the current state of key (a filename) in the dirstate.
71
72 States are:
73 n normal
74 m needs merging
75 r marked for removal
76 a marked for addition
77 ? not tracked
78 """
79
80 69 def __contains__(key):
81 70 """Check if bytestring `key` is known to the dirstate."""
82 71
83 72 def __iter__():
84 73 """Iterate the dirstate's contained filenames as bytestrings."""
85 74
86 75 def items():
87 76 """Iterate the dirstate's entries as (filename, DirstateItem.
88 77
89 78 As usual, filename is a bytestring.
90 79 """
91 80
92 81 iteritems = items
93 82
94 83 def parents():
95 84 pass
96 85
97 86 def p1():
98 87 pass
99 88
100 89 def p2():
101 90 pass
102 91
103 92 def branch():
104 93 pass
105 94
106 95 def setparents(p1, p2=None):
107 96 """Set dirstate parents to p1 and p2.
108 97
109 98 When moving from two parents to one, 'm' merged entries are
110 99 adjusted to normal and previous copy records are discarded and
111 100 returned by the call.
112 101
113 102 See localrepo.setparents()
114 103 """
115 104
116 105 def setbranch(branch):
117 106 pass
118 107
119 108 def invalidate():
120 109 """Causes the next access to reread the dirstate.
121 110
122 111 This is different from localrepo.invalidatedirstate() because it always
123 112 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
124 113 check whether the dirstate has changed before rereading it."""
125 114
126 115 def copy(source, dest):
127 116 """Mark dest as a copy of source. Unmark dest if source is None."""
128 117
129 118 def copied(file):
130 119 pass
131 120
132 121 def copies():
133 122 pass
134 123
135 124 def normalize(path, isknown=False, ignoremissing=False):
136 125 """
137 126 normalize the case of a pathname when on a casefolding filesystem
138 127
139 128 isknown specifies whether the filename came from walking the
140 129 disk, to avoid extra filesystem access.
141 130
142 131 If ignoremissing is True, missing paths are returned
143 132 unchanged. Otherwise, we try harder to normalize possibly
144 133 existing path components.
145 134
146 135 The normalized case is determined based on the following precedence:
147 136
148 137 - version of name already stored in the dirstate
149 138 - version of name stored on disk
150 139 - version provided via command arguments
151 140 """
152 141
153 142 def clear():
154 143 pass
155 144
156 145 def rebuild(parent, allfiles, changedfiles=None):
157 146 pass
158 147
159 148 def identity():
160 149 """Return identity of dirstate it to detect changing in storage
161 150
162 151 If identity of previous dirstate is equal to this, writing
163 152 changes based on the former dirstate out can keep consistency.
164 153 """
165 154
166 155 def write(tr):
167 156 pass
168 157
169 158 def addparentchangecallback(category, callback):
170 159 """add a callback to be called when the wd parents are changed
171 160
172 161 Callback will be called with the following arguments:
173 162 dirstate, (oldp1, oldp2), (newp1, newp2)
174 163
175 164 Category is a unique identifier to allow overwriting an old callback
176 165 with a newer callback.
177 166 """
178 167
179 168 def walk(match, subrepos, unknown, ignored, full=True):
180 169 """
181 170 Walk recursively through the directory tree, finding all files
182 171 matched by match.
183 172
184 173 If full is False, maybe skip some known-clean files.
185 174
186 175 Return a dict mapping filename to stat-like object (either
187 176 mercurial.osutil.stat instance or return value of os.stat()).
188 177
189 178 """
190 179
191 180 def status(match, subrepos, ignored, clean, unknown):
192 181 """Determine the status of the working copy relative to the
193 182 dirstate and return a pair of (unsure, status), where status is of type
194 183 scmutil.status and:
195 184
196 185 unsure:
197 186 files that might have been modified since the dirstate was
198 187 written, but need to be read to be sure (size is the same
199 188 but mtime differs)
200 189 status.modified:
201 190 files that have definitely been modified since the dirstate
202 191 was written (different size or mode)
203 192 status.clean:
204 193 files that have definitely not been modified since the
205 194 dirstate was written
206 195 """
207 196
208 197 def matches(match):
209 198 """
210 199 return files in the dirstate (in whatever state) filtered by match
211 200 """
212 201
213 202 def savebackup(tr, backupname):
214 203 '''Save current dirstate into backup file'''
215 204
216 205 def restorebackup(tr, backupname):
217 206 '''Restore dirstate by backup file'''
218 207
219 208 def clearbackup(tr, backupname):
220 209 '''Clear backup file'''
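For completeness, a hedged consumer-side sketch of the `status()` contract documented above; the match construction and output handling are assumptions, and the third `mtime_boundary` element follows the concrete `dirstate.status()` in the first hunk rather than the interface docstring's "pair":

    from mercurial import match as matchmod

    def print_dirty(ui, repo):
        m = matchmod.always()
        unsure, st, mtime_boundary = repo.dirstate.status(
            m, subrepos=[], ignored=False, clean=False, unknown=False
        )
        for f in sorted(st.modified + st.added + st.removed + unsure):
            ui.write(b'%s\n' % f)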