dirstate: drop workaround for '.' matching root directory...
Martin von Zweigbergk
r42529:448486e1 default
@@ -1,1523 +1,1521
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import os
14 14 import stat
15 15
16 16 from .i18n import _
17 17 from .node import nullid
18 18 from . import (
19 19 encoding,
20 20 error,
21 21 match as matchmod,
22 22 pathutil,
23 23 policy,
24 24 pycompat,
25 25 scmutil,
26 26 txnutil,
27 27 util,
28 28 )
29 29
30 30 try:
31 31 from . import rustext
32 32 rustext.__name__ # force actual import (see hgdemandimport)
33 33 except ImportError:
34 34 rustext = None
35 35
36 36 parsers = policy.importmod(r'parsers')
37 37
38 38 propertycache = util.propertycache
39 39 filecache = scmutil.filecache
40 40 _rangemask = 0x7fffffff
41 41
42 42 dirstatetuple = parsers.dirstatetuple
43 43
44 44 class repocache(filecache):
45 45 """filecache for files in .hg/"""
46 46 def join(self, obj, fname):
47 47 return obj._opener.join(fname)
48 48
49 49 class rootcache(filecache):
50 50 """filecache for files in the repository root"""
51 51 def join(self, obj, fname):
52 52 return obj._join(fname)
53 53
54 54 def _getfsnow(vfs):
55 55 '''Get "now" timestamp on filesystem'''
56 56 tmpfd, tmpname = vfs.mkstemp()
57 57 try:
58 58 return os.fstat(tmpfd)[stat.ST_MTIME]
59 59 finally:
60 60 os.close(tmpfd)
61 61 vfs.unlink(tmpname)
62 62
63 63 class dirstate(object):
64 64
65 65 def __init__(self, opener, ui, root, validate, sparsematchfn):
66 66 '''Create a new dirstate object.
67 67
68 68 opener is an open()-like callable that can be used to open the
69 69 dirstate file; root is the root of the directory tracked by
70 70 the dirstate.
71 71 '''
72 72 self._opener = opener
73 73 self._validate = validate
74 74 self._root = root
75 75 self._sparsematchfn = sparsematchfn
76 76 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is a
77 77 # UNC path pointing to a root share (issue4557)
78 78 self._rootdir = pathutil.normasprefix(root)
79 79 self._dirty = False
80 80 self._lastnormaltime = 0
81 81 self._ui = ui
82 82 self._filecache = {}
83 83 self._parentwriters = 0
84 84 self._filename = 'dirstate'
85 85 self._pendingfilename = '%s.pending' % self._filename
86 86 self._plchangecallbacks = {}
87 87 self._origpl = None
88 88 self._updatedfiles = set()
89 89 self._mapcls = dirstatemap
90 90 # Access and cache cwd early, so we don't access it for the first time
91 91 # after a working-copy update caused it to not exist (accessing it then
92 92 # raises an exception).
93 93 self._cwd
94 94
95 95 @contextlib.contextmanager
96 96 def parentchange(self):
97 97 '''Context manager for handling dirstate parents.
98 98
99 99 If an exception occurs in the scope of the context manager,
100 100 the incoherent dirstate won't be written when wlock is
101 101 released.
102 102 '''
103 103 self._parentwriters += 1
104 104 yield
105 105 # Typically we want the "undo" step of a context manager in a
106 106 # finally block so it happens even when an exception
107 107 # occurs. In this case, however, we only want to decrement
108 108 # parentwriters if the code in the with statement exits
109 109 # normally, so we don't have a try/finally here on purpose.
110 110 self._parentwriters -= 1
111 111
112 112 def pendingparentchange(self):
113 113 '''Returns true if the dirstate is in the middle of a set of changes
114 114 that modify the dirstate parent.
115 115 '''
116 116 return self._parentwriters > 0
117 117
118 118 @propertycache
119 119 def _map(self):
120 120 """Return the dirstate contents (see documentation for dirstatemap)."""
121 121 self._map = self._mapcls(self._ui, self._opener, self._root)
122 122 return self._map
123 123
124 124 @property
125 125 def _sparsematcher(self):
126 126 """The matcher for the sparse checkout.
127 127
128 128 The working directory may not include every file from a manifest. The
129 129 matcher obtained by this property will match a path if it is to be
130 130 included in the working directory.
131 131 """
132 132 # TODO there is potential to cache this property. For now, the matcher
133 133 # is resolved on every access. (But the called function does use a
134 134 # cache to keep the lookup fast.)
135 135 return self._sparsematchfn()
136 136
137 137 @repocache('branch')
138 138 def _branch(self):
139 139 try:
140 140 return self._opener.read("branch").strip() or "default"
141 141 except IOError as inst:
142 142 if inst.errno != errno.ENOENT:
143 143 raise
144 144 return "default"
145 145
146 146 @property
147 147 def _pl(self):
148 148 return self._map.parents()
149 149
150 150 def hasdir(self, d):
151 151 return self._map.hastrackeddir(d)
152 152
153 153 @rootcache('.hgignore')
154 154 def _ignore(self):
155 155 files = self._ignorefiles()
156 156 if not files:
157 157 return matchmod.never()
158 158
159 159 pats = ['include:%s' % f for f in files]
160 160 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
161 161
162 162 @propertycache
163 163 def _slash(self):
164 164 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
165 165
166 166 @propertycache
167 167 def _checklink(self):
168 168 return util.checklink(self._root)
169 169
170 170 @propertycache
171 171 def _checkexec(self):
172 172 return util.checkexec(self._root)
173 173
174 174 @propertycache
175 175 def _checkcase(self):
176 176 return not util.fscasesensitive(self._join('.hg'))
177 177
178 178 def _join(self, f):
179 179 # much faster than os.path.join()
180 180 # it's safe because f is always a relative path
181 181 return self._rootdir + f
182 182
183 183 def flagfunc(self, buildfallback):
184 184 if self._checklink and self._checkexec:
185 185 def f(x):
186 186 try:
187 187 st = os.lstat(self._join(x))
188 188 if util.statislink(st):
189 189 return 'l'
190 190 if util.statisexec(st):
191 191 return 'x'
192 192 except OSError:
193 193 pass
194 194 return ''
195 195 return f
196 196
197 197 fallback = buildfallback()
198 198 if self._checklink:
199 199 def f(x):
200 200 if os.path.islink(self._join(x)):
201 201 return 'l'
202 202 if 'x' in fallback(x):
203 203 return 'x'
204 204 return ''
205 205 return f
206 206 if self._checkexec:
207 207 def f(x):
208 208 if 'l' in fallback(x):
209 209 return 'l'
210 210 if util.isexec(self._join(x)):
211 211 return 'x'
212 212 return ''
213 213 return f
214 214 else:
215 215 return fallback
216 216
217 217 @propertycache
218 218 def _cwd(self):
219 219 # internal config: ui.forcecwd
220 220 forcecwd = self._ui.config('ui', 'forcecwd')
221 221 if forcecwd:
222 222 return forcecwd
223 223 return encoding.getcwd()
224 224
225 225 def getcwd(self):
226 226 '''Return the path from which a canonical path is calculated.
227 227
228 228 This path should be used to resolve file patterns or to convert
229 229 canonical paths back to file paths for display. It shouldn't be
230 230 used to get real file paths. Use vfs functions instead.
231 231 '''
232 232 cwd = self._cwd
233 233 if cwd == self._root:
234 234 return ''
235 235 # self._root ends with a path separator if self._root is '/' or 'C:\'
236 236 rootsep = self._root
237 237 if not util.endswithsep(rootsep):
238 238 rootsep += pycompat.ossep
239 239 if cwd.startswith(rootsep):
240 240 return cwd[len(rootsep):]
241 241 else:
242 242 # we're outside the repo. return an absolute path.
243 243 return cwd
244 244
245 245 def pathto(self, f, cwd=None):
246 246 if cwd is None:
247 247 cwd = self.getcwd()
248 248 path = util.pathto(self._root, cwd, f)
249 249 if self._slash:
250 250 return util.pconvert(path)
251 251 return path
252 252
253 253 def __getitem__(self, key):
254 254 '''Return the current state of key (a filename) in the dirstate.
255 255
256 256 States are:
257 257 n normal
258 258 m needs merging
259 259 r marked for removal
260 260 a marked for addition
261 261 ? not tracked
262 262 '''
263 263 return self._map.get(key, ("?",))[0]
264 264
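# Illustrative sketch, not part of dirstate.py: querying the state of a file
# with the one-character codes documented above. Assumes `repo` is an
# already-loaded localrepository; b'setup.py' is just an example filename, and
# filenames are repo-relative byte strings.
state = repo.dirstate[b'setup.py']
if state == b'?':
    repo.ui.write(b'setup.py is not tracked\n')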
265 265 def __contains__(self, key):
266 266 return key in self._map
267 267
268 268 def __iter__(self):
269 269 return iter(sorted(self._map))
270 270
271 271 def items(self):
272 272 return self._map.iteritems()
273 273
274 274 iteritems = items
275 275
276 276 def parents(self):
277 277 return [self._validate(p) for p in self._pl]
278 278
279 279 def p1(self):
280 280 return self._validate(self._pl[0])
281 281
282 282 def p2(self):
283 283 return self._validate(self._pl[1])
284 284
285 285 def branch(self):
286 286 return encoding.tolocal(self._branch)
287 287
288 288 def setparents(self, p1, p2=nullid):
289 289 """Set dirstate parents to p1 and p2.
290 290
291 291 When moving from two parents to one, 'm' merged entries are
292 292 adjusted to normal, and previous copy records are discarded and
293 293 returned by the call.
294 294
295 295 See localrepo.setparents()
296 296 """
297 297 if self._parentwriters == 0:
298 298 raise ValueError("cannot set dirstate parent outside of "
299 299 "dirstate.parentchange context manager")
300 300
301 301 self._dirty = True
302 302 oldp2 = self._pl[1]
303 303 if self._origpl is None:
304 304 self._origpl = self._pl
305 305 self._map.setparents(p1, p2)
306 306 copies = {}
307 307 if oldp2 != nullid and p2 == nullid:
308 308 candidatefiles = self._map.nonnormalset.union(
309 309 self._map.otherparentset)
310 310 for f in candidatefiles:
311 311 s = self._map.get(f)
312 312 if s is None:
313 313 continue
314 314
315 315 # Discard 'm' markers when moving away from a merge state
316 316 if s[0] == 'm':
317 317 source = self._map.copymap.get(f)
318 318 if source:
319 319 copies[f] = source
320 320 self.normallookup(f)
321 321 # Also fix up otherparent markers
322 322 elif s[0] == 'n' and s[2] == -2:
323 323 source = self._map.copymap.get(f)
324 324 if source:
325 325 copies[f] = source
326 326 self.add(f)
327 327 return copies
328 328
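# Illustrative sketch, not part of dirstate.py: setparents() refuses to run
# outside of the parentchange() context manager, so callers pair them as
# below. Assumes `repo` is an already-loaded localrepository.
with repo.wlock():
    with repo.dirstate.parentchange():
        # set the working copy's single parent to the current checkout
        repo.dirstate.setparents(repo[b'.'].node())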
329 329 def setbranch(self, branch):
330 330 self.__class__._branch.set(self, encoding.fromlocal(branch))
331 331 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
332 332 try:
333 333 f.write(self._branch + '\n')
334 334 f.close()
335 335
336 336 # make sure filecache has the correct stat info for _branch after
337 337 # replacing the underlying file
338 338 ce = self._filecache['_branch']
339 339 if ce:
340 340 ce.refresh()
341 341 except: # re-raises
342 342 f.discard()
343 343 raise
344 344
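# Illustrative sketch, not part of dirstate.py: reading and writing the
# working-copy branch. branch() returns the locally-encoded name and
# setbranch() persists the new name to .hg/branch. Assumes `repo` is an
# already-loaded localrepository with its wlock held; b'stable' is just an
# example branch name.
current = repo.dirstate.branch()
repo.dirstate.setbranch(b'stable')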
345 345 def invalidate(self):
346 346 '''Causes the next access to reread the dirstate.
347 347
348 348 This is different from localrepo.invalidatedirstate() because it always
349 349 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
350 350 check whether the dirstate has changed before rereading it.'''
351 351
352 352 for a in (r"_map", r"_branch", r"_ignore"):
353 353 if a in self.__dict__:
354 354 delattr(self, a)
355 355 self._lastnormaltime = 0
356 356 self._dirty = False
357 357 self._updatedfiles.clear()
358 358 self._parentwriters = 0
359 359 self._origpl = None
360 360
361 361 def copy(self, source, dest):
362 362 """Mark dest as a copy of source. Unmark dest if source is None."""
363 363 if source == dest:
364 364 return
365 365 self._dirty = True
366 366 if source is not None:
367 367 self._map.copymap[dest] = source
368 368 self._updatedfiles.add(source)
369 369 self._updatedfiles.add(dest)
370 370 elif self._map.copymap.pop(dest, None):
371 371 self._updatedfiles.add(dest)
372 372
373 373 def copied(self, file):
374 374 return self._map.copymap.get(file, None)
375 375
376 376 def copies(self):
377 377 return self._map.copymap
378 378
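# Illustrative sketch, not part of dirstate.py: recording and querying a copy.
# copied() returns the recorded source, or None. Assumes `repo` is an
# already-loaded localrepository with its wlock held, and that the example
# destination b'new.py' has already been added to the dirstate.
repo.dirstate.copy(b'orig.py', b'new.py')
assert repo.dirstate.copied(b'new.py') == b'orig.py'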
379 379 def _addpath(self, f, state, mode, size, mtime):
380 380 oldstate = self[f]
381 381 if state == 'a' or oldstate == 'r':
382 382 scmutil.checkfilename(f)
383 383 if self._map.hastrackeddir(f):
384 384 raise error.Abort(_('directory %r already in dirstate') %
385 385 pycompat.bytestr(f))
386 386 # shadows: a parent directory of f must not be tracked as a file
387 387 for d in util.finddirs(f):
388 388 if self._map.hastrackeddir(d):
389 389 break
390 390 entry = self._map.get(d)
391 391 if entry is not None and entry[0] != 'r':
392 392 raise error.Abort(
393 393 _('file %r in dirstate clashes with %r') %
394 394 (pycompat.bytestr(d), pycompat.bytestr(f)))
395 395 self._dirty = True
396 396 self._updatedfiles.add(f)
397 397 self._map.addfile(f, oldstate, state, mode, size, mtime)
398 398
399 399 def normal(self, f):
400 400 '''Mark a file normal and clean.'''
401 401 s = os.lstat(self._join(f))
402 402 mtime = s[stat.ST_MTIME]
403 403 self._addpath(f, 'n', s.st_mode,
404 404 s.st_size & _rangemask, mtime & _rangemask)
405 405 self._map.copymap.pop(f, None)
406 406 if f in self._map.nonnormalset:
407 407 self._map.nonnormalset.remove(f)
408 408 if mtime > self._lastnormaltime:
409 409 # Remember the most recent modification timeslot for status(),
410 410 # to make sure we won't miss future size-preserving file content
411 411 # modifications that happen within the same timeslot.
412 412 self._lastnormaltime = mtime
413 413
414 414 def normallookup(self, f):
415 415 '''Mark a file normal, but possibly dirty.'''
416 416 if self._pl[1] != nullid:
417 417 # if there is a merge going on and the file was either
418 418 # in state 'm' (-1) or coming from other parent (-2) before
419 419 # being removed, restore that state.
420 420 entry = self._map.get(f)
421 421 if entry is not None:
422 422 if entry[0] == 'r' and entry[2] in (-1, -2):
423 423 source = self._map.copymap.get(f)
424 424 if entry[2] == -1:
425 425 self.merge(f)
426 426 elif entry[2] == -2:
427 427 self.otherparent(f)
428 428 if source:
429 429 self.copy(source, f)
430 430 return
431 431 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
432 432 return
433 433 self._addpath(f, 'n', 0, -1, -1)
434 434 self._map.copymap.pop(f, None)
435 435
436 436 def otherparent(self, f):
437 437 '''Mark as coming from the other parent, always dirty.'''
438 438 if self._pl[1] == nullid:
439 439 raise error.Abort(_("setting %r to other parent "
440 440 "only allowed in merges") % f)
441 441 if f in self and self[f] == 'n':
442 442 # merge-like
443 443 self._addpath(f, 'm', 0, -2, -1)
444 444 else:
445 445 # add-like
446 446 self._addpath(f, 'n', 0, -2, -1)
447 447 self._map.copymap.pop(f, None)
448 448
449 449 def add(self, f):
450 450 '''Mark a file added.'''
451 451 self._addpath(f, 'a', 0, -1, -1)
452 452 self._map.copymap.pop(f, None)
453 453
454 454 def remove(self, f):
455 455 '''Mark a file removed.'''
456 456 self._dirty = True
457 457 oldstate = self[f]
458 458 size = 0
459 459 if self._pl[1] != nullid:
460 460 entry = self._map.get(f)
461 461 if entry is not None:
462 462 # backup the previous state
463 463 if entry[0] == 'm': # merge
464 464 size = -1
465 465 elif entry[0] == 'n' and entry[2] == -2: # other parent
466 466 size = -2
467 467 self._map.otherparentset.add(f)
468 468 self._updatedfiles.add(f)
469 469 self._map.removefile(f, oldstate, size)
470 470 if size == 0:
471 471 self._map.copymap.pop(f, None)
472 472
473 473 def merge(self, f):
474 474 '''Mark a file merged.'''
475 475 if self._pl[1] == nullid:
476 476 return self.normallookup(f)
477 477 return self.otherparent(f)
478 478
479 479 def drop(self, f):
480 480 '''Drop a file from the dirstate'''
481 481 oldstate = self[f]
482 482 if self._map.dropfile(f, oldstate):
483 483 self._dirty = True
484 484 self._updatedfiles.add(f)
485 485 self._map.copymap.pop(f, None)
486 486
487 487 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
488 488 if exists is None:
489 489 exists = os.path.lexists(os.path.join(self._root, path))
490 490 if not exists:
491 491 # Maybe a path component exists
492 492 if not ignoremissing and '/' in path:
493 493 d, f = path.rsplit('/', 1)
494 494 d = self._normalize(d, False, ignoremissing, None)
495 495 folded = d + "/" + f
496 496 else:
497 497 # No path components, preserve original case
498 498 folded = path
499 499 else:
500 500 # recursively normalize leading directory components
501 501 # against dirstate
502 502 if '/' in normed:
503 503 d, f = normed.rsplit('/', 1)
504 504 d = self._normalize(d, False, ignoremissing, True)
505 505 r = self._root + "/" + d
506 506 folded = d + "/" + util.fspath(f, r)
507 507 else:
508 508 folded = util.fspath(normed, self._root)
509 509 storemap[normed] = folded
510 510
511 511 return folded
512 512
513 513 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
514 514 normed = util.normcase(path)
515 515 folded = self._map.filefoldmap.get(normed, None)
516 516 if folded is None:
517 517 if isknown:
518 518 folded = path
519 519 else:
520 520 folded = self._discoverpath(path, normed, ignoremissing, exists,
521 521 self._map.filefoldmap)
522 522 return folded
523 523
524 524 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
525 525 normed = util.normcase(path)
526 526 folded = self._map.filefoldmap.get(normed, None)
527 527 if folded is None:
528 528 folded = self._map.dirfoldmap.get(normed, None)
529 529 if folded is None:
530 530 if isknown:
531 531 folded = path
532 532 else:
533 533 # store discovered result in dirfoldmap so that future
534 534 # normalizefile calls don't start matching directories
535 535 folded = self._discoverpath(path, normed, ignoremissing, exists,
536 536 self._map.dirfoldmap)
537 537 return folded
538 538
539 539 def normalize(self, path, isknown=False, ignoremissing=False):
540 540 '''
541 541 normalize the case of a pathname when on a casefolding filesystem
542 542
543 543 isknown specifies whether the filename came from walking the
544 544 disk, to avoid extra filesystem access.
545 545
546 546 If ignoremissing is True, missing paths are returned
547 547 unchanged. Otherwise, we try harder to normalize possibly
548 548 existing path components.
549 549
550 550 The normalized case is determined based on the following precedence:
551 551
552 552 - version of name already stored in the dirstate
553 553 - version of name stored on disk
554 554 - version provided via command arguments
555 555 '''
556 556
557 557 if self._checkcase:
558 558 return self._normalize(path, isknown, ignoremissing)
559 559 return path
560 560
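# Illustrative sketch, not part of dirstate.py: on a case-folding filesystem
# (the default on Windows and macOS), normalize() recovers the canonical
# spelling of a user-supplied path following the precedence documented above;
# on case-sensitive filesystems it returns the path unchanged. Assumes `repo`
# is an already-loaded localrepository tracking a file named b'README'.
canonical = repo.dirstate.normalize(b'readme')  # b'README' when case-folding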
561 561 def clear(self):
562 562 self._map.clear()
563 563 self._lastnormaltime = 0
564 564 self._updatedfiles.clear()
565 565 self._dirty = True
566 566
567 567 def rebuild(self, parent, allfiles, changedfiles=None):
568 568 if changedfiles is None:
569 569 # Rebuild entire dirstate
570 570 changedfiles = allfiles
571 571 lastnormaltime = self._lastnormaltime
572 572 self.clear()
573 573 self._lastnormaltime = lastnormaltime
574 574
575 575 if self._origpl is None:
576 576 self._origpl = self._pl
577 577 self._map.setparents(parent, nullid)
578 578 for f in changedfiles:
579 579 if f in allfiles:
580 580 self.normallookup(f)
581 581 else:
582 582 self.drop(f)
583 583
584 584 self._dirty = True
585 585
586 586 def identity(self):
587 587 '''Return identity of dirstate itself to detect changes in storage
588 588
589 589 If the identity of the previous dirstate is equal to this one, writing
590 590 changes out based on the former dirstate can preserve consistency.
591 591 '''
592 592 return self._map.identity
593 593
594 594 def write(self, tr):
595 595 if not self._dirty:
596 596 return
597 597
598 598 filename = self._filename
599 599 if tr:
600 600 # 'dirstate.write()' is not only for writing in-memory
601 601 # changes out, but also for dropping ambiguous timestamps.
602 602 # Delayed writing re-raises the "ambiguous timestamp issue".
603 603 # See also the wiki page below for detail:
604 604 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
605 605
606 606 # emulate dropping timestamp in 'parsers.pack_dirstate'
607 607 now = _getfsnow(self._opener)
608 608 self._map.clearambiguoustimes(self._updatedfiles, now)
609 609
610 610 # emulate that all 'dirstate.normal' results are written out
611 611 self._lastnormaltime = 0
612 612 self._updatedfiles.clear()
613 613
614 614 # delay writing in-memory changes out
615 615 tr.addfilegenerator('dirstate', (self._filename,),
616 616 self._writedirstate, location='plain')
617 617 return
618 618
619 619 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
620 620 self._writedirstate(st)
621 621
622 622 def addparentchangecallback(self, category, callback):
623 623 """add a callback to be called when the wd parents are changed
624 624
625 625 Callback will be called with the following arguments:
626 626 dirstate, (oldp1, oldp2), (newp1, newp2)
627 627
628 628 Category is a unique identifier to allow overwriting an old callback
629 629 with a newer callback.
630 630 """
631 631 self._plchangecallbacks[category] = callback
632 632
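# Illustrative sketch, not part of dirstate.py: registering a callback that
# fires when the working-copy parents change, with the argument order
# documented above. The callback name and the category b'example' are
# arbitrary; assumes `repo` is an already-loaded localrepository.
def _onparentchange(dirstate, oldparents, newparents):
    repo.ui.debug(b'dirstate parents changed\n')

repo.dirstate.addparentchangecallback(b'example', _onparentchange)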
633 633 def _writedirstate(self, st):
634 634 # notify callbacks about parents change
635 635 if self._origpl is not None and self._origpl != self._pl:
636 636 for c, callback in sorted(self._plchangecallbacks.iteritems()):
637 637 callback(self, self._origpl, self._pl)
638 638 self._origpl = None
639 639 # use the modification time of the newly created temporary file as the
640 640 # filesystem's notion of 'now'
641 641 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
642 642
643 643 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
644 644 # the timestamp of each entry in the dirstate, because of 'now > mtime'
645 645 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
646 646 if delaywrite > 0:
647 647 # do we have any files to delay for?
648 648 for f, e in self._map.iteritems():
649 649 if e[0] == 'n' and e[3] == now:
650 650 import time # to avoid useless import
651 651 # rather than sleep n seconds, sleep until the next
652 652 # multiple of n seconds
653 653 clock = time.time()
654 654 start = int(clock) - (int(clock) % delaywrite)
655 655 end = start + delaywrite
656 656 time.sleep(end - clock)
657 657 now = end # trust our estimate that the end is near now
658 658 break
659 659
660 660 self._map.write(st, now)
661 661 self._lastnormaltime = 0
662 662 self._dirty = False
663 663
664 664 def _dirignore(self, f):
665 if f == '.':
666 return False
667 665 if self._ignore(f):
668 666 return True
669 667 for p in util.finddirs(f):
670 668 if self._ignore(p):
671 669 return True
672 670 return False
673 671
674 672 def _ignorefiles(self):
675 673 files = []
676 674 if os.path.exists(self._join('.hgignore')):
677 675 files.append(self._join('.hgignore'))
678 676 for name, path in self._ui.configitems("ui"):
679 677 if name == 'ignore' or name.startswith('ignore.'):
680 678 # we need to use os.path.join here rather than self._join
681 679 # because path is arbitrary and user-specified
682 680 files.append(os.path.join(self._rootdir, util.expandpath(path)))
683 681 return files
684 682
685 683 def _ignorefileandline(self, f):
686 684 files = collections.deque(self._ignorefiles())
687 685 visited = set()
688 686 while files:
689 687 i = files.popleft()
690 688 patterns = matchmod.readpatternfile(i, self._ui.warn,
691 689 sourceinfo=True)
692 690 for pattern, lineno, line in patterns:
693 691 kind, p = matchmod._patsplit(pattern, 'glob')
694 692 if kind == "subinclude":
695 693 if p not in visited:
696 694 files.append(p)
697 695 continue
698 696 m = matchmod.match(self._root, '', [], [pattern],
699 697 warn=self._ui.warn)
700 698 if m(f):
701 699 return (i, lineno, line)
702 700 visited.add(i)
703 701 return (None, -1, "")
704 702
705 703 def _walkexplicit(self, match, subrepos):
706 704 '''Get stat data about the files explicitly specified by match.
707 705
708 706 Return a triple (results, dirsfound, dirsnotfound).
709 707 - results is a mapping from filename to stat result. It also contains
710 708 listings mapping subrepos and .hg to None.
711 709 - dirsfound is a list of files found to be directories.
712 710 - dirsnotfound is a list of files that the dirstate thinks are
713 711 directories and that were not found.'''
714 712
715 713 def badtype(mode):
716 714 kind = _('unknown')
717 715 if stat.S_ISCHR(mode):
718 716 kind = _('character device')
719 717 elif stat.S_ISBLK(mode):
720 718 kind = _('block device')
721 719 elif stat.S_ISFIFO(mode):
722 720 kind = _('fifo')
723 721 elif stat.S_ISSOCK(mode):
724 722 kind = _('socket')
725 723 elif stat.S_ISDIR(mode):
726 724 kind = _('directory')
727 725 return _('unsupported file type (type is %s)') % kind
728 726
729 727 matchedir = match.explicitdir
730 728 badfn = match.bad
731 729 dmap = self._map
732 730 lstat = os.lstat
733 731 getkind = stat.S_IFMT
734 732 dirkind = stat.S_IFDIR
735 733 regkind = stat.S_IFREG
736 734 lnkkind = stat.S_IFLNK
737 735 join = self._join
738 736 dirsfound = []
739 737 foundadd = dirsfound.append
740 738 dirsnotfound = []
741 739 notfoundadd = dirsnotfound.append
742 740
743 741 if not match.isexact() and self._checkcase:
744 742 normalize = self._normalize
745 743 else:
746 744 normalize = None
747 745
748 746 files = sorted(match.files())
749 747 subrepos.sort()
750 748 i, j = 0, 0
751 749 while i < len(files) and j < len(subrepos):
752 750 subpath = subrepos[j] + "/"
753 751 if files[i] < subpath:
754 752 i += 1
755 753 continue
756 754 while i < len(files) and files[i].startswith(subpath):
757 755 del files[i]
758 756 j += 1
759 757
760 758 if not files or '' in files:
761 759 files = ['']
762 760 # constructing the foldmap is expensive, so don't do it for the
763 761 # common case where files is ['']
764 762 normalize = None
765 763 results = dict.fromkeys(subrepos)
766 764 results['.hg'] = None
767 765
768 766 for ff in files:
769 767 if normalize:
770 768 nf = normalize(ff, False, True)
771 769 else:
772 770 nf = ff
773 771 if nf in results:
774 772 continue
775 773
776 774 try:
777 775 st = lstat(join(nf))
778 776 kind = getkind(st.st_mode)
779 777 if kind == dirkind:
780 778 if nf in dmap:
781 779 # file replaced by dir on disk but still in dirstate
782 780 results[nf] = None
783 781 if matchedir:
784 782 matchedir(nf)
785 783 foundadd((nf, ff))
786 784 elif kind == regkind or kind == lnkkind:
787 785 results[nf] = st
788 786 else:
789 787 badfn(ff, badtype(kind))
790 788 if nf in dmap:
791 789 results[nf] = None
792 790 except OSError as inst: # nf not found on disk - it is dirstate only
793 791 if nf in dmap: # does it exactly match a missing file?
794 792 results[nf] = None
795 793 else: # does it match a missing directory?
796 794 if self._map.hasdir(nf):
797 795 if matchedir:
798 796 matchedir(nf)
799 797 notfoundadd(nf)
800 798 else:
801 799 badfn(ff, encoding.strtolocal(inst.strerror))
802 800
803 801 # match.files() may contain explicitly-specified paths that shouldn't
804 802 # be taken; drop them from the list of files found. dirsfound/notfound
805 803 # aren't filtered here because they will be tested later.
806 804 if match.anypats():
807 805 for f in list(results):
808 806 if f == '.hg' or f in subrepos:
809 807 # keep sentinel to disable further out-of-repo walks
810 808 continue
811 809 if not match(f):
812 810 del results[f]
813 811
814 812 # Case insensitive filesystems cannot rely on lstat() failing to detect
815 813 # a case-only rename. Prune the stat object for any file that does not
816 814 # match the case in the filesystem, if there are multiple files that
817 815 # normalize to the same path.
818 816 if match.isexact() and self._checkcase:
819 817 normed = {}
820 818
821 819 for f, st in results.iteritems():
822 820 if st is None:
823 821 continue
824 822
825 823 nc = util.normcase(f)
826 824 paths = normed.get(nc)
827 825
828 826 if paths is None:
829 827 paths = set()
830 828 normed[nc] = paths
831 829
832 830 paths.add(f)
833 831
834 832 for norm, paths in normed.iteritems():
835 833 if len(paths) > 1:
836 834 for path in paths:
837 835 folded = self._discoverpath(path, norm, True, None,
838 836 self._map.dirfoldmap)
839 837 if path != folded:
840 838 results[path] = None
841 839
842 840 return results, dirsfound, dirsnotfound
843 841
844 842 def walk(self, match, subrepos, unknown, ignored, full=True):
845 843 '''
846 844 Walk recursively through the directory tree, finding all files
847 845 matched by match.
848 846
849 847 If full is False, maybe skip some known-clean files.
850 848
851 849 Return a dict mapping filename to stat-like object (either
852 850 mercurial.osutil.stat instance or return value of os.stat()).
853 851
854 852 '''
855 853 # full is a flag that extensions that hook into walk can use -- this
856 854 # implementation doesn't use it at all. This satisfies the contract
857 855 # because we only guarantee a "maybe".
858 856
859 857 if ignored:
860 858 ignore = util.never
861 859 dirignore = util.never
862 860 elif unknown:
863 861 ignore = self._ignore
864 862 dirignore = self._dirignore
865 863 else:
866 864 # if not unknown and not ignored, drop dir recursion and step 2
867 865 ignore = util.always
868 866 dirignore = util.always
869 867
870 868 matchfn = match.matchfn
871 869 matchalways = match.always()
872 870 matchtdir = match.traversedir
873 871 dmap = self._map
874 872 listdir = util.listdir
875 873 lstat = os.lstat
876 874 dirkind = stat.S_IFDIR
877 875 regkind = stat.S_IFREG
878 876 lnkkind = stat.S_IFLNK
879 877 join = self._join
880 878
881 879 exact = skipstep3 = False
882 880 if match.isexact(): # match.exact
883 881 exact = True
884 882 dirignore = util.always # skip step 2
885 883 elif match.prefix(): # match.match, no patterns
886 884 skipstep3 = True
887 885
888 886 if not exact and self._checkcase:
889 887 normalize = self._normalize
890 888 normalizefile = self._normalizefile
891 889 skipstep3 = False
892 890 else:
893 891 normalize = self._normalize
894 892 normalizefile = None
895 893
896 894 # step 1: find all explicit files
897 895 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
898 896
899 897 skipstep3 = skipstep3 and not (work or dirsnotfound)
900 898 work = [d for d in work if not dirignore(d[0])]
901 899
902 900 # step 2: visit subdirectories
903 901 def traverse(work, alreadynormed):
904 902 wadd = work.append
905 903 while work:
906 904 nd = work.pop()
907 905 visitentries = match.visitchildrenset(nd)
908 906 if not visitentries:
909 907 continue
910 908 if visitentries == 'this' or visitentries == 'all':
911 909 visitentries = None
912 910 skip = None
913 911 if nd != '':
914 912 skip = '.hg'
915 913 try:
916 914 entries = listdir(join(nd), stat=True, skip=skip)
917 915 except OSError as inst:
918 916 if inst.errno in (errno.EACCES, errno.ENOENT):
919 917 match.bad(self.pathto(nd),
920 918 encoding.strtolocal(inst.strerror))
921 919 continue
922 920 raise
923 921 for f, kind, st in entries:
924 922 # Some matchers may return files in the visitentries set,
925 923 # instead of 'this', if the matcher explicitly mentions them
926 924 # and is not an exactmatcher. This is acceptable; we do not
927 925 # make any hard assumptions about file-or-directory below
928 926 # based on the presence of `f` in visitentries. If
929 927 # visitchildrenset returned a set, we can always skip the
930 928 # entries *not* in the set it provided regardless of whether
931 929 # they're actually a file or a directory.
932 930 if visitentries and f not in visitentries:
933 931 continue
934 932 if normalizefile:
935 933 # even though f might be a directory, we're only
936 934 # interested in comparing it to files currently in the
937 935 # dmap -- therefore normalizefile is enough
938 936 nf = normalizefile(nd and (nd + "/" + f) or f, True,
939 937 True)
940 938 else:
941 939 nf = nd and (nd + "/" + f) or f
942 940 if nf not in results:
943 941 if kind == dirkind:
944 942 if not ignore(nf):
945 943 if matchtdir:
946 944 matchtdir(nf)
947 945 wadd(nf)
948 946 if nf in dmap and (matchalways or matchfn(nf)):
949 947 results[nf] = None
950 948 elif kind == regkind or kind == lnkkind:
951 949 if nf in dmap:
952 950 if matchalways or matchfn(nf):
953 951 results[nf] = st
954 952 elif ((matchalways or matchfn(nf))
955 953 and not ignore(nf)):
956 954 # unknown file -- normalize if necessary
957 955 if not alreadynormed:
958 956 nf = normalize(nf, False, True)
959 957 results[nf] = st
960 958 elif nf in dmap and (matchalways or matchfn(nf)):
961 959 results[nf] = None
962 960
963 961 for nd, d in work:
964 962 # alreadynormed means that traverse() doesn't have to do any
965 963 # expensive directory normalization
966 964 alreadynormed = not normalize or nd == d
967 965 traverse([d], alreadynormed)
968 966
969 967 for s in subrepos:
970 968 del results[s]
971 969 del results['.hg']
972 970
973 971 # step 3: visit remaining files from dmap
974 972 if not skipstep3 and not exact:
975 973 # If a dmap file is not in results yet, it was either
976 974 # a) not matching matchfn, b) ignored, c) missing, or d) under a
977 975 # symlink directory.
978 976 if not results and matchalways:
979 977 visit = [f for f in dmap]
980 978 else:
981 979 visit = [f for f in dmap if f not in results and matchfn(f)]
982 980 visit.sort()
983 981
984 982 if unknown:
985 983 # unknown == True means we walked all dirs under the roots
986 984 # that weren't ignored, and everything that matched was stat'ed
987 985 # and is already in results.
988 986 # The rest must thus be ignored or under a symlink.
989 987 audit_path = pathutil.pathauditor(self._root, cached=True)
990 988
991 989 for nf in iter(visit):
992 990 # If a stat for the same file was already added with a
993 991 # different case, don't add one for this, since that would
994 992 # make it appear as if the file exists under both names
995 993 # on disk.
996 994 if (normalizefile and
997 995 normalizefile(nf, True, True) in results):
998 996 results[nf] = None
999 997 # Report ignored items in the dmap as long as they are not
1000 998 # under a symlink directory.
1001 999 elif audit_path.check(nf):
1002 1000 try:
1003 1001 results[nf] = lstat(join(nf))
1004 1002 # file was just ignored, no links, and exists
1005 1003 except OSError:
1006 1004 # file doesn't exist
1007 1005 results[nf] = None
1008 1006 else:
1009 1007 # It's either missing or under a symlink directory
1010 1008 # which we in this case report as missing
1011 1009 results[nf] = None
1012 1010 else:
1013 1011 # We may not have walked the full directory tree above,
1014 1012 # so stat and check everything we missed.
1015 1013 iv = iter(visit)
1016 1014 for st in util.statfiles([join(i) for i in visit]):
1017 1015 results[next(iv)] = st
1018 1016 return results
1019 1017
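# Illustrative sketch, not part of dirstate.py: walking every tracked and
# unknown file in the working copy. matchmod is the match module imported at
# the top of this file; an empty pattern list is assumed to yield a
# match-everything matcher, mirroring how matchers are built above. Assumes
# `repo` is an already-loaded localrepository.
m = matchmod.match(repo.root, b'', [])
for fn, st in repo.dirstate.walk(m, subrepos=[], unknown=True,
                                 ignored=False).items():
    pass  # st is a stat-like object, or None for files missing on disk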
1020 1018 def status(self, match, subrepos, ignored, clean, unknown):
1021 1019 '''Determine the status of the working copy relative to the
1022 1020 dirstate and return a pair of (unsure, status), where status is of type
1023 1021 scmutil.status and:
1024 1022
1025 1023 unsure:
1026 1024 files that might have been modified since the dirstate was
1027 1025 written, but need to be read to be sure (size is the same
1028 1026 but mtime differs)
1029 1027 status.modified:
1030 1028 files that have definitely been modified since the dirstate
1031 1029 was written (different size or mode)
1032 1030 status.clean:
1033 1031 files that have definitely not been modified since the
1034 1032 dirstate was written
1035 1033 '''
1036 1034 listignored, listclean, listunknown = ignored, clean, unknown
1037 1035 lookup, modified, added, unknown, ignored = [], [], [], [], []
1038 1036 removed, deleted, clean = [], [], []
1039 1037
1040 1038 dmap = self._map
1041 1039 dmap.preload()
1042 1040 dcontains = dmap.__contains__
1043 1041 dget = dmap.__getitem__
1044 1042 ladd = lookup.append # aka "unsure"
1045 1043 madd = modified.append
1046 1044 aadd = added.append
1047 1045 uadd = unknown.append
1048 1046 iadd = ignored.append
1049 1047 radd = removed.append
1050 1048 dadd = deleted.append
1051 1049 cadd = clean.append
1052 1050 mexact = match.exact
1053 1051 dirignore = self._dirignore
1054 1052 checkexec = self._checkexec
1055 1053 copymap = self._map.copymap
1056 1054 lastnormaltime = self._lastnormaltime
1057 1055
1058 1056 # We need to do full walks when either
1059 1057 # - we're listing all clean files, or
1060 1058 # - match.traversedir does something, because match.traversedir should
1061 1059 # be called for every dir in the working dir
1062 1060 full = listclean or match.traversedir is not None
1063 1061 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1064 1062 full=full).iteritems():
1065 1063 if not dcontains(fn):
1066 1064 if (listignored or mexact(fn)) and dirignore(fn):
1067 1065 if listignored:
1068 1066 iadd(fn)
1069 1067 else:
1070 1068 uadd(fn)
1071 1069 continue
1072 1070
1073 1071 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1074 1072 # written like that for performance reasons. dmap[fn] is not a
1075 1073 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1076 1074 # opcode has fast paths when the value to be unpacked is a tuple or
1077 1075 # a list, but falls back to creating a full-fledged iterator in
1078 1076 # general. That is much slower than simply accessing and storing the
1079 1077 # tuple members one by one.
1080 1078 t = dget(fn)
1081 1079 state = t[0]
1082 1080 mode = t[1]
1083 1081 size = t[2]
1084 1082 time = t[3]
1085 1083
1086 1084 if not st and state in "nma":
1087 1085 dadd(fn)
1088 1086 elif state == 'n':
1089 1087 if (size >= 0 and
1090 1088 ((size != st.st_size and size != st.st_size & _rangemask)
1091 1089 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1092 1090 or size == -2 # other parent
1093 1091 or fn in copymap):
1094 1092 madd(fn)
1095 1093 elif (time != st[stat.ST_MTIME]
1096 1094 and time != st[stat.ST_MTIME] & _rangemask):
1097 1095 ladd(fn)
1098 1096 elif st[stat.ST_MTIME] == lastnormaltime:
1099 1097 # fn may have just been marked as normal and it may have
1100 1098 # changed in the same second without changing its size.
1101 1099 # This can happen if we quickly do multiple commits.
1102 1100 # Force lookup, so we don't miss such a racy file change.
1103 1101 ladd(fn)
1104 1102 elif listclean:
1105 1103 cadd(fn)
1106 1104 elif state == 'm':
1107 1105 madd(fn)
1108 1106 elif state == 'a':
1109 1107 aadd(fn)
1110 1108 elif state == 'r':
1111 1109 radd(fn)
1112 1110
1113 1111 return (lookup, scmutil.status(modified, added, removed, deleted,
1114 1112 unknown, ignored, clean))
1115 1113
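# Illustrative sketch, not part of dirstate.py: computing working-copy status
# directly against the dirstate, using the (unsure, scmutil.status) return
# value documented above. Assumes `repo` is an already-loaded localrepository;
# matchmod is the match module imported at the top of this file.
m = matchmod.match(repo.root, b'', [])
unsure, st = repo.dirstate.status(m, subrepos=[], ignored=False,
                                  clean=False, unknown=True)
for fn in st.modified:
    repo.ui.write(b'M %s\n' % fn)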
1116 1114 def matches(self, match):
1117 1115 '''
1118 1116 return files in the dirstate (in whatever state) filtered by match
1119 1117 '''
1120 1118 dmap = self._map
1121 1119 if match.always():
1122 1120 return dmap.keys()
1123 1121 files = match.files()
1124 1122 if match.isexact():
1125 1123 # fast path -- filter the other way around, since typically files is
1126 1124 # much smaller than dmap
1127 1125 return [f for f in files if f in dmap]
1128 1126 if match.prefix() and all(fn in dmap for fn in files):
1129 1127 # fast path -- all the values are known to be files, so just return
1130 1128 # that
1131 1129 return list(files)
1132 1130 return [f for f in dmap if match(f)]
1133 1131
1134 1132 def _actualfilename(self, tr):
1135 1133 if tr:
1136 1134 return self._pendingfilename
1137 1135 else:
1138 1136 return self._filename
1139 1137
1140 1138 def savebackup(self, tr, backupname):
1141 1139 '''Save current dirstate into backup file'''
1142 1140 filename = self._actualfilename(tr)
1143 1141 assert backupname != filename
1144 1142
1145 1143 # use '_writedirstate' instead of 'write' to make sure changes are written,
1146 1144 # because the latter skips writing when a transaction is running.
1147 1145 # The output file will be used to create a backup of the dirstate at this point.
1148 1146 if self._dirty or not self._opener.exists(filename):
1149 1147 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1150 1148 checkambig=True))
1151 1149
1152 1150 if tr:
1153 1151 # ensure that subsequent tr.writepending returns True for
1154 1152 # changes written out above, even if dirstate is never
1155 1153 # changed after this
1156 1154 tr.addfilegenerator('dirstate', (self._filename,),
1157 1155 self._writedirstate, location='plain')
1158 1156
1159 1157 # ensure that pending file written above is unlinked at
1160 1158 # failure, even if tr.writepending isn't invoked until the
1161 1159 # end of this transaction
1162 1160 tr.registertmp(filename, location='plain')
1163 1161
1164 1162 self._opener.tryunlink(backupname)
1165 1163 # hardlink backup is okay because _writedirstate is always called
1166 1164 # with an "atomictemp=True" file.
1167 1165 util.copyfile(self._opener.join(filename),
1168 1166 self._opener.join(backupname), hardlink=True)
1169 1167
1170 1168 def restorebackup(self, tr, backupname):
1171 1169 '''Restore dirstate from backup file'''
1172 1170 # this "invalidate()" prevents "wlock.release()" from writing
1173 1171 # changes of dirstate out after restoring from backup file
1174 1172 self.invalidate()
1175 1173 filename = self._actualfilename(tr)
1176 1174 o = self._opener
1177 1175 if util.samefile(o.join(backupname), o.join(filename)):
1178 1176 o.unlink(backupname)
1179 1177 else:
1180 1178 o.rename(backupname, filename, checkambig=True)
1181 1179
1182 1180 def clearbackup(self, tr, backupname):
1183 1181 '''Clear backup file'''
1184 1182 self._opener.unlink(backupname)
1185 1183
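# Illustrative sketch, not part of dirstate.py: the backup API used by callers
# that may need to roll the dirstate back. Assumes `repo` is an already-loaded
# localrepository with its wlock held and no transaction running (tr=None);
# b'dirstate.example-backup' is an arbitrary backup name.
repo.dirstate.savebackup(None, b'dirstate.example-backup')
try:
    pass  # ... mutate the working copy / dirstate here ...
except Exception:
    repo.dirstate.restorebackup(None, b'dirstate.example-backup')
    raise
else:
    repo.dirstate.clearbackup(None, b'dirstate.example-backup')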
1186 1184 class dirstatemap(object):
1187 1185 """Map encapsulating the dirstate's contents.
1188 1186
1189 1187 The dirstate contains the following state:
1190 1188
1191 1189 - `identity` is the identity of the dirstate file, which can be used to
1192 1190 detect when changes have occurred to the dirstate file.
1193 1191
1194 1192 - `parents` is a pair containing the parents of the working copy. The
1195 1193 parents are updated by calling `setparents`.
1196 1194
1197 1195 - the state map maps filenames to tuples of (state, mode, size, mtime),
1198 1196 where state is a single character representing 'normal', 'added',
1199 1197 'removed', or 'merged'. It is read by treating the dirstate as a
1200 1198 dict. File state is updated by calling the `addfile`, `removefile` and
1201 1199 `dropfile` methods.
1202 1200
1203 1201 - `copymap` maps destination filenames to their source filename.
1204 1202
1205 1203 The dirstate also provides the following views onto the state:
1206 1204
1207 1205 - `nonnormalset` is a set of the filenames that have state other
1208 1206 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1209 1207
1210 1208 - `otherparentset` is a set of the filenames that are marked as coming
1211 1209 from the second parent when the dirstate is currently being merged.
1212 1210
1213 1211 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1214 1212 form that they appear as in the dirstate.
1215 1213
1216 1214 - `dirfoldmap` is a dict mapping normalized directory names to the
1217 1215 denormalized form that they appear as in the dirstate.
1218 1216 """
1219 1217
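# Illustrative sketch, not part of dirstate.py: the map and the views listed
# above are normally reached through dirstate._map (a private attribute, read
# here only for illustration). Assumes `repo` is an already-loaded
# localrepository.
dmap = repo.dirstate._map
for fn in dmap.nonnormalset:
    state, mode, size, mtime = dmap[fn]  # entries unpack to (state, mode, size, mtime)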
1220 1218 def __init__(self, ui, opener, root):
1221 1219 self._ui = ui
1222 1220 self._opener = opener
1223 1221 self._root = root
1224 1222 self._filename = 'dirstate'
1225 1223
1226 1224 self._parents = None
1227 1225 self._dirtyparents = False
1228 1226
1229 1227 # for consistent view between _pl() and _read() invocations
1230 1228 self._pendingmode = None
1231 1229
1232 1230 @propertycache
1233 1231 def _map(self):
1234 1232 self._map = {}
1235 1233 self.read()
1236 1234 return self._map
1237 1235
1238 1236 @propertycache
1239 1237 def copymap(self):
1240 1238 self.copymap = {}
1241 1239 self._map
1242 1240 return self.copymap
1243 1241
1244 1242 def clear(self):
1245 1243 self._map.clear()
1246 1244 self.copymap.clear()
1247 1245 self.setparents(nullid, nullid)
1248 1246 util.clearcachedproperty(self, "_dirs")
1249 1247 util.clearcachedproperty(self, "_alldirs")
1250 1248 util.clearcachedproperty(self, "filefoldmap")
1251 1249 util.clearcachedproperty(self, "dirfoldmap")
1252 1250 util.clearcachedproperty(self, "nonnormalset")
1253 1251 util.clearcachedproperty(self, "otherparentset")
1254 1252
1255 1253 def items(self):
1256 1254 return self._map.iteritems()
1257 1255
1258 1256 # forward for python2,3 compat
1259 1257 iteritems = items
1260 1258
1261 1259 def __len__(self):
1262 1260 return len(self._map)
1263 1261
1264 1262 def __iter__(self):
1265 1263 return iter(self._map)
1266 1264
1267 1265 def get(self, key, default=None):
1268 1266 return self._map.get(key, default)
1269 1267
1270 1268 def __contains__(self, key):
1271 1269 return key in self._map
1272 1270
1273 1271 def __getitem__(self, key):
1274 1272 return self._map[key]
1275 1273
1276 1274 def keys(self):
1277 1275 return self._map.keys()
1278 1276
1279 1277 def preload(self):
1280 1278 """Loads the underlying data, if it's not already loaded"""
1281 1279 self._map
1282 1280
1283 1281 def addfile(self, f, oldstate, state, mode, size, mtime):
1284 1282 """Add a tracked file to the dirstate."""
1285 1283 if oldstate in "?r" and r"_dirs" in self.__dict__:
1286 1284 self._dirs.addpath(f)
1287 1285 if oldstate == "?" and r"_alldirs" in self.__dict__:
1288 1286 self._alldirs.addpath(f)
1289 1287 self._map[f] = dirstatetuple(state, mode, size, mtime)
1290 1288 if state != 'n' or mtime == -1:
1291 1289 self.nonnormalset.add(f)
1292 1290 if size == -2:
1293 1291 self.otherparentset.add(f)
1294 1292
1295 1293 def removefile(self, f, oldstate, size):
1296 1294 """
1297 1295 Mark a file as removed in the dirstate.
1298 1296
1299 1297 The `size` parameter is used to store sentinel values that indicate
1300 1298 the file's previous state. In the future, we should refactor this
1301 1299 to be more explicit about what that state is.
1302 1300 """
1303 1301 if oldstate not in "?r" and r"_dirs" in self.__dict__:
1304 1302 self._dirs.delpath(f)
1305 1303 if oldstate == "?" and r"_alldirs" in self.__dict__:
1306 1304 self._alldirs.addpath(f)
1307 1305 if r"filefoldmap" in self.__dict__:
1308 1306 normed = util.normcase(f)
1309 1307 self.filefoldmap.pop(normed, None)
1310 1308 self._map[f] = dirstatetuple('r', 0, size, 0)
1311 1309 self.nonnormalset.add(f)
1312 1310
1313 1311 def dropfile(self, f, oldstate):
1314 1312 """
1315 1313 Remove a file from the dirstate. Returns True if the file was
1316 1314 previously recorded.
1317 1315 """
1318 1316 exists = self._map.pop(f, None) is not None
1319 1317 if exists:
1320 1318 if oldstate != "r" and r"_dirs" in self.__dict__:
1321 1319 self._dirs.delpath(f)
1322 1320 if r"_alldirs" in self.__dict__:
1323 1321 self._alldirs.delpath(f)
1324 1322 if r"filefoldmap" in self.__dict__:
1325 1323 normed = util.normcase(f)
1326 1324 self.filefoldmap.pop(normed, None)
1327 1325 self.nonnormalset.discard(f)
1328 1326 return exists
1329 1327
1330 1328 def clearambiguoustimes(self, files, now):
1331 1329 for f in files:
1332 1330 e = self.get(f)
1333 1331 if e is not None and e[0] == 'n' and e[3] == now:
1334 1332 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1335 1333 self.nonnormalset.add(f)
1336 1334
1337 1335 def nonnormalentries(self):
1338 1336 '''Compute the nonnormal dirstate entries from the dmap'''
1339 1337 try:
1340 1338 return parsers.nonnormalotherparententries(self._map)
1341 1339 except AttributeError:
1342 1340 nonnorm = set()
1343 1341 otherparent = set()
1344 1342 for fname, e in self._map.iteritems():
1345 1343 if e[0] != 'n' or e[3] == -1:
1346 1344 nonnorm.add(fname)
1347 1345 if e[0] == 'n' and e[2] == -2:
1348 1346 otherparent.add(fname)
1349 1347 return nonnorm, otherparent
1350 1348
1351 1349 @propertycache
1352 1350 def filefoldmap(self):
1353 1351 """Returns a dictionary mapping normalized case paths to their
1354 1352 non-normalized versions.
1355 1353 """
1356 1354 try:
1357 1355 makefilefoldmap = parsers.make_file_foldmap
1358 1356 except AttributeError:
1359 1357 pass
1360 1358 else:
1361 1359 return makefilefoldmap(self._map, util.normcasespec,
1362 1360 util.normcasefallback)
1363 1361
1364 1362 f = {}
1365 1363 normcase = util.normcase
1366 1364 for name, s in self._map.iteritems():
1367 1365 if s[0] != 'r':
1368 1366 f[normcase(name)] = name
1369 1367 f['.'] = '.' # prevents useless util.fspath() invocation
1370 1368 return f
1371 1369
1372 1370 def hastrackeddir(self, d):
1373 1371 """
1374 1372 Returns True if the dirstate contains a tracked (not removed) file
1375 1373 in this directory.
1376 1374 """
1377 1375 return d in self._dirs
1378 1376
1379 1377 def hasdir(self, d):
1380 1378 """
1381 1379 Returns True if the dirstate contains a file (tracked or removed)
1382 1380 in this directory.
1383 1381 """
1384 1382 return d in self._alldirs
1385 1383
1386 1384 @propertycache
1387 1385 def _dirs(self):
1388 1386 return util.dirs(self._map, 'r')
1389 1387
1390 1388 @propertycache
1391 1389 def _alldirs(self):
1392 1390 return util.dirs(self._map)
1393 1391
1394 1392 def _opendirstatefile(self):
1395 1393 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1396 1394 if self._pendingmode is not None and self._pendingmode != mode:
1397 1395 fp.close()
1398 1396 raise error.Abort(_('working directory state may be '
1399 1397 'changed parallelly'))
1400 1398 self._pendingmode = mode
1401 1399 return fp
1402 1400
1403 1401 def parents(self):
1404 1402 if not self._parents:
1405 1403 try:
1406 1404 fp = self._opendirstatefile()
1407 1405 st = fp.read(40)
1408 1406 fp.close()
1409 1407 except IOError as err:
1410 1408 if err.errno != errno.ENOENT:
1411 1409 raise
1412 1410 # File doesn't exist, so the current state is empty
1413 1411 st = ''
1414 1412
1415 1413 l = len(st)
1416 1414 if l == 40:
1417 1415 self._parents = (st[:20], st[20:40])
1418 1416 elif l == 0:
1419 1417 self._parents = (nullid, nullid)
1420 1418 else:
1421 1419 raise error.Abort(_('working directory state appears '
1422 1420 'damaged!'))
1423 1421
1424 1422 return self._parents
1425 1423
1426 1424 def setparents(self, p1, p2):
1427 1425 self._parents = (p1, p2)
1428 1426 self._dirtyparents = True
1429 1427
1430 1428 def read(self):
1431 1429 # ignore HG_PENDING because identity is used only for writing
1432 1430 self.identity = util.filestat.frompath(
1433 1431 self._opener.join(self._filename))
1434 1432
1435 1433 try:
1436 1434 fp = self._opendirstatefile()
1437 1435 try:
1438 1436 st = fp.read()
1439 1437 finally:
1440 1438 fp.close()
1441 1439 except IOError as err:
1442 1440 if err.errno != errno.ENOENT:
1443 1441 raise
1444 1442 return
1445 1443 if not st:
1446 1444 return
1447 1445
1448 1446 if util.safehasattr(parsers, 'dict_new_presized'):
1449 1447 # Make an estimate of the number of files in the dirstate based on
1450 1448 # its size. From a linear regression on a set of real-world repos,
1451 1449 # all over 10,000 files, the size of a dirstate entry is 85
1452 1450 # bytes. The cost of resizing is significantly higher than the cost
1453 1451 # of filling in a larger presized dict, so subtract 20% from the
1454 1452 # size.
1455 1453 #
1456 1454 # This heuristic is imperfect in many ways, so in a future dirstate
1457 1455 # format update it makes sense to just record the number of entries
1458 1456 # on write.
1459 1457 self._map = parsers.dict_new_presized(len(st) // 71)
1460 1458
1461 1459 # Python's garbage collector triggers a GC each time a certain number
1462 1460 # of container objects (the number being defined by
1463 1461 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1464 1462 # for each file in the dirstate. The C version then immediately marks
1465 1463 # them as not to be tracked by the collector. However, this has no
1466 1464 # effect on when GCs are triggered, only on what objects the GC looks
1467 1465 # into. This means that O(number of files) GCs are unavoidable.
1468 1466 # Depending on when in the process's lifetime the dirstate is parsed,
1469 1467 # this can get very expensive. As a workaround, disable GC while
1470 1468 # parsing the dirstate.
1471 1469 #
1472 1470 # (we cannot decorate the function directly since it is in a C module)
1473 1471 if rustext is not None:
1474 1472 parse_dirstate = rustext.dirstate.parse_dirstate
1475 1473 else:
1476 1474 parse_dirstate = parsers.parse_dirstate
1477 1475
1478 1476 parse_dirstate = util.nogc(parse_dirstate)
1479 1477 p = parse_dirstate(self._map, self.copymap, st)
1480 1478 if not self._dirtyparents:
1481 1479 self.setparents(*p)
1482 1480
1483 1481 # Avoid excess attribute lookups by fast pathing certain checks
1484 1482 self.__contains__ = self._map.__contains__
1485 1483 self.__getitem__ = self._map.__getitem__
1486 1484 self.get = self._map.get
1487 1485
1488 1486 def write(self, st, now):
1489 1487 if rustext is not None:
1490 1488 pack_dirstate = rustext.dirstate.pack_dirstate
1491 1489 else:
1492 1490 pack_dirstate = parsers.pack_dirstate
1493 1491
1494 1492 st.write(pack_dirstate(self._map, self.copymap,
1495 1493 self.parents(), now))
1496 1494 st.close()
1497 1495 self._dirtyparents = False
1498 1496 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1499 1497
1500 1498 @propertycache
1501 1499 def nonnormalset(self):
1502 1500 nonnorm, otherparents = self.nonnormalentries()
1503 1501 self.otherparentset = otherparents
1504 1502 return nonnorm
1505 1503
1506 1504 @propertycache
1507 1505 def otherparentset(self):
1508 1506 nonnorm, otherparents = self.nonnormalentries()
1509 1507 self.nonnormalset = nonnorm
1510 1508 return otherparents
1511 1509
1512 1510 @propertycache
1513 1511 def identity(self):
1514 1512 self._map
1515 1513 return self.identity
1516 1514
1517 1515 @propertycache
1518 1516 def dirfoldmap(self):
1519 1517 f = {}
1520 1518 normcase = util.normcase
1521 1519 for name in self._dirs:
1522 1520 f[normcase(name)] = name
1523 1521 return f