store: drop the `filefilter` argument to `_walk`...
marmoute
r47613:6afb5ef1 default
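The diff below removes the `filefilter` parameter from `basicstore._walk` (it previously defaulted to `isrevlog`) and makes the walk call `isrevlog` directly. A caller that wants a different selection can filter the yielded (unencoded, encoded, size) entries itself; a minimal, hypothetical sketch (the `store` argument and the ".i"-only filter below are illustrative, not part of this change):

def walk_index_files(store):
    # keep only index files among the entries _walk already yields
    for unencoded, encoded, size in store._walk(b'data', True):
        if unencoded.endswith(b'.i'):
            yield unencoded, encoded, size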
@@ -1,756 +1,756 @@
1 1 # store.py - repository store handling for Mercurial
2 2 #
3 3 # Copyright 2008 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import functools
12 12 import os
13 13 import stat
14 14
15 15 from .i18n import _
16 16 from .pycompat import getattr
17 17 from .node import hex
18 18 from . import (
19 19 changelog,
20 20 error,
21 21 manifest,
22 22 policy,
23 23 pycompat,
24 24 util,
25 25 vfs as vfsmod,
26 26 )
27 27 from .utils import hashutil
28 28
29 29 parsers = policy.importmod('parsers')
30 31 # how many bytes should be read from fncache in one read
31 31 # It is done to prevent loading large fncache files into memory
32 32 fncache_chunksize = 10 ** 6
33 33
34 34
35 35 def _matchtrackedpath(path, matcher):
36 36 """parses a fncache entry and returns whether the entry is tracking a path
37 37 matched by matcher or not.
38 38
39 39 If matcher is None, returns True"""
40 40
41 41 if matcher is None:
42 42 return True
43 43 path = decodedir(path)
44 44 if path.startswith(b'data/'):
45 45 return matcher(path[len(b'data/') : -len(b'.i')])
46 46 elif path.startswith(b'meta/'):
47 47 return matcher.visitdir(path[len(b'meta/') : -len(b'/00manifest.i')])
48 48
49 49 raise error.ProgrammingError(b"cannot decode path %s" % path)
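

# Minimal sketch of how _matchtrackedpath splits fncache entries.  A real
# matcher is a mercurial.match object; `_sketchmatcher` and
# `_sketch_matchtrackedpath` are hypothetical names used only for illustration.
class _sketchmatcher(object):
    def __call__(self, path):
        # called for 'data/' entries with the filelog path, e.g. b'foo/bar'
        return path.startswith(b'foo')

    def visitdir(self, dirpath):
        # called for 'meta/' entries with the tree-manifest directory, e.g. b'foo'
        return dirpath.startswith(b'foo')


def _sketch_matchtrackedpath():
    m = _sketchmatcher()
    assert _matchtrackedpath(b'data/foo/bar.i', m)
    assert _matchtrackedpath(b'meta/foo/00manifest.i', m)
    assert not _matchtrackedpath(b'data/baz.i', m)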
50 50
51 51
52 52 # This avoids a collision between a file named foo and a dir named
53 53 # foo.i or foo.d
54 54 def _encodedir(path):
55 55 """
56 56 >>> _encodedir(b'data/foo.i')
57 57 'data/foo.i'
58 58 >>> _encodedir(b'data/foo.i/bla.i')
59 59 'data/foo.i.hg/bla.i'
60 60 >>> _encodedir(b'data/foo.i.hg/bla.i')
61 61 'data/foo.i.hg.hg/bla.i'
62 62 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
63 63 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
64 64 """
65 65 return (
66 66 path.replace(b".hg/", b".hg.hg/")
67 67 .replace(b".i/", b".i.hg/")
68 68 .replace(b".d/", b".d.hg/")
69 69 )
70 70
71 71
72 72 encodedir = getattr(parsers, 'encodedir', _encodedir)
73 73
74 74
75 75 def decodedir(path):
76 76 """
77 77 >>> decodedir(b'data/foo.i')
78 78 'data/foo.i'
79 79 >>> decodedir(b'data/foo.i.hg/bla.i')
80 80 'data/foo.i/bla.i'
81 81 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
82 82 'data/foo.i.hg/bla.i'
83 83 """
84 84 if b".hg/" not in path:
85 85 return path
86 86 return (
87 87 path.replace(b".d.hg/", b".d/")
88 88 .replace(b".i.hg/", b".i/")
89 89 .replace(b".hg.hg/", b".hg/")
90 90 )
91 91
92 92
93 93 def _reserved():
94 94 """characters that are problematic for filesystems
95 95
96 96 * ascii escapes (0..31)
97 97 * ascii hi (126..255)
98 98 * windows specials
99 99
100 100 these characters will be escaped by the encode functions
101 101 """
102 102 winreserved = [ord(x) for x in u'\\:*?"<>|']
103 103 for x in range(32):
104 104 yield x
105 105 for x in range(126, 256):
106 106 yield x
107 107 for x in winreserved:
108 108 yield x
109 109
110 110
111 111 def _buildencodefun():
112 112 """
113 113 >>> enc, dec = _buildencodefun()
114 114
115 115 >>> enc(b'nothing/special.txt')
116 116 'nothing/special.txt'
117 117 >>> dec(b'nothing/special.txt')
118 118 'nothing/special.txt'
119 119
120 120 >>> enc(b'HELLO')
121 121 '_h_e_l_l_o'
122 122 >>> dec(b'_h_e_l_l_o')
123 123 'HELLO'
124 124
125 125 >>> enc(b'hello:world?')
126 126 'hello~3aworld~3f'
127 127 >>> dec(b'hello~3aworld~3f')
128 128 'hello:world?'
129 129
130 130 >>> enc(b'the\\x07quick\\xADshot')
131 131 'the~07quick~adshot'
132 132 >>> dec(b'the~07quick~adshot')
133 133 'the\\x07quick\\xadshot'
134 134 """
135 135 e = b'_'
136 136 xchr = pycompat.bytechr
137 137 asciistr = list(map(xchr, range(127)))
138 138 capitals = list(range(ord(b"A"), ord(b"Z") + 1))
139 139
140 140 cmap = {x: x for x in asciistr}
141 141 for x in _reserved():
142 142 cmap[xchr(x)] = b"~%02x" % x
143 143 for x in capitals + [ord(e)]:
144 144 cmap[xchr(x)] = e + xchr(x).lower()
145 145
146 146 dmap = {}
147 147 for k, v in pycompat.iteritems(cmap):
148 148 dmap[v] = k
149 149
150 150 def decode(s):
151 151 i = 0
152 152 while i < len(s):
153 153 for l in pycompat.xrange(1, 4):
154 154 try:
155 155 yield dmap[s[i : i + l]]
156 156 i += l
157 157 break
158 158 except KeyError:
159 159 pass
160 160 else:
161 161 raise KeyError
162 162
163 163 return (
164 164 lambda s: b''.join(
165 165 [cmap[s[c : c + 1]] for c in pycompat.xrange(len(s))]
166 166 ),
167 167 lambda s: b''.join(list(decode(s))),
168 168 )
169 169
170 170
171 171 _encodefname, _decodefname = _buildencodefun()
172 172
173 173
174 174 def encodefilename(s):
175 175 """
176 176 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
177 177 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
178 178 """
179 179 return _encodefname(encodedir(s))
180 180
181 181
182 182 def decodefilename(s):
183 183 """
184 184 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
185 185 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
186 186 """
187 187 return decodedir(_decodefname(s))
188 188
189 189
190 190 def _buildlowerencodefun():
191 191 """
192 192 >>> f = _buildlowerencodefun()
193 193 >>> f(b'nothing/special.txt')
194 194 'nothing/special.txt'
195 195 >>> f(b'HELLO')
196 196 'hello'
197 197 >>> f(b'hello:world?')
198 198 'hello~3aworld~3f'
199 199 >>> f(b'the\\x07quick\\xADshot')
200 200 'the~07quick~adshot'
201 201 """
202 202 xchr = pycompat.bytechr
203 203 cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
204 204 for x in _reserved():
205 205 cmap[xchr(x)] = b"~%02x" % x
206 206 for x in range(ord(b"A"), ord(b"Z") + 1):
207 207 cmap[xchr(x)] = xchr(x).lower()
208 208
209 209 def lowerencode(s):
210 210 return b"".join([cmap[c] for c in pycompat.iterbytestr(s)])
211 211
212 212 return lowerencode
213 213
214 214
215 215 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
216 216
217 217 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
218 218 _winres3 = (b'aux', b'con', b'prn', b'nul') # length 3
219 219 _winres4 = (b'com', b'lpt') # length 4 (with trailing 1..9)
220 220
221 221
222 222 def _auxencode(path, dotencode):
223 223 """
224 224 Encodes filenames containing names reserved by Windows or which end in
225 225 period or space. Does not touch other single reserved characters c.
226 226 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
227 227 Additionally encodes space or period at the beginning, if dotencode is
228 228 True. Parameter path is assumed to be all lowercase.
229 229 A segment only needs encoding if a reserved name appears as a
230 230 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
231 231 doesn't need encoding.
232 232
233 233 >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
234 234 >>> _auxencode(s.split(b'/'), True)
235 235 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
236 236 >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
237 237 >>> _auxencode(s.split(b'/'), False)
238 238 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
239 239 >>> _auxencode([b'foo. '], True)
240 240 ['foo.~20']
241 241 >>> _auxencode([b' .foo'], True)
242 242 ['~20.foo']
243 243 """
244 244 for i, n in enumerate(path):
245 245 if not n:
246 246 continue
247 247 if dotencode and n[0] in b'. ':
248 248 n = b"~%02x" % ord(n[0:1]) + n[1:]
249 249 path[i] = n
250 250 else:
251 251 l = n.find(b'.')
252 252 if l == -1:
253 253 l = len(n)
254 254 if (l == 3 and n[:3] in _winres3) or (
255 255 l == 4
256 256 and n[3:4] <= b'9'
257 257 and n[3:4] >= b'1'
258 258 and n[:3] in _winres4
259 259 ):
260 260 # encode third letter ('aux' -> 'au~78')
261 261 ec = b"~%02x" % ord(n[2:3])
262 262 n = n[0:2] + ec + n[3:]
263 263 path[i] = n
264 264 if n[-1] in b'. ':
265 265 # encode last period or space ('foo...' -> 'foo..~2e')
266 266 path[i] = n[:-1] + b"~%02x" % ord(n[-1:])
267 267 return path
268 268
269 269
270 270 _maxstorepathlen = 120
271 271 _dirprefixlen = 8
272 272 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
273 273
274 274
275 275 def _hashencode(path, dotencode):
276 276 digest = hex(hashutil.sha1(path).digest())
277 277 le = lowerencode(path[5:]).split(b'/') # skips prefix 'data/' or 'meta/'
278 278 parts = _auxencode(le, dotencode)
279 279 basename = parts[-1]
280 280 _root, ext = os.path.splitext(basename)
281 281 sdirs = []
282 282 sdirslen = 0
283 283 for p in parts[:-1]:
284 284 d = p[:_dirprefixlen]
285 285 if d[-1] in b'. ':
286 286 # Windows can't access dirs ending in period or space
287 287 d = d[:-1] + b'_'
288 288 if sdirslen == 0:
289 289 t = len(d)
290 290 else:
291 291 t = sdirslen + 1 + len(d)
292 292 if t > _maxshortdirslen:
293 293 break
294 294 sdirs.append(d)
295 295 sdirslen = t
296 296 dirs = b'/'.join(sdirs)
297 297 if len(dirs) > 0:
298 298 dirs += b'/'
299 299 res = b'dh/' + dirs + digest + ext
300 300 spaceleft = _maxstorepathlen - len(res)
301 301 if spaceleft > 0:
302 302 filler = basename[:spaceleft]
303 303 res = b'dh/' + dirs + filler + digest + ext
304 304 return res
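

# Sketch of the shape produced by _hashencode, under the limits defined above
# (_maxstorepathlen, _dirprefixlen, _maxshortdirslen): b'dh/', then up to
# _dirprefixlen characters per directory level, then a basename filler, the
# sha1 hex digest of the full path, and the original extension.
# `_sketch_hashencode` is a hypothetical helper; the assertions only check the
# structure of the result, not a specific digest value.
def _sketch_hashencode():
    long_path = b'data/' + b'averylongdirectoryname/' * 10 + b'file.i'
    res = _hashencode(long_path, True)
    assert res.startswith(b'dh/')
    assert res.endswith(b'.i')
    assert len(res) <= _maxstorepathlen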
305 305
306 306
307 307 def _hybridencode(path, dotencode):
308 308 """encodes path with a length limit
309 309
310 310 Encodes all paths that begin with 'data/', according to the following.
311 311
312 312 Default encoding (reversible):
313 313
314 314 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
315 315 characters are encoded as '~xx', where xx is the two digit hex code
316 316 of the character (see encodefilename).
317 317 Relevant path components consisting of Windows reserved filenames are
318 318 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
319 319
320 320 Hashed encoding (not reversible):
321 321
322 322 If the default-encoded path is longer than _maxstorepathlen, a
323 323 non-reversible hybrid hashing of the path is done instead.
324 324 This encoding uses up to _dirprefixlen characters of all directory
325 325 levels of the lowerencoded path, but not more levels than can fit into
326 326 _maxshortdirslen.
327 327 Then follows the filler followed by the sha digest of the full path.
328 328 The filler is the beginning of the basename of the lowerencoded path
329 329 (the basename is everything after the last path separator). The filler
330 330 is as long as possible, filling in characters from the basename until
331 331 the encoded path has _maxstorepathlen characters (or all chars of the
332 332 basename have been taken).
333 333 The extension (e.g. '.i' or '.d') is preserved.
334 334
335 335 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
336 336 encoding was used.
337 337 """
338 338 path = encodedir(path)
339 339 ef = _encodefname(path).split(b'/')
340 340 res = b'/'.join(_auxencode(ef, dotencode))
341 341 if len(res) > _maxstorepathlen:
342 342 res = _hashencode(path, dotencode)
343 343 return res
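

# Sketch of the two regimes described in the docstring above.  Short paths use
# the reversible encoding; paths whose encoded form exceeds _maxstorepathlen
# fall back to the non-reversible 'dh/' hashed form.  `_sketch_hybridencode`
# is a hypothetical helper, not part of the store API.
def _sketch_hybridencode():
    # reversible: uppercase letters become '_x' (see encodefilename)
    assert _hybridencode(b'data/FOO.i', False) == b'data/_f_o_o.i'
    # hashed: a very long path is rewritten under 'dh/' and capped in length
    hashed = _hybridencode(b'data/' + b'x' * 200 + b'.i', False)
    assert hashed.startswith(b'dh/')
    assert len(hashed) <= _maxstorepathlen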
344 344
345 345
346 346 def _pathencode(path):
347 347 de = encodedir(path)
348 348 if len(path) > _maxstorepathlen:
349 349 return _hashencode(de, True)
350 350 ef = _encodefname(de).split(b'/')
351 351 res = b'/'.join(_auxencode(ef, True))
352 352 if len(res) > _maxstorepathlen:
353 353 return _hashencode(de, True)
354 354 return res
355 355
356 356
357 357 _pathencode = getattr(parsers, 'pathencode', _pathencode)
358 358
359 359
360 360 def _plainhybridencode(f):
361 361 return _hybridencode(f, False)
362 362
363 363
364 364 def _calcmode(vfs):
365 365 try:
366 366 # files in .hg/ will be created using this mode
367 367 mode = vfs.stat().st_mode
368 368 # avoid some useless chmods
369 369 if (0o777 & ~util.umask) == (0o777 & mode):
370 370 mode = None
371 371 except OSError:
372 372 mode = None
373 373 return mode
374 374
375 375
376 376 _data = [
377 377 b'bookmarks',
378 378 b'narrowspec',
379 379 b'data',
380 380 b'meta',
381 381 b'00manifest.d',
382 382 b'00manifest.i',
383 383 b'00changelog.d',
384 384 b'00changelog.i',
385 385 b'phaseroots',
386 386 b'obsstore',
387 387 b'requires',
388 388 ]
389 389
390 390 REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd')
391 391
392 392
393 393 def isrevlog(f, kind, st):
394 394 if kind != stat.S_IFREG:
395 395 return False
396 396 return f.endswith(REVLOG_FILES_EXT)
397 397
398 398
399 399 class basicstore(object):
400 400 '''base class for local repository stores'''
401 401
402 402 def __init__(self, path, vfstype):
403 403 vfs = vfstype(path)
404 404 self.path = vfs.base
405 405 self.createmode = _calcmode(vfs)
406 406 vfs.createmode = self.createmode
407 407 self.rawvfs = vfs
408 408 self.vfs = vfsmod.filtervfs(vfs, encodedir)
409 409 self.opener = self.vfs
410 410
411 411 def join(self, f):
412 412 return self.path + b'/' + encodedir(f)
413 413
414 def _walk(self, relpath, recurse, filefilter=isrevlog):
414 def _walk(self, relpath, recurse):
415 415 '''yields (unencoded, encoded, size)'''
416 416 path = self.path
417 417 if relpath:
418 418 path += b'/' + relpath
419 419 striplen = len(self.path) + 1
420 420 l = []
421 421 if self.rawvfs.isdir(path):
422 422 visit = [path]
423 423 readdir = self.rawvfs.readdir
424 424 while visit:
425 425 p = visit.pop()
426 426 for f, kind, st in readdir(p, stat=True):
427 427 fp = p + b'/' + f
428 if filefilter(f, kind, st):
428 if isrevlog(f, kind, st):
429 429 n = util.pconvert(fp[striplen:])
430 430 l.append((decodedir(n), n, st.st_size))
431 431 elif kind == stat.S_IFDIR and recurse:
432 432 visit.append(fp)
433 433 l.sort()
434 434 return l
435 435
436 436 def changelog(self, trypending, concurrencychecker=None):
437 437 return changelog.changelog(
438 438 self.vfs,
439 439 trypending=trypending,
440 440 concurrencychecker=concurrencychecker,
441 441 )
442 442
443 443 def manifestlog(self, repo, storenarrowmatch):
444 444 rootstore = manifest.manifestrevlog(repo.nodeconstants, self.vfs)
445 445 return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch)
446 446
447 447 def datafiles(self, matcher=None):
448 448 return self._walk(b'data', True) + self._walk(b'meta', True)
449 449
450 450 def topfiles(self):
451 451 # yield manifest before changelog
452 452 return reversed(self._walk(b'', False))
453 453
454 454 def walk(self, matcher=None):
455 455 """return file related to data storage (ie: revlogs)
456 456
457 457 yields (unencoded, encoded, size)
458 458
459 459 if a matcher is passed, only storage files of tracked paths
460 460 matched by the matcher are yielded
461 461 """
462 462 # yield data files first
463 463 for x in self.datafiles(matcher):
464 464 yield x
465 465 for x in self.topfiles():
466 466 yield x
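# Sketch of consuming walk() from outside the store; every implementation
# yields (unencoded, encoded, size) triples, data and meta files first, then
# the top-level revlogs.  `repo` below stands for any local repository object
# and is illustrative only:
#
#     total = 0
#     for unencoded, encoded, size in repo.store.walk():
#         total += size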
467 467
468 468 def copylist(self):
469 469 return _data
470 470
471 471 def write(self, tr):
472 472 pass
473 473
474 474 def invalidatecaches(self):
475 475 pass
476 476
477 477 def markremoved(self, fn):
478 478 pass
479 479
480 480 def __contains__(self, path):
481 481 '''Checks if the store contains path'''
482 482 path = b"/".join((b"data", path))
483 483 # file?
484 484 if self.vfs.exists(path + b".i"):
485 485 return True
486 486 # dir?
487 487 if not path.endswith(b"/"):
488 488 path = path + b"/"
489 489 return self.vfs.exists(path)
490 490
491 491
492 492 class encodedstore(basicstore):
493 493 def __init__(self, path, vfstype):
494 494 vfs = vfstype(path + b'/store')
495 495 self.path = vfs.base
496 496 self.createmode = _calcmode(vfs)
497 497 vfs.createmode = self.createmode
498 498 self.rawvfs = vfs
499 499 self.vfs = vfsmod.filtervfs(vfs, encodefilename)
500 500 self.opener = self.vfs
501 501
502 502 def datafiles(self, matcher=None):
503 503 for a, b, size in super(encodedstore, self).datafiles():
504 504 try:
505 505 a = decodefilename(a)
506 506 except KeyError:
507 507 a = None
508 508 if a is not None and not _matchtrackedpath(a, matcher):
509 509 continue
510 510 yield a, b, size
511 511
512 512 def join(self, f):
513 513 return self.path + b'/' + encodefilename(f)
514 514
515 515 def copylist(self):
516 516 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
517 517
518 518
519 519 class fncache(object):
520 520 # the filename used to be partially encoded
521 521 # hence the encodedir/decodedir dance
522 522 def __init__(self, vfs):
523 523 self.vfs = vfs
524 524 self.entries = None
525 525 self._dirty = False
526 526 # set of new additions to fncache
527 527 self.addls = set()
528 528
529 529 def ensureloaded(self, warn=None):
530 530 """read the fncache file if not already read.
531 531
532 532 If the file on disk is corrupted, raise. If warn is provided,
533 533 warn and keep going instead."""
534 534 if self.entries is None:
535 535 self._load(warn)
536 536
537 537 def _load(self, warn=None):
538 538 '''fill the entries from the fncache file'''
539 539 self._dirty = False
540 540 try:
541 541 fp = self.vfs(b'fncache', mode=b'rb')
542 542 except IOError:
543 543 # skip nonexistent file
544 544 self.entries = set()
545 545 return
546 546
547 547 self.entries = set()
548 548 chunk = b''
549 549 for c in iter(functools.partial(fp.read, fncache_chunksize), b''):
550 550 chunk += c
551 551 try:
552 552 p = chunk.rindex(b'\n')
553 553 self.entries.update(decodedir(chunk[: p + 1]).splitlines())
554 554 chunk = chunk[p + 1 :]
555 555 except ValueError:
556 556 # substring '\n' not found, maybe the entry is bigger than the
557 557 # chunksize, so let's keep iterating
558 558 pass
559 559
560 560 if chunk:
561 561 msg = _(b"fncache does not ends with a newline")
562 562 if warn:
563 563 warn(msg + b'\n')
564 564 else:
565 565 raise error.Abort(
566 566 msg,
567 567 hint=_(
568 568 b"use 'hg debugrebuildfncache' to "
569 569 b"rebuild the fncache"
570 570 ),
571 571 )
572 572 self._checkentries(fp, warn)
573 573 fp.close()
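# The loop above reads the fncache in fncache_chunksize pieces and only
# consumes complete, newline-terminated entries from each chunk; anything left
# over after the loop is a final entry without a trailing newline.  A
# standalone sketch of the same idiom (the file object `fh` is hypothetical):
#
#     buf = b''
#     entries = set()
#     for chunk in iter(functools.partial(fh.read, fncache_chunksize), b''):
#         buf += chunk
#         end = buf.rfind(b'\n')
#         if end != -1:
#             entries.update(buf[: end + 1].splitlines())
#             buf = buf[end + 1 :]
#     leftover = buf  # non-empty means the file did not end with a newline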
574 574
575 575 def _checkentries(self, fp, warn):
576 576 """ make sure there is no empty string in entries """
577 577 if b'' in self.entries:
578 578 fp.seek(0)
579 579 for n, line in enumerate(util.iterfile(fp)):
580 580 if not line.rstrip(b'\n'):
581 581 t = _(b'invalid entry in fncache, line %d') % (n + 1)
582 582 if warn:
583 583 warn(t + b'\n')
584 584 else:
585 585 raise error.Abort(t)
586 586
587 587 def write(self, tr):
588 588 if self._dirty:
589 589 assert self.entries is not None
590 590 self.entries = self.entries | self.addls
591 591 self.addls = set()
592 592 tr.addbackup(b'fncache')
593 593 fp = self.vfs(b'fncache', mode=b'wb', atomictemp=True)
594 594 if self.entries:
595 595 fp.write(encodedir(b'\n'.join(self.entries) + b'\n'))
596 596 fp.close()
597 597 self._dirty = False
598 598 if self.addls:
599 599 # if we have just new entries, let's append them to the fncache
600 600 tr.addbackup(b'fncache')
601 601 fp = self.vfs(b'fncache', mode=b'ab', atomictemp=True)
602 602 if self.addls:
603 603 fp.write(encodedir(b'\n'.join(self.addls) + b'\n'))
604 604 fp.close()
605 605 self.entries = None
606 606 self.addls = set()
607 607
608 608 def add(self, fn):
609 609 if self.entries is None:
610 610 self._load()
611 611 if fn not in self.entries:
612 612 self.addls.add(fn)
613 613
614 614 def remove(self, fn):
615 615 if self.entries is None:
616 616 self._load()
617 617 if fn in self.addls:
618 618 self.addls.remove(fn)
619 619 return
620 620 try:
621 621 self.entries.remove(fn)
622 622 self._dirty = True
623 623 except KeyError:
624 624 pass
625 625
626 626 def __contains__(self, fn):
627 627 if fn in self.addls:
628 628 return True
629 629 if self.entries is None:
630 630 self._load()
631 631 return fn in self.entries
632 632
633 633 def __iter__(self):
634 634 if self.entries is None:
635 635 self._load()
636 636 return iter(self.entries | self.addls)
637 637
638 638
639 639 class _fncachevfs(vfsmod.proxyvfs):
640 640 def __init__(self, vfs, fnc, encode):
641 641 vfsmod.proxyvfs.__init__(self, vfs)
642 642 self.fncache = fnc
643 643 self.encode = encode
644 644
645 645 def __call__(self, path, mode=b'r', *args, **kw):
646 646 encoded = self.encode(path)
647 647 if mode not in (b'r', b'rb') and (
648 648 path.startswith(b'data/') or path.startswith(b'meta/')
649 649 ):
650 650 # do not trigger a fncache load when adding a file that already is
651 651 # known to exist.
652 652 notload = self.fncache.entries is None and self.vfs.exists(encoded)
653 653 if notload and b'a' in mode and not self.vfs.stat(encoded).st_size:
654 654 # when appending to an existing file, if the file has size zero,
655 655 # it should be considered as missing. Such zero-size files are
656 656 # the result of truncation when a transaction is aborted.
657 657 notload = False
658 658 if not notload:
659 659 self.fncache.add(path)
660 660 return self.vfs(encoded, mode, *args, **kw)
661 661
662 662 def join(self, path):
663 663 if path:
664 664 return self.vfs.join(self.encode(path))
665 665 else:
666 666 return self.vfs.join(path)
667 667
668 668
669 669 class fncachestore(basicstore):
670 670 def __init__(self, path, vfstype, dotencode):
671 671 if dotencode:
672 672 encode = _pathencode
673 673 else:
674 674 encode = _plainhybridencode
675 675 self.encode = encode
676 676 vfs = vfstype(path + b'/store')
677 677 self.path = vfs.base
678 678 self.pathsep = self.path + b'/'
679 679 self.createmode = _calcmode(vfs)
680 680 vfs.createmode = self.createmode
681 681 self.rawvfs = vfs
682 682 fnc = fncache(vfs)
683 683 self.fncache = fnc
684 684 self.vfs = _fncachevfs(vfs, fnc, encode)
685 685 self.opener = self.vfs
686 686
687 687 def join(self, f):
688 688 return self.pathsep + self.encode(f)
689 689
690 690 def getsize(self, path):
691 691 return self.rawvfs.stat(path).st_size
692 692
693 693 def datafiles(self, matcher=None):
694 694 for f in sorted(self.fncache):
695 695 if not _matchtrackedpath(f, matcher):
696 696 continue
697 697 ef = self.encode(f)
698 698 try:
699 699 yield f, ef, self.getsize(ef)
700 700 except OSError as err:
701 701 if err.errno != errno.ENOENT:
702 702 raise
703 703
704 704 def copylist(self):
705 705 d = (
706 706 b'bookmarks',
707 707 b'narrowspec',
708 708 b'data',
709 709 b'meta',
710 710 b'dh',
711 711 b'fncache',
712 712 b'phaseroots',
713 713 b'obsstore',
714 714 b'00manifest.d',
715 715 b'00manifest.i',
716 716 b'00changelog.d',
717 717 b'00changelog.i',
718 718 b'requires',
719 719 )
720 720 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in d]
721 721
722 722 def write(self, tr):
723 723 self.fncache.write(tr)
724 724
725 725 def invalidatecaches(self):
726 726 self.fncache.entries = None
727 727 self.fncache.addls = set()
728 728
729 729 def markremoved(self, fn):
730 730 self.fncache.remove(fn)
731 731
732 732 def _exists(self, f):
733 733 ef = self.encode(f)
734 734 try:
735 735 self.getsize(ef)
736 736 return True
737 737 except OSError as err:
738 738 if err.errno != errno.ENOENT:
739 739 raise
740 740 # nonexistent entry
741 741 return False
742 742
743 743 def __contains__(self, path):
744 744 '''Checks if the store contains path'''
745 745 path = b"/".join((b"data", path))
746 746 # check for files (exact match)
747 747 e = path + b'.i'
748 748 if e in self.fncache and self._exists(e):
749 749 return True
750 750 # now check for directories (prefix match)
751 751 if not path.endswith(b'/'):
752 752 path += b'/'
753 753 for e in self.fncache:
754 754 if e.startswith(path) and self._exists(e):
755 755 return True
756 756 return False