store: document the `walk` method...
marmoute
r47612:eed3e2b7 default
@@ -1,754 +1,756
1 1 # store.py - repository store handling for Mercurial
2 2 #
3 3 # Copyright 2008 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import functools
12 12 import os
13 13 import stat
14 14
15 15 from .i18n import _
16 16 from .pycompat import getattr
17 17 from .node import hex
18 18 from . import (
19 19 changelog,
20 20 error,
21 21 manifest,
22 22 policy,
23 23 pycompat,
24 24 util,
25 25 vfs as vfsmod,
26 26 )
27 27 from .utils import hashutil
28 28
29 29 parsers = policy.importmod('parsers')
30 30 # how many bytes should be read from fncache in one read
31 31 # It is done to prevent loading large fncache files into memory
32 32 fncache_chunksize = 10 ** 6
33 33
34 34
35 35 def _matchtrackedpath(path, matcher):
36 36 """parses a fncache entry and returns whether the entry is tracking a path
37 37 matched by matcher or not.
38 38
39 39 If matcher is None, returns True"""
40 40
41 41 if matcher is None:
42 42 return True
43 43 path = decodedir(path)
44 44 if path.startswith(b'data/'):
45 45 return matcher(path[len(b'data/') : -len(b'.i')])
46 46 elif path.startswith(b'meta/'):
47 47 return matcher.visitdir(path[len(b'meta/') : -len(b'/00manifest.i')])
48 48
49 49 raise error.ProgrammingError(b"cannot decode path %s" % path)
50 50
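For illustration, a minimal sketch (not part of the original file) of what the function above does with the two kinds of fncache entries; the `matcher` object is a hypothetical stand-in that is callable on paths and exposes `visitdir`:

# Hypothetical sketch: a filelog entry strips the 'data/' prefix and '.i' suffix
# before being handed to the matcher; manifest entries under 'meta/' are reduced
# to their directory and checked with matcher.visitdir() instead.
entry = b'data/foo/bar.txt.i'
tracked = entry[len(b'data/'):-len(b'.i')]        # b'foo/bar.txt'
# _matchtrackedpath(entry, matcher) is then equivalent to matcher(tracked)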
51 51
52 52 # This avoids a collision between a file named foo and a dir named
53 53 # foo.i or foo.d
54 54 def _encodedir(path):
55 55 """
56 56 >>> _encodedir(b'data/foo.i')
57 57 'data/foo.i'
58 58 >>> _encodedir(b'data/foo.i/bla.i')
59 59 'data/foo.i.hg/bla.i'
60 60 >>> _encodedir(b'data/foo.i.hg/bla.i')
61 61 'data/foo.i.hg.hg/bla.i'
62 62 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
63 63 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
64 64 """
65 65 return (
66 66 path.replace(b".hg/", b".hg.hg/")
67 67 .replace(b".i/", b".i.hg/")
68 68 .replace(b".d/", b".d.hg/")
69 69 )
70 70
71 71
72 72 encodedir = getattr(parsers, 'encodedir', _encodedir)
73 73
74 74
75 75 def decodedir(path):
76 76 """
77 77 >>> decodedir(b'data/foo.i')
78 78 'data/foo.i'
79 79 >>> decodedir(b'data/foo.i.hg/bla.i')
80 80 'data/foo.i/bla.i'
81 81 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
82 82 'data/foo.i.hg/bla.i'
83 83 """
84 84 if b".hg/" not in path:
85 85 return path
86 86 return (
87 87 path.replace(b".d.hg/", b".d/")
88 88 .replace(b".i.hg/", b".i/")
89 89 .replace(b".hg.hg/", b".hg/")
90 90 )
91 91
92 92
93 93 def _reserved():
94 94 """characters that are problematic for filesystems
95 95
96 96 * ascii escapes (0..31)
97 97 * ascii hi (126..255)
98 98 * windows specials
99 99
100 100 these characters will be escaped by encodefunctions
101 101 """
102 102 winreserved = [ord(x) for x in u'\\:*?"<>|']
103 103 for x in range(32):
104 104 yield x
105 105 for x in range(126, 256):
106 106 yield x
107 107 for x in winreserved:
108 108 yield x
109 109
110 110
111 111 def _buildencodefun():
112 112 """
113 113 >>> enc, dec = _buildencodefun()
114 114
115 115 >>> enc(b'nothing/special.txt')
116 116 'nothing/special.txt'
117 117 >>> dec(b'nothing/special.txt')
118 118 'nothing/special.txt'
119 119
120 120 >>> enc(b'HELLO')
121 121 '_h_e_l_l_o'
122 122 >>> dec(b'_h_e_l_l_o')
123 123 'HELLO'
124 124
125 125 >>> enc(b'hello:world?')
126 126 'hello~3aworld~3f'
127 127 >>> dec(b'hello~3aworld~3f')
128 128 'hello:world?'
129 129
130 130 >>> enc(b'the\\x07quick\\xADshot')
131 131 'the~07quick~adshot'
132 132 >>> dec(b'the~07quick~adshot')
133 133 'the\\x07quick\\xadshot'
134 134 """
135 135 e = b'_'
136 136 xchr = pycompat.bytechr
137 137 asciistr = list(map(xchr, range(127)))
138 138 capitals = list(range(ord(b"A"), ord(b"Z") + 1))
139 139
140 140 cmap = {x: x for x in asciistr}
141 141 for x in _reserved():
142 142 cmap[xchr(x)] = b"~%02x" % x
143 143 for x in capitals + [ord(e)]:
144 144 cmap[xchr(x)] = e + xchr(x).lower()
145 145
146 146 dmap = {}
147 147 for k, v in pycompat.iteritems(cmap):
148 148 dmap[v] = k
149 149
150 150 def decode(s):
151 151 i = 0
152 152 while i < len(s):
153 153 for l in pycompat.xrange(1, 4):
154 154 try:
155 155 yield dmap[s[i : i + l]]
156 156 i += l
157 157 break
158 158 except KeyError:
159 159 pass
160 160 else:
161 161 raise KeyError
162 162
163 163 return (
164 164 lambda s: b''.join(
165 165 [cmap[s[c : c + 1]] for c in pycompat.xrange(len(s))]
166 166 ),
167 167 lambda s: b''.join(list(decode(s))),
168 168 )
169 169
170 170
171 171 _encodefname, _decodefname = _buildencodefun()
172 172
173 173
174 174 def encodefilename(s):
175 175 """
176 176 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
177 177 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
178 178 """
179 179 return _encodefname(encodedir(s))
180 180
181 181
182 182 def decodefilename(s):
183 183 """
184 184 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
185 185 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
186 186 """
187 187 return decodedir(_decodefname(s))
188 188
189 189
190 190 def _buildlowerencodefun():
191 191 """
192 192 >>> f = _buildlowerencodefun()
193 193 >>> f(b'nothing/special.txt')
194 194 'nothing/special.txt'
195 195 >>> f(b'HELLO')
196 196 'hello'
197 197 >>> f(b'hello:world?')
198 198 'hello~3aworld~3f'
199 199 >>> f(b'the\\x07quick\\xADshot')
200 200 'the~07quick~adshot'
201 201 """
202 202 xchr = pycompat.bytechr
203 203 cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
204 204 for x in _reserved():
205 205 cmap[xchr(x)] = b"~%02x" % x
206 206 for x in range(ord(b"A"), ord(b"Z") + 1):
207 207 cmap[xchr(x)] = xchr(x).lower()
208 208
209 209 def lowerencode(s):
210 210 return b"".join([cmap[c] for c in pycompat.iterbytestr(s)])
211 211
212 212 return lowerencode
213 213
214 214
215 215 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
216 216
217 217 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
218 218 _winres3 = (b'aux', b'con', b'prn', b'nul') # length 3
219 219 _winres4 = (b'com', b'lpt') # length 4 (with trailing 1..9)
220 220
221 221
222 222 def _auxencode(path, dotencode):
223 223 """
224 224 Encodes filenames containing names reserved by Windows or which end in
225 225 period or space. Does not touch other single reserved characters c.
226 226 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
227 227 Additionally encodes space or period at the beginning, if dotencode is
228 228 True. Parameter path is assumed to be all lowercase.
229 229 A segment only needs encoding if a reserved name appears as a
230 230 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
231 231 doesn't need encoding.
232 232
233 233 >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
234 234 >>> _auxencode(s.split(b'/'), True)
235 235 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
236 236 >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
237 237 >>> _auxencode(s.split(b'/'), False)
238 238 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
239 239 >>> _auxencode([b'foo. '], True)
240 240 ['foo.~20']
241 241 >>> _auxencode([b' .foo'], True)
242 242 ['~20.foo']
243 243 """
244 244 for i, n in enumerate(path):
245 245 if not n:
246 246 continue
247 247 if dotencode and n[0] in b'. ':
248 248 n = b"~%02x" % ord(n[0:1]) + n[1:]
249 249 path[i] = n
250 250 else:
251 251 l = n.find(b'.')
252 252 if l == -1:
253 253 l = len(n)
254 254 if (l == 3 and n[:3] in _winres3) or (
255 255 l == 4
256 256 and n[3:4] <= b'9'
257 257 and n[3:4] >= b'1'
258 258 and n[:3] in _winres4
259 259 ):
260 260 # encode third letter ('aux' -> 'au~78')
261 261 ec = b"~%02x" % ord(n[2:3])
262 262 n = n[0:2] + ec + n[3:]
263 263 path[i] = n
264 264 if n[-1] in b'. ':
265 265 # encode last period or space ('foo...' -> 'foo..~2e')
266 266 path[i] = n[:-1] + b"~%02x" % ord(n[-1:])
267 267 return path
268 268
269 269
270 270 _maxstorepathlen = 120
271 271 _dirprefixlen = 8
272 272 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
273 273
274 274
275 275 def _hashencode(path, dotencode):
276 276 digest = hex(hashutil.sha1(path).digest())
277 277 le = lowerencode(path[5:]).split(b'/') # skips prefix 'data/' or 'meta/'
278 278 parts = _auxencode(le, dotencode)
279 279 basename = parts[-1]
280 280 _root, ext = os.path.splitext(basename)
281 281 sdirs = []
282 282 sdirslen = 0
283 283 for p in parts[:-1]:
284 284 d = p[:_dirprefixlen]
285 285 if d[-1] in b'. ':
286 286 # Windows can't access dirs ending in period or space
287 287 d = d[:-1] + b'_'
288 288 if sdirslen == 0:
289 289 t = len(d)
290 290 else:
291 291 t = sdirslen + 1 + len(d)
292 292 if t > _maxshortdirslen:
293 293 break
294 294 sdirs.append(d)
295 295 sdirslen = t
296 296 dirs = b'/'.join(sdirs)
297 297 if len(dirs) > 0:
298 298 dirs += b'/'
299 299 res = b'dh/' + dirs + digest + ext
300 300 spaceleft = _maxstorepathlen - len(res)
301 301 if spaceleft > 0:
302 302 filler = basename[:spaceleft]
303 303 res = b'dh/' + dirs + filler + digest + ext
304 304 return res
305 305
306 306
307 307 def _hybridencode(path, dotencode):
308 308 """encodes path with a length limit
309 309
310 310 Encodes all paths that begin with 'data/', according to the following.
311 311
312 312 Default encoding (reversible):
313 313
314 314 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
315 315 characters are encoded as '~xx', where xx is the two digit hex code
316 316 of the character (see encodefilename).
317 317 Relevant path components consisting of Windows reserved filenames are
318 318 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
319 319
320 320 Hashed encoding (not reversible):
321 321
322 322 If the default-encoded path is longer than _maxstorepathlen, a
323 323 non-reversible hybrid hashing of the path is done instead.
324 324 This encoding uses up to _dirprefixlen characters of all directory
325 325 levels of the lowerencoded path, but not more levels than can fit into
326 326 _maxshortdirslen.
327 327 Then follows the filler followed by the sha digest of the full path.
328 328 The filler is the beginning of the basename of the lowerencoded path
329 329 (the basename is everything after the last path separator). The filler
330 330 is as long as possible, filling in characters from the basename until
331 331 the encoded path has _maxstorepathlen characters (or all chars of the
332 332 basename have been taken).
333 333 The extension (e.g. '.i' or '.d') is preserved.
334 334
335 335 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
336 336 encoding was used.
337 337 """
338 338 path = encodedir(path)
339 339 ef = _encodefname(path).split(b'/')
340 340 res = b'/'.join(_auxencode(ef, dotencode))
341 341 if len(res) > _maxstorepathlen:
342 342 res = _hashencode(path, dotencode)
343 343 return res
344 344
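A hedged sketch of the hashed fallback described in the docstring above, assuming this module's helpers (`_hybridencode`, `_maxstorepathlen`) are in scope; no literal output is asserted because it depends on the sha1 of the path:

# Sketch only: a path whose default encoding exceeds _maxstorepathlen falls
# through to the non-reversible 'dh/' form built by _hashencode.
long_path = b'data/' + b'very-long-directory-name/' * 10 + b'some-file.txt.i'
encoded = _hybridencode(long_path, True)
# encoded looks like b'dh/very-lon/very-lon/.../<filler><40 hex sha1 chars>.i'
assert encoded.startswith(b'dh/') and encoded.endswith(b'.i')
assert len(encoded) <= _maxstorepathlen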
345 345
346 346 def _pathencode(path):
347 347 de = encodedir(path)
348 348 if len(path) > _maxstorepathlen:
349 349 return _hashencode(de, True)
350 350 ef = _encodefname(de).split(b'/')
351 351 res = b'/'.join(_auxencode(ef, True))
352 352 if len(res) > _maxstorepathlen:
353 353 return _hashencode(de, True)
354 354 return res
355 355
356 356
357 357 _pathencode = getattr(parsers, 'pathencode', _pathencode)
358 358
359 359
360 360 def _plainhybridencode(f):
361 361 return _hybridencode(f, False)
362 362
363 363
364 364 def _calcmode(vfs):
365 365 try:
366 366 # files in .hg/ will be created using this mode
367 367 mode = vfs.stat().st_mode
368 368 # avoid some useless chmods
369 369 if (0o777 & ~util.umask) == (0o777 & mode):
370 370 mode = None
371 371 except OSError:
372 372 mode = None
373 373 return mode
374 374
375 375
376 376 _data = [
377 377 b'bookmarks',
378 378 b'narrowspec',
379 379 b'data',
380 380 b'meta',
381 381 b'00manifest.d',
382 382 b'00manifest.i',
383 383 b'00changelog.d',
384 384 b'00changelog.i',
385 385 b'phaseroots',
386 386 b'obsstore',
387 387 b'requires',
388 388 ]
389 389
390 390 REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd')
391 391
392 392
393 393 def isrevlog(f, kind, st):
394 394 if kind != stat.S_IFREG:
395 395 return False
396 396 return f.endswith(REVLOG_FILES_EXT)
397 397
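A small illustrative sketch of the filter above, called the way `_walk` calls it (the stat result is unused here, so None stands in for it):

# Only regular files carrying a revlog extension pass the filter used by _walk().
assert isrevlog(b'00changelog.i', stat.S_IFREG, None)
assert isrevlog(b'data/foo.txt.d', stat.S_IFREG, None)
assert not isrevlog(b'fncache', stat.S_IFREG, None)
assert not isrevlog(b'data', stat.S_IFDIR, None)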
398 398
399 399 class basicstore(object):
400 400 '''base class for local repository stores'''
401 401
402 402 def __init__(self, path, vfstype):
403 403 vfs = vfstype(path)
404 404 self.path = vfs.base
405 405 self.createmode = _calcmode(vfs)
406 406 vfs.createmode = self.createmode
407 407 self.rawvfs = vfs
408 408 self.vfs = vfsmod.filtervfs(vfs, encodedir)
409 409 self.opener = self.vfs
410 410
411 411 def join(self, f):
412 412 return self.path + b'/' + encodedir(f)
413 413
414 414 def _walk(self, relpath, recurse, filefilter=isrevlog):
415 415 '''yields (unencoded, encoded, size)'''
416 416 path = self.path
417 417 if relpath:
418 418 path += b'/' + relpath
419 419 striplen = len(self.path) + 1
420 420 l = []
421 421 if self.rawvfs.isdir(path):
422 422 visit = [path]
423 423 readdir = self.rawvfs.readdir
424 424 while visit:
425 425 p = visit.pop()
426 426 for f, kind, st in readdir(p, stat=True):
427 427 fp = p + b'/' + f
428 428 if filefilter(f, kind, st):
429 429 n = util.pconvert(fp[striplen:])
430 430 l.append((decodedir(n), n, st.st_size))
431 431 elif kind == stat.S_IFDIR and recurse:
432 432 visit.append(fp)
433 433 l.sort()
434 434 return l
435 435
436 436 def changelog(self, trypending, concurrencychecker=None):
437 437 return changelog.changelog(
438 438 self.vfs,
439 439 trypending=trypending,
440 440 concurrencychecker=concurrencychecker,
441 441 )
442 442
443 443 def manifestlog(self, repo, storenarrowmatch):
444 444 rootstore = manifest.manifestrevlog(repo.nodeconstants, self.vfs)
445 445 return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch)
446 446
447 447 def datafiles(self, matcher=None):
448 448 return self._walk(b'data', True) + self._walk(b'meta', True)
449 449
450 450 def topfiles(self):
451 451 # yield manifest before changelog
452 452 return reversed(self._walk(b'', False))
453 453
454 454 def walk(self, matcher=None):
455 """yields (unencoded, encoded, size)
455 """return file related to data storage (ie: revlogs)
456
457 yields (unencoded, encoded, size)
456 458
457 459 if a matcher is passed, only the storage files of tracked paths
458 460 matching the matcher are yielded
459 461 """
460 462 # yield data files first
461 463 for x in self.datafiles(matcher):
462 464 yield x
463 465 for x in self.topfiles():
464 466 yield x
465 467
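A hedged usage sketch for the method documented by this change; `repo` is assumed to be a `localrepository`, and `repo.narrowmatch()` is only one plausible way to obtain a matcher:

# Sum the on-disk size of every storage file (data/meta revlogs first, then the
# top-level manifest and changelog), optionally restricted by a matcher.
total = 0
for unencoded, encoded, size in repo.store.walk(matcher=repo.narrowmatch()):
    total += size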
466 468 def copylist(self):
467 469 return _data
468 470
469 471 def write(self, tr):
470 472 pass
471 473
472 474 def invalidatecaches(self):
473 475 pass
474 476
475 477 def markremoved(self, fn):
476 478 pass
477 479
478 480 def __contains__(self, path):
479 481 '''Checks if the store contains path'''
480 482 path = b"/".join((b"data", path))
481 483 # file?
482 484 if self.vfs.exists(path + b".i"):
483 485 return True
484 486 # dir?
485 487 if not path.endswith(b"/"):
486 488 path = path + b"/"
487 489 return self.vfs.exists(path)
488 490
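For illustration, a brief sketch of the membership check above; `store` is assumed to be a `basicstore` (or subclass) instance, and the paths are store-relative tracked paths, not encoded filenames:

# File check looks for data/foo/bar.txt.i; directory check looks for data/foo/.
if b'foo/bar.txt' in store:
    pass  # a filelog exists for foo/bar.txt
if b'foo' in store:
    pass  # the directory data/foo/ exists in the store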
489 491
490 492 class encodedstore(basicstore):
491 493 def __init__(self, path, vfstype):
492 494 vfs = vfstype(path + b'/store')
493 495 self.path = vfs.base
494 496 self.createmode = _calcmode(vfs)
495 497 vfs.createmode = self.createmode
496 498 self.rawvfs = vfs
497 499 self.vfs = vfsmod.filtervfs(vfs, encodefilename)
498 500 self.opener = self.vfs
499 501
500 502 def datafiles(self, matcher=None):
501 503 for a, b, size in super(encodedstore, self).datafiles():
502 504 try:
503 505 a = decodefilename(a)
504 506 except KeyError:
505 507 a = None
506 508 if a is not None and not _matchtrackedpath(a, matcher):
507 509 continue
508 510 yield a, b, size
509 511
510 512 def join(self, f):
511 513 return self.path + b'/' + encodefilename(f)
512 514
513 515 def copylist(self):
514 516 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
515 517
516 518
517 519 class fncache(object):
518 520 # the filename used to be partially encoded
519 521 # hence the encodedir/decodedir dance
520 522 def __init__(self, vfs):
521 523 self.vfs = vfs
522 524 self.entries = None
523 525 self._dirty = False
524 526 # set of new additions to fncache
525 527 self.addls = set()
526 528
527 529 def ensureloaded(self, warn=None):
528 530 """read the fncache file if not already read.
529 531
530 532 If the file on disk is corrupted, raise. If warn is provided,
531 533 warn and keep going instead."""
532 534 if self.entries is None:
533 535 self._load(warn)
534 536
535 537 def _load(self, warn=None):
536 538 '''fill the entries from the fncache file'''
537 539 self._dirty = False
538 540 try:
539 541 fp = self.vfs(b'fncache', mode=b'rb')
540 542 except IOError:
541 543 # skip nonexistent file
542 544 self.entries = set()
543 545 return
544 546
545 547 self.entries = set()
546 548 chunk = b''
547 549 for c in iter(functools.partial(fp.read, fncache_chunksize), b''):
548 550 chunk += c
549 551 try:
550 552 p = chunk.rindex(b'\n')
551 553 self.entries.update(decodedir(chunk[: p + 1]).splitlines())
552 554 chunk = chunk[p + 1 :]
553 555 except ValueError:
554 556 # substring '\n' not found, maybe the entry is bigger than the
555 557 # chunksize, so let's keep iterating
556 558 pass
557 559
558 560 if chunk:
559 561 msg = _(b"fncache does not ends with a newline")
560 562 if warn:
561 563 warn(msg + b'\n')
562 564 else:
563 565 raise error.Abort(
564 566 msg,
565 567 hint=_(
566 568 b"use 'hg debugrebuildfncache' to "
567 569 b"rebuild the fncache"
568 570 ),
569 571 )
570 572 self._checkentries(fp, warn)
571 573 fp.close()
572 574
573 575 def _checkentries(self, fp, warn):
574 576 """ make sure there is no empty string in entries """
575 577 if b'' in self.entries:
576 578 fp.seek(0)
577 579 for n, line in enumerate(util.iterfile(fp)):
578 580 if not line.rstrip(b'\n'):
579 581 t = _(b'invalid entry in fncache, line %d') % (n + 1)
580 582 if warn:
581 583 warn(t + b'\n')
582 584 else:
583 585 raise error.Abort(t)
584 586
585 587 def write(self, tr):
586 588 if self._dirty:
587 589 assert self.entries is not None
588 590 self.entries = self.entries | self.addls
589 591 self.addls = set()
590 592 tr.addbackup(b'fncache')
591 593 fp = self.vfs(b'fncache', mode=b'wb', atomictemp=True)
592 594 if self.entries:
593 595 fp.write(encodedir(b'\n'.join(self.entries) + b'\n'))
594 596 fp.close()
595 597 self._dirty = False
596 598 if self.addls:
597 599 # if we have just new entries, let's append them to the fncache
598 600 tr.addbackup(b'fncache')
599 601 fp = self.vfs(b'fncache', mode=b'ab', atomictemp=True)
600 602 if self.addls:
601 603 fp.write(encodedir(b'\n'.join(self.addls) + b'\n'))
602 604 fp.close()
603 605 self.entries = None
604 606 self.addls = set()
605 607
606 608 def add(self, fn):
607 609 if self.entries is None:
608 610 self._load()
609 611 if fn not in self.entries:
610 612 self.addls.add(fn)
611 613
612 614 def remove(self, fn):
613 615 if self.entries is None:
614 616 self._load()
615 617 if fn in self.addls:
616 618 self.addls.remove(fn)
617 619 return
618 620 try:
619 621 self.entries.remove(fn)
620 622 self._dirty = True
621 623 except KeyError:
622 624 pass
623 625
624 626 def __contains__(self, fn):
625 627 if fn in self.addls:
626 628 return True
627 629 if self.entries is None:
628 630 self._load()
629 631 return fn in self.entries
630 632
631 633 def __iter__(self):
632 634 if self.entries is None:
633 635 self._load()
634 636 return iter(self.entries | self.addls)
635 637
636 638
637 639 class _fncachevfs(vfsmod.proxyvfs):
638 640 def __init__(self, vfs, fnc, encode):
639 641 vfsmod.proxyvfs.__init__(self, vfs)
640 642 self.fncache = fnc
641 643 self.encode = encode
642 644
643 645 def __call__(self, path, mode=b'r', *args, **kw):
644 646 encoded = self.encode(path)
645 647 if mode not in (b'r', b'rb') and (
646 648 path.startswith(b'data/') or path.startswith(b'meta/')
647 649 ):
648 650 # do not trigger a fncache load when adding a file that already is
649 651 # known to exist.
650 652 notload = self.fncache.entries is None and self.vfs.exists(encoded)
651 653 if notload and b'a' in mode and not self.vfs.stat(encoded).st_size:
652 654 # when appending to an existing file, if the file has size zero,
653 655 # it should be considered as missing. Such zero-size files are
654 656 # the result of truncation when a transaction is aborted.
655 657 notload = False
656 658 if not notload:
657 659 self.fncache.add(path)
658 660 return self.vfs(encoded, mode, *args, **kw)
659 661
660 662 def join(self, path):
661 663 if path:
662 664 return self.vfs.join(self.encode(path))
663 665 else:
664 666 return self.vfs.join(path)
665 667
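A hedged sketch of the behaviour implemented by `__call__` above; `store` is assumed to be a `fncachestore` instance and `tr` an open transaction:

# Opening a data/ or meta/ path for writing through the fncache-aware vfs queues
# the unencoded path in the fncache; plain reads never do.
fp = store.vfs(b'data/newfile.txt.i', mode=b'wb')
fp.write(b'')
fp.close()
# The new name sits in store.fncache.addls until store.fncache.write(tr) persists it.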
666 668
667 669 class fncachestore(basicstore):
668 670 def __init__(self, path, vfstype, dotencode):
669 671 if dotencode:
670 672 encode = _pathencode
671 673 else:
672 674 encode = _plainhybridencode
673 675 self.encode = encode
674 676 vfs = vfstype(path + b'/store')
675 677 self.path = vfs.base
676 678 self.pathsep = self.path + b'/'
677 679 self.createmode = _calcmode(vfs)
678 680 vfs.createmode = self.createmode
679 681 self.rawvfs = vfs
680 682 fnc = fncache(vfs)
681 683 self.fncache = fnc
682 684 self.vfs = _fncachevfs(vfs, fnc, encode)
683 685 self.opener = self.vfs
684 686
685 687 def join(self, f):
686 688 return self.pathsep + self.encode(f)
687 689
688 690 def getsize(self, path):
689 691 return self.rawvfs.stat(path).st_size
690 692
691 693 def datafiles(self, matcher=None):
692 694 for f in sorted(self.fncache):
693 695 if not _matchtrackedpath(f, matcher):
694 696 continue
695 697 ef = self.encode(f)
696 698 try:
697 699 yield f, ef, self.getsize(ef)
698 700 except OSError as err:
699 701 if err.errno != errno.ENOENT:
700 702 raise
701 703
702 704 def copylist(self):
703 705 d = (
704 706 b'bookmarks',
705 707 b'narrowspec',
706 708 b'data',
707 709 b'meta',
708 710 b'dh',
709 711 b'fncache',
710 712 b'phaseroots',
711 713 b'obsstore',
712 714 b'00manifest.d',
713 715 b'00manifest.i',
714 716 b'00changelog.d',
715 717 b'00changelog.i',
716 718 b'requires',
717 719 )
718 720 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in d]
719 721
720 722 def write(self, tr):
721 723 self.fncache.write(tr)
722 724
723 725 def invalidatecaches(self):
724 726 self.fncache.entries = None
725 727 self.fncache.addls = set()
726 728
727 729 def markremoved(self, fn):
728 730 self.fncache.remove(fn)
729 731
730 732 def _exists(self, f):
731 733 ef = self.encode(f)
732 734 try:
733 735 self.getsize(ef)
734 736 return True
735 737 except OSError as err:
736 738 if err.errno != errno.ENOENT:
737 739 raise
738 740 # nonexistent entry
739 741 return False
740 742
741 743 def __contains__(self, path):
742 744 '''Checks if the store contains path'''
743 745 path = b"/".join((b"data", path))
744 746 # check for files (exact match)
745 747 e = path + b'.i'
746 748 if e in self.fncache and self._exists(e):
747 749 return True
748 750 # now check for directories (prefix match)
749 751 if not path.endswith(b'/'):
750 752 path += b'/'
751 753 for e in self.fncache:
752 754 if e.startswith(path) and self._exists(e):
753 755 return True
754 756 return False