store: use `endswith` to detect revlog extension...
marmoute
r47112:374d7fff default
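The change in this revision replaces the per-suffix slice comparisons in isrevlog() with a single bytes.endswith() call against a module-level tuple of revlog extensions (see the hunk around the REVLOG_FILES_EXT definition below). A minimal standalone sketch of the idea, not the Mercurial source itself; the helper name and the asserts are illustrative, while the extension tuple matches the one added by this changeset:

    REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd')

    def _looks_like_revlog(name):
        # bytes.endswith() accepts a tuple of suffixes, so the previous
        #   name[-2:] in (b'.i', b'.d', b'.n') or name[-3:] == b'.nd'
        # pair of checks collapses into one call against a shared constant.
        return name.endswith(REVLOG_FILES_EXT)

    assert _looks_like_revlog(b'00changelog.i')
    assert _looks_like_revlog(b'data/foo.d')
    assert _looks_like_revlog(b'00manifest.nd')
    assert not _looks_like_revlog(b'fncache')

Keeping the suffixes in one tuple also gives later code a single place to extend if more revlog-related file types are introduced.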
@@ -1,750 +1,750 b''
1 1 # store.py - repository store handling for Mercurial
2 2 #
3 3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import functools
12 12 import os
13 13 import stat
14 14
15 15 from .i18n import _
16 16 from .pycompat import getattr
17 17 from .node import hex
18 18 from . import (
19 19 changelog,
20 20 error,
21 21 manifest,
22 22 policy,
23 23 pycompat,
24 24 util,
25 25 vfs as vfsmod,
26 26 )
27 27 from .utils import hashutil
28 28
29 29 parsers = policy.importmod('parsers')
30 30 # how much bytes should be read from fncache in one read
31 31 # It is done to prevent loading large fncache files into memory
32 32 fncache_chunksize = 10 ** 6
33 33
34 34
35 35 def _matchtrackedpath(path, matcher):
36 36 """parses a fncache entry and returns whether the entry is tracking a path
37 37 matched by matcher or not.
38 38
39 39 If matcher is None, returns True"""
40 40
41 41 if matcher is None:
42 42 return True
43 43 path = decodedir(path)
44 44 if path.startswith(b'data/'):
45 45 return matcher(path[len(b'data/') : -len(b'.i')])
46 46 elif path.startswith(b'meta/'):
47 47 return matcher.visitdir(path[len(b'meta/') : -len(b'/00manifest.i')])
48 48
49 49 raise error.ProgrammingError(b"cannot decode path %s" % path)
50 50
51 51
52 52 # This avoids a collision between a file named foo and a dir named
53 53 # foo.i or foo.d
54 54 def _encodedir(path):
55 55 """
56 56 >>> _encodedir(b'data/foo.i')
57 57 'data/foo.i'
58 58 >>> _encodedir(b'data/foo.i/bla.i')
59 59 'data/foo.i.hg/bla.i'
60 60 >>> _encodedir(b'data/foo.i.hg/bla.i')
61 61 'data/foo.i.hg.hg/bla.i'
62 62 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
63 63 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
64 64 """
65 65 return (
66 66 path.replace(b".hg/", b".hg.hg/")
67 67 .replace(b".i/", b".i.hg/")
68 68 .replace(b".d/", b".d.hg/")
69 69 )
70 70
71 71
72 72 encodedir = getattr(parsers, 'encodedir', _encodedir)
73 73
74 74
75 75 def decodedir(path):
76 76 """
77 77 >>> decodedir(b'data/foo.i')
78 78 'data/foo.i'
79 79 >>> decodedir(b'data/foo.i.hg/bla.i')
80 80 'data/foo.i/bla.i'
81 81 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
82 82 'data/foo.i.hg/bla.i'
83 83 """
84 84 if b".hg/" not in path:
85 85 return path
86 86 return (
87 87 path.replace(b".d.hg/", b".d/")
88 88 .replace(b".i.hg/", b".i/")
89 89 .replace(b".hg.hg/", b".hg/")
90 90 )
91 91
92 92
93 93 def _reserved():
94 94 """characters that are problematic for filesystems
95 95
96 96 * ascii escapes (0..31)
97 97 * ascii hi (126..255)
98 98 * windows specials
99 99
100 100 these characters will be escaped by encodefunctions
101 101 """
102 102 winreserved = [ord(x) for x in u'\\:*?"<>|']
103 103 for x in range(32):
104 104 yield x
105 105 for x in range(126, 256):
106 106 yield x
107 107 for x in winreserved:
108 108 yield x
109 109
110 110
111 111 def _buildencodefun():
112 112 """
113 113 >>> enc, dec = _buildencodefun()
114 114
115 115 >>> enc(b'nothing/special.txt')
116 116 'nothing/special.txt'
117 117 >>> dec(b'nothing/special.txt')
118 118 'nothing/special.txt'
119 119
120 120 >>> enc(b'HELLO')
121 121 '_h_e_l_l_o'
122 122 >>> dec(b'_h_e_l_l_o')
123 123 'HELLO'
124 124
125 125 >>> enc(b'hello:world?')
126 126 'hello~3aworld~3f'
127 127 >>> dec(b'hello~3aworld~3f')
128 128 'hello:world?'
129 129
130 130 >>> enc(b'the\\x07quick\\xADshot')
131 131 'the~07quick~adshot'
132 132 >>> dec(b'the~07quick~adshot')
133 133 'the\\x07quick\\xadshot'
134 134 """
135 135 e = b'_'
136 136 xchr = pycompat.bytechr
137 137 asciistr = list(map(xchr, range(127)))
138 138 capitals = list(range(ord(b"A"), ord(b"Z") + 1))
139 139
140 140 cmap = {x: x for x in asciistr}
141 141 for x in _reserved():
142 142 cmap[xchr(x)] = b"~%02x" % x
143 143 for x in capitals + [ord(e)]:
144 144 cmap[xchr(x)] = e + xchr(x).lower()
145 145
146 146 dmap = {}
147 147 for k, v in pycompat.iteritems(cmap):
148 148 dmap[v] = k
149 149
150 150 def decode(s):
151 151 i = 0
152 152 while i < len(s):
153 153 for l in pycompat.xrange(1, 4):
154 154 try:
155 155 yield dmap[s[i : i + l]]
156 156 i += l
157 157 break
158 158 except KeyError:
159 159 pass
160 160 else:
161 161 raise KeyError
162 162
163 163 return (
164 164 lambda s: b''.join(
165 165 [cmap[s[c : c + 1]] for c in pycompat.xrange(len(s))]
166 166 ),
167 167 lambda s: b''.join(list(decode(s))),
168 168 )
169 169
170 170
171 171 _encodefname, _decodefname = _buildencodefun()
172 172
173 173
174 174 def encodefilename(s):
175 175 """
176 176 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
177 177 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
178 178 """
179 179 return _encodefname(encodedir(s))
180 180
181 181
182 182 def decodefilename(s):
183 183 """
184 184 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
185 185 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
186 186 """
187 187 return decodedir(_decodefname(s))
188 188
189 189
190 190 def _buildlowerencodefun():
191 191 """
192 192 >>> f = _buildlowerencodefun()
193 193 >>> f(b'nothing/special.txt')
194 194 'nothing/special.txt'
195 195 >>> f(b'HELLO')
196 196 'hello'
197 197 >>> f(b'hello:world?')
198 198 'hello~3aworld~3f'
199 199 >>> f(b'the\\x07quick\\xADshot')
200 200 'the~07quick~adshot'
201 201 """
202 202 xchr = pycompat.bytechr
203 203 cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
204 204 for x in _reserved():
205 205 cmap[xchr(x)] = b"~%02x" % x
206 206 for x in range(ord(b"A"), ord(b"Z") + 1):
207 207 cmap[xchr(x)] = xchr(x).lower()
208 208
209 209 def lowerencode(s):
210 210 return b"".join([cmap[c] for c in pycompat.iterbytestr(s)])
211 211
212 212 return lowerencode
213 213
214 214
215 215 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
216 216
217 217 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
218 218 _winres3 = (b'aux', b'con', b'prn', b'nul') # length 3
219 219 _winres4 = (b'com', b'lpt') # length 4 (with trailing 1..9)
220 220
221 221
222 222 def _auxencode(path, dotencode):
223 223 """
224 224 Encodes filenames containing names reserved by Windows or which end in
225 225 period or space. Does not touch other single reserved characters c.
226 226 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
227 227 Additionally encodes space or period at the beginning, if dotencode is
228 228 True. Parameter path is assumed to be all lowercase.
229 229 A segment only needs encoding if a reserved name appears as a
230 230 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
231 231 doesn't need encoding.
232 232
233 233 >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
234 234 >>> _auxencode(s.split(b'/'), True)
235 235 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
236 236 >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
237 237 >>> _auxencode(s.split(b'/'), False)
238 238 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
239 239 >>> _auxencode([b'foo. '], True)
240 240 ['foo.~20']
241 241 >>> _auxencode([b' .foo'], True)
242 242 ['~20.foo']
243 243 """
244 244 for i, n in enumerate(path):
245 245 if not n:
246 246 continue
247 247 if dotencode and n[0] in b'. ':
248 248 n = b"~%02x" % ord(n[0:1]) + n[1:]
249 249 path[i] = n
250 250 else:
251 251 l = n.find(b'.')
252 252 if l == -1:
253 253 l = len(n)
254 254 if (l == 3 and n[:3] in _winres3) or (
255 255 l == 4
256 256 and n[3:4] <= b'9'
257 257 and n[3:4] >= b'1'
258 258 and n[:3] in _winres4
259 259 ):
260 260 # encode third letter ('aux' -> 'au~78')
261 261 ec = b"~%02x" % ord(n[2:3])
262 262 n = n[0:2] + ec + n[3:]
263 263 path[i] = n
264 264 if n[-1] in b'. ':
265 265 # encode last period or space ('foo...' -> 'foo..~2e')
266 266 path[i] = n[:-1] + b"~%02x" % ord(n[-1:])
267 267 return path
268 268
269 269
270 270 _maxstorepathlen = 120
271 271 _dirprefixlen = 8
272 272 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
273 273
274 274
275 275 def _hashencode(path, dotencode):
276 276 digest = hex(hashutil.sha1(path).digest())
277 277 le = lowerencode(path[5:]).split(b'/') # skips prefix 'data/' or 'meta/'
278 278 parts = _auxencode(le, dotencode)
279 279 basename = parts[-1]
280 280 _root, ext = os.path.splitext(basename)
281 281 sdirs = []
282 282 sdirslen = 0
283 283 for p in parts[:-1]:
284 284 d = p[:_dirprefixlen]
285 285 if d[-1] in b'. ':
286 286 # Windows can't access dirs ending in period or space
287 287 d = d[:-1] + b'_'
288 288 if sdirslen == 0:
289 289 t = len(d)
290 290 else:
291 291 t = sdirslen + 1 + len(d)
292 292 if t > _maxshortdirslen:
293 293 break
294 294 sdirs.append(d)
295 295 sdirslen = t
296 296 dirs = b'/'.join(sdirs)
297 297 if len(dirs) > 0:
298 298 dirs += b'/'
299 299 res = b'dh/' + dirs + digest + ext
300 300 spaceleft = _maxstorepathlen - len(res)
301 301 if spaceleft > 0:
302 302 filler = basename[:spaceleft]
303 303 res = b'dh/' + dirs + filler + digest + ext
304 304 return res
305 305
306 306
307 307 def _hybridencode(path, dotencode):
308 308 """encodes path with a length limit
309 309
310 310 Encodes all paths that begin with 'data/', according to the following.
311 311
312 312 Default encoding (reversible):
313 313
314 314 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
315 315 characters are encoded as '~xx', where xx is the two digit hex code
316 316 of the character (see encodefilename).
317 317 Relevant path components consisting of Windows reserved filenames are
318 318 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
319 319
320 320 Hashed encoding (not reversible):
321 321
322 322 If the default-encoded path is longer than _maxstorepathlen, a
323 323 non-reversible hybrid hashing of the path is done instead.
324 324 This encoding uses up to _dirprefixlen characters of all directory
325 325 levels of the lowerencoded path, but not more levels than can fit into
326 326 _maxshortdirslen.
327 327 Then follows the filler followed by the sha digest of the full path.
328 328 The filler is the beginning of the basename of the lowerencoded path
329 329 (the basename is everything after the last path separator). The filler
330 330 is as long as possible, filling in characters from the basename until
331 331 the encoded path has _maxstorepathlen characters (or all chars of the
332 332 basename have been taken).
333 333 The extension (e.g. '.i' or '.d') is preserved.
334 334
335 335 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
336 336 encoding was used.
337 337 """
338 338 path = encodedir(path)
339 339 ef = _encodefname(path).split(b'/')
340 340 res = b'/'.join(_auxencode(ef, dotencode))
341 341 if len(res) > _maxstorepathlen:
342 342 res = _hashencode(path, dotencode)
343 343 return res
344 344
345 345
346 346 def _pathencode(path):
347 347 de = encodedir(path)
348 348 if len(path) > _maxstorepathlen:
349 349 return _hashencode(de, True)
350 350 ef = _encodefname(de).split(b'/')
351 351 res = b'/'.join(_auxencode(ef, True))
352 352 if len(res) > _maxstorepathlen:
353 353 return _hashencode(de, True)
354 354 return res
355 355
356 356
357 357 _pathencode = getattr(parsers, 'pathencode', _pathencode)
358 358
359 359
360 360 def _plainhybridencode(f):
361 361 return _hybridencode(f, False)
362 362
363 363
364 364 def _calcmode(vfs):
365 365 try:
366 366 # files in .hg/ will be created using this mode
367 367 mode = vfs.stat().st_mode
368 368 # avoid some useless chmods
369 369 if (0o777 & ~util.umask) == (0o777 & mode):
370 370 mode = None
371 371 except OSError:
372 372 mode = None
373 373 return mode
374 374
375 375
376 376 _data = [
377 377 b'bookmarks',
378 378 b'narrowspec',
379 379 b'data',
380 380 b'meta',
381 381 b'00manifest.d',
382 382 b'00manifest.i',
383 383 b'00changelog.d',
384 384 b'00changelog.i',
385 385 b'phaseroots',
386 386 b'obsstore',
387 387 b'requires',
388 388 ]
389 389
390 REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd')
391
390 392
391 393 def isrevlog(f, kind, st):
392 394 if kind != stat.S_IFREG:
393 395 return False
394 if f[-2:] in (b'.i', b'.d', b'.n'):
395 return True
396 return f[-3:] == b'.nd'
396 return f.endswith(REVLOG_FILES_EXT)
397 397
398 398
399 399 class basicstore(object):
400 400 '''base class for local repository stores'''
401 401
402 402 def __init__(self, path, vfstype):
403 403 vfs = vfstype(path)
404 404 self.path = vfs.base
405 405 self.createmode = _calcmode(vfs)
406 406 vfs.createmode = self.createmode
407 407 self.rawvfs = vfs
408 408 self.vfs = vfsmod.filtervfs(vfs, encodedir)
409 409 self.opener = self.vfs
410 410
411 411 def join(self, f):
412 412 return self.path + b'/' + encodedir(f)
413 413
414 414 def _walk(self, relpath, recurse, filefilter=isrevlog):
415 415 '''yields (unencoded, encoded, size)'''
416 416 path = self.path
417 417 if relpath:
418 418 path += b'/' + relpath
419 419 striplen = len(self.path) + 1
420 420 l = []
421 421 if self.rawvfs.isdir(path):
422 422 visit = [path]
423 423 readdir = self.rawvfs.readdir
424 424 while visit:
425 425 p = visit.pop()
426 426 for f, kind, st in readdir(p, stat=True):
427 427 fp = p + b'/' + f
428 428 if filefilter(f, kind, st):
429 429 n = util.pconvert(fp[striplen:])
430 430 l.append((decodedir(n), n, st.st_size))
431 431 elif kind == stat.S_IFDIR and recurse:
432 432 visit.append(fp)
433 433 l.sort()
434 434 return l
435 435
436 436 def changelog(self, trypending):
437 437 return changelog.changelog(self.vfs, trypending=trypending)
438 438
439 439 def manifestlog(self, repo, storenarrowmatch):
440 440 rootstore = manifest.manifestrevlog(self.vfs)
441 441 return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch)
442 442
443 443 def datafiles(self, matcher=None):
444 444 return self._walk(b'data', True) + self._walk(b'meta', True)
445 445
446 446 def topfiles(self):
447 447 # yield manifest before changelog
448 448 return reversed(self._walk(b'', False))
449 449
450 450 def walk(self, matcher=None):
451 451 """yields (unencoded, encoded, size)
452 452
453 453 if a matcher is passed, storage files of only those tracked paths
454 454 are passed with matches the matcher
455 455 """
456 456 # yield data files first
457 457 for x in self.datafiles(matcher):
458 458 yield x
459 459 for x in self.topfiles():
460 460 yield x
461 461
462 462 def copylist(self):
463 463 return _data
464 464
465 465 def write(self, tr):
466 466 pass
467 467
468 468 def invalidatecaches(self):
469 469 pass
470 470
471 471 def markremoved(self, fn):
472 472 pass
473 473
474 474 def __contains__(self, path):
475 475 '''Checks if the store contains path'''
476 476 path = b"/".join((b"data", path))
477 477 # file?
478 478 if self.vfs.exists(path + b".i"):
479 479 return True
480 480 # dir?
481 481 if not path.endswith(b"/"):
482 482 path = path + b"/"
483 483 return self.vfs.exists(path)
484 484
485 485
486 486 class encodedstore(basicstore):
487 487 def __init__(self, path, vfstype):
488 488 vfs = vfstype(path + b'/store')
489 489 self.path = vfs.base
490 490 self.createmode = _calcmode(vfs)
491 491 vfs.createmode = self.createmode
492 492 self.rawvfs = vfs
493 493 self.vfs = vfsmod.filtervfs(vfs, encodefilename)
494 494 self.opener = self.vfs
495 495
496 496 def datafiles(self, matcher=None):
497 497 for a, b, size in super(encodedstore, self).datafiles():
498 498 try:
499 499 a = decodefilename(a)
500 500 except KeyError:
501 501 a = None
502 502 if a is not None and not _matchtrackedpath(a, matcher):
503 503 continue
504 504 yield a, b, size
505 505
506 506 def join(self, f):
507 507 return self.path + b'/' + encodefilename(f)
508 508
509 509 def copylist(self):
510 510 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
511 511
512 512
513 513 class fncache(object):
514 514 # the filename used to be partially encoded
515 515 # hence the encodedir/decodedir dance
516 516 def __init__(self, vfs):
517 517 self.vfs = vfs
518 518 self.entries = None
519 519 self._dirty = False
520 520 # set of new additions to fncache
521 521 self.addls = set()
522 522
523 523 def ensureloaded(self, warn=None):
524 524 """read the fncache file if not already read.
525 525
526 526 If the file on disk is corrupted, raise. If warn is provided,
527 527 warn and keep going instead."""
528 528 if self.entries is None:
529 529 self._load(warn)
530 530
531 531 def _load(self, warn=None):
532 532 '''fill the entries from the fncache file'''
533 533 self._dirty = False
534 534 try:
535 535 fp = self.vfs(b'fncache', mode=b'rb')
536 536 except IOError:
537 537 # skip nonexistent file
538 538 self.entries = set()
539 539 return
540 540
541 541 self.entries = set()
542 542 chunk = b''
543 543 for c in iter(functools.partial(fp.read, fncache_chunksize), b''):
544 544 chunk += c
545 545 try:
546 546 p = chunk.rindex(b'\n')
547 547 self.entries.update(decodedir(chunk[: p + 1]).splitlines())
548 548 chunk = chunk[p + 1 :]
549 549 except ValueError:
550 550 # substring '\n' not found, maybe the entry is bigger than the
551 551 # chunksize, so let's keep iterating
552 552 pass
553 553
554 554 if chunk:
555 555 msg = _(b"fncache does not ends with a newline")
556 556 if warn:
557 557 warn(msg + b'\n')
558 558 else:
559 559 raise error.Abort(
560 560 msg,
561 561 hint=_(
562 562 b"use 'hg debugrebuildfncache' to "
563 563 b"rebuild the fncache"
564 564 ),
565 565 )
566 566 self._checkentries(fp, warn)
567 567 fp.close()
568 568
569 569 def _checkentries(self, fp, warn):
570 570 """ make sure there is no empty string in entries """
571 571 if b'' in self.entries:
572 572 fp.seek(0)
573 573 for n, line in enumerate(util.iterfile(fp)):
574 574 if not line.rstrip(b'\n'):
575 575 t = _(b'invalid entry in fncache, line %d') % (n + 1)
576 576 if warn:
577 577 warn(t + b'\n')
578 578 else:
579 579 raise error.Abort(t)
580 580
581 581 def write(self, tr):
582 582 if self._dirty:
583 583 assert self.entries is not None
584 584 self.entries = self.entries | self.addls
585 585 self.addls = set()
586 586 tr.addbackup(b'fncache')
587 587 fp = self.vfs(b'fncache', mode=b'wb', atomictemp=True)
588 588 if self.entries:
589 589 fp.write(encodedir(b'\n'.join(self.entries) + b'\n'))
590 590 fp.close()
591 591 self._dirty = False
592 592 if self.addls:
593 593 # if we have just new entries, let's append them to the fncache
594 594 tr.addbackup(b'fncache')
595 595 fp = self.vfs(b'fncache', mode=b'ab', atomictemp=True)
596 596 if self.addls:
597 597 fp.write(encodedir(b'\n'.join(self.addls) + b'\n'))
598 598 fp.close()
599 599 self.entries = None
600 600 self.addls = set()
601 601
602 602 def add(self, fn):
603 603 if self.entries is None:
604 604 self._load()
605 605 if fn not in self.entries:
606 606 self.addls.add(fn)
607 607
608 608 def remove(self, fn):
609 609 if self.entries is None:
610 610 self._load()
611 611 if fn in self.addls:
612 612 self.addls.remove(fn)
613 613 return
614 614 try:
615 615 self.entries.remove(fn)
616 616 self._dirty = True
617 617 except KeyError:
618 618 pass
619 619
620 620 def __contains__(self, fn):
621 621 if fn in self.addls:
622 622 return True
623 623 if self.entries is None:
624 624 self._load()
625 625 return fn in self.entries
626 626
627 627 def __iter__(self):
628 628 if self.entries is None:
629 629 self._load()
630 630 return iter(self.entries | self.addls)
631 631
632 632
633 633 class _fncachevfs(vfsmod.proxyvfs):
634 634 def __init__(self, vfs, fnc, encode):
635 635 vfsmod.proxyvfs.__init__(self, vfs)
636 636 self.fncache = fnc
637 637 self.encode = encode
638 638
639 639 def __call__(self, path, mode=b'r', *args, **kw):
640 640 encoded = self.encode(path)
641 641 if mode not in (b'r', b'rb') and (
642 642 path.startswith(b'data/') or path.startswith(b'meta/')
643 643 ):
644 644 # do not trigger a fncache load when adding a file that already is
645 645 # known to exist.
646 646 notload = self.fncache.entries is None and self.vfs.exists(encoded)
647 647 if notload and b'a' in mode and not self.vfs.stat(encoded).st_size:
648 648 # when appending to an existing file, if the file has size zero,
649 649 # it should be considered as missing. Such zero-size files are
650 650 # the result of truncation when a transaction is aborted.
651 651 notload = False
652 652 if not notload:
653 653 self.fncache.add(path)
654 654 return self.vfs(encoded, mode, *args, **kw)
655 655
656 656 def join(self, path):
657 657 if path:
658 658 return self.vfs.join(self.encode(path))
659 659 else:
660 660 return self.vfs.join(path)
661 661
662 662
663 663 class fncachestore(basicstore):
664 664 def __init__(self, path, vfstype, dotencode):
665 665 if dotencode:
666 666 encode = _pathencode
667 667 else:
668 668 encode = _plainhybridencode
669 669 self.encode = encode
670 670 vfs = vfstype(path + b'/store')
671 671 self.path = vfs.base
672 672 self.pathsep = self.path + b'/'
673 673 self.createmode = _calcmode(vfs)
674 674 vfs.createmode = self.createmode
675 675 self.rawvfs = vfs
676 676 fnc = fncache(vfs)
677 677 self.fncache = fnc
678 678 self.vfs = _fncachevfs(vfs, fnc, encode)
679 679 self.opener = self.vfs
680 680
681 681 def join(self, f):
682 682 return self.pathsep + self.encode(f)
683 683
684 684 def getsize(self, path):
685 685 return self.rawvfs.stat(path).st_size
686 686
687 687 def datafiles(self, matcher=None):
688 688 for f in sorted(self.fncache):
689 689 if not _matchtrackedpath(f, matcher):
690 690 continue
691 691 ef = self.encode(f)
692 692 try:
693 693 yield f, ef, self.getsize(ef)
694 694 except OSError as err:
695 695 if err.errno != errno.ENOENT:
696 696 raise
697 697
698 698 def copylist(self):
699 699 d = (
700 700 b'bookmarks',
701 701 b'narrowspec',
702 702 b'data',
703 703 b'meta',
704 704 b'dh',
705 705 b'fncache',
706 706 b'phaseroots',
707 707 b'obsstore',
708 708 b'00manifest.d',
709 709 b'00manifest.i',
710 710 b'00changelog.d',
711 711 b'00changelog.i',
712 712 b'requires',
713 713 )
714 714 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in d]
715 715
716 716 def write(self, tr):
717 717 self.fncache.write(tr)
718 718
719 719 def invalidatecaches(self):
720 720 self.fncache.entries = None
721 721 self.fncache.addls = set()
722 722
723 723 def markremoved(self, fn):
724 724 self.fncache.remove(fn)
725 725
726 726 def _exists(self, f):
727 727 ef = self.encode(f)
728 728 try:
729 729 self.getsize(ef)
730 730 return True
731 731 except OSError as err:
732 732 if err.errno != errno.ENOENT:
733 733 raise
734 734 # nonexistent entry
735 735 return False
736 736
737 737 def __contains__(self, path):
738 738 '''Checks if the store contains path'''
739 739 path = b"/".join((b"data", path))
740 740 # check for files (exact match)
741 741 e = path + b'.i'
742 742 if e in self.fncache and self._exists(e):
743 743 return True
744 744 # now check for directories (prefix match)
745 745 if not path.endswith(b'/'):
746 746 path += b'/'
747 747 for e in self.fncache:
748 748 if e.startswith(path) and self._exists(e):
749 749 return True
750 750 return False
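For context on how the updated predicate is consumed: basicstore._walk() passes each directory entry's name, kind and stat result to its filefilter (isrevlog by default) and keeps only matching regular files. A rough standalone sketch of that filtering step, assuming a plain directory and using os.scandir() instead of Mercurial's vfs layer; walk_store() is an illustrative helper, not part of the repository API:

    import os
    import stat

    REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd')

    def isrevlog(f, kind, st):
        # same shape as the updated function in the diff above
        if kind != stat.S_IFREG:
            return False
        return f.endswith(REVLOG_FILES_EXT)

    def walk_store(path):
        """Yield (name, size) for revlog-looking regular files under path."""
        with os.scandir(path) as it:
            for entry in it:
                st = entry.stat(follow_symlinks=False)
                kind = stat.S_IFMT(st.st_mode)
                if isrevlog(os.fsencode(entry.name), kind, st):
                    yield entry.name, st.st_size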