Show More
@@ -469,12 +469,36 b' class StoreFile:' | |||
|
469 | 469 | |
|
470 | 470 | def file_size(self, vfs): |
|
471 | 471 | if self._file_size is None: |
|
472 | if vfs is None: | |
|
473 | msg = b"calling vfs-less file_size without prior call: %s" | |
|
474 | msg %= self.unencoded_path | |
|
475 | raise error.ProgrammingError(msg) | |
|
472 | 476 | try: |
|
473 | 477 | self._file_size = vfs.stat(self.unencoded_path).st_size |
|
474 | 478 | except FileNotFoundError: |
|
475 | 479 | self._file_size = 0 |
|
476 | 480 | return self._file_size |
|
477 | 481 | |
|
482 | def get_stream(self, vfs, copies): | |
|
483 | """return data "stream" information for this file | |
|
484 | ||
|
485 | (unencoded_file_path, content_iterator, content_size) | |
|
486 | """ | |
|
487 | size = self.file_size(None) | |
|
488 | ||
|
489 | def get_stream(): | |
|
490 | actual_path = copies[vfs.join(self.unencoded_path)] | |
|
491 | with open(actual_path, 'rb') as fp: | |
|
492 | yield None # ready to stream | |
|
493 | if size <= 65536: | |
|
494 | yield fp.read(size) | |
|
495 | else: | |
|
496 | yield from util.filechunkiter(fp, limit=size) | |
|
497 | ||
|
498 | s = get_stream() | |
|
499 | next(s) | |
|
500 | return (self.unencoded_path, s, size) | |
|
501 | ||
|
478 | 502 | |
|
479 | 503 | @attr.s(slots=True, init=False) |
|
480 | 504 | class BaseStoreEntry: |
@@ -485,6 +509,14 b' class BaseStoreEntry:' | |||
|
485 | 509 | def files(self) -> List[StoreFile]: |
|
486 | 510 | raise NotImplementedError |
|
487 | 511 | |
|
512 | def get_streams(self, vfs, copies=None): | |
|
513 | """return a list of data stream associated to files for this entry | |
|
514 | ||
|
515 | return [(unencoded_file_path, content_iterator, content_size), …] | |
|
516 | """ | |
|
517 | assert vfs is not None | |
|
518 | return [f.get_stream(vfs, copies) for f in self.files()] | |
|
519 | ||
|
488 | 520 | |
|
489 | 521 | @attr.s(slots=True, init=False) |
|
490 | 522 | class SimpleStoreEntry(BaseStoreEntry): |
@@ -11,7 +11,6 b' import os' | |||
|
11 | 11 | import struct |
|
12 | 12 | |
|
13 | 13 | from .i18n import _ |
|
14 | from .pycompat import open | |
|
15 | 14 | from .interfaces import repository |
|
16 | 15 | from . import ( |
|
17 | 16 | bookmarks, |
@@ -658,36 +657,25 b' def _emit2(repo, entries):' | |||
|
658 | 657 | totalbytecount = 0 |
|
659 | 658 | |
|
660 | 659 | for src, vfs, e in entries: |
|
661 | for f in e.files(): | |
|
660 | for name, stream, size in e.get_streams(vfs, copies=copy): | |
|
662 | 661 | yield src |
|
663 | name = f.unencoded_path | |
|
664 | 662 | yield util.uvarintencode(len(name)) |
|
665 | actual_path = copy[vfs.join(name)] | |
|
666 | fp = open(actual_path, b'rb') | |
|
667 | size = f.file_size(vfs) | |
|
663 | yield util.uvarintencode(size) | |
|
664 | yield name | |
|
668 | 665 | bytecount = 0 |
|
669 |
|
|
|
670 |
|
|
|
671 |
|
|
|
672 | if size <= 65536: | |
|
673 |
|
|
|
674 |
|
|
|
675 | chunks = util.filechunkiter(fp, limit=size) | |
|
676 | for chunk in chunks: | |
|
677 |
|
|
|
678 | totalbytecount += len(chunk) | |
|
679 |
|
|
|
680 |
|
|
|
681 |
|
|
|
682 | # Would most likely be caused by a race due to `hg | |
|
683 | # strip` or a revlog split | |
|
684 | msg = _( | |
|
685 | b'clone could only read %d bytes from %s, but ' | |
|
686 | b'expected %d bytes' | |
|
687 | ) | |
|
688 | raise error.Abort(msg % (bytecount, name, size)) | |
|
689 | finally: | |
|
690 | fp.close() | |
|
666 | for chunk in stream: | |
|
667 | bytecount += len(chunk) | |
|
668 | totalbytecount += len(chunk) | |
|
669 | progress.update(totalbytecount) | |
|
670 | yield chunk | |
|
671 | if bytecount != size: | |
|
672 | # Would most likely be caused by a race due to `hg | |
|
673 | # strip` or a revlog split | |
|
674 | msg = _( | |
|
675 | b'clone could only read %d bytes from %s, but ' | |
|
676 | b'expected %d bytes' | |
|
677 | ) | |
|
678 | raise error.Abort(msg % (bytecount, name, size)) | |
|
691 | 679 | |
|
692 | 680 | |
|
693 | 681 | def _test_sync_point_walk_1(repo): |
General Comments 0
You need to be logged in to leave comments.
Login now