@@ -293,7 +293,8 @@ def _narrow(
             if f.startswith(b'data/'):
                 file = f[5:-2]
                 if not newmatch(file):
-                    todelete.append(f)
+                    for file_ in entry.files():
+                        todelete.append(file_.unencoded_path)
             elif f.startswith(b'meta/'):
                 dir = f[5:-13]
                 dirs = sorted(pathutil.dirs({dir})) + [dir]
@@ -306,7 +307,8 @@ def _narrow(
                     if visit == b'all':
                         break
                 if not include:
-                    todelete.append(f)
+                    for file_ in entry.files():
+                        todelete.append(file_.unencoded_path)
 
         repo.destroying()
 
@@ -466,6 +466,24 @@ class StoreEntry:
     is_volatile = attr.ib(default=False)
     file_size = attr.ib(default=None)
 
+    def files(self):
+        return [
+            StoreFile(
+                unencoded_path=self.unencoded_path,
+                file_size=self.file_size,
+                is_volatile=self.is_volatile,
+            )
+        ]
+
+
+@attr.s(slots=True)
+class StoreFile:
+    """a file matching an entry"""
+
+    unencoded_path = attr.ib()
+    file_size = attr.ib()
+    is_volatile = attr.ib(default=False)
+
 
 class basicstore:
     '''base class for local repository stores'''
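The hunk above introduces the one-entry-to-many-files indirection that every other hunk in this change consumes. Below is a minimal, self-contained sketch of the same `StoreEntry`/`StoreFile` pair together with the caller pattern used by the other hunks; it assumes only the third-party `attr` package (as the surrounding store code already does), and the example path and size are made up for illustration, not taken from this change.

import attr


@attr.s(slots=True)
class StoreFile:
    """a single on-disk file backing a store entry"""

    unencoded_path = attr.ib()
    file_size = attr.ib()
    is_volatile = attr.ib(default=False)


@attr.s(slots=True)
class StoreEntry:
    """a logical store entry; for now it maps to exactly one file"""

    unencoded_path = attr.ib()
    is_volatile = attr.ib(default=False)
    file_size = attr.ib(default=None)

    def files(self):
        # One StoreFile per entry today; the indirection lets an entry
        # later expand to several files without changing the callers.
        return [
            StoreFile(
                unencoded_path=self.unencoded_path,
                file_size=self.file_size,
                is_volatile=self.is_volatile,
            )
        ]


# Caller pattern used throughout this change: iterate the files of an
# entry instead of reading the entry's attributes directly.
entry = StoreEntry(unencoded_path=b'data/foo.i', file_size=42)
assert [(f.unencoded_path, f.file_size) for f in entry.files()] == [
    (b'data/foo.i', 42)
]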
@@ -270,9 +270,10 @@ def generatev1(repo):
     with repo.lock():
         repo.ui.debug(b'scanning\n')
         for entry in _walkstreamfiles(repo):
-            if entry.file_size:
-                entries.append((entry.unencoded_path, entry.file_size))
-                total_bytes += entry.file_size
+            for f in entry.files():
+                if f.file_size:
+                    entries.append((f.unencoded_path, f.file_size))
+                    total_bytes += f.file_size
     _test_sync_point_walk_1(repo)
     _test_sync_point_walk_2(repo)
 
@@ -678,14 +679,13 @@ def _v2_walk(repo, includes, excludes, i
     matcher = narrowspec.match(repo.root, includes, excludes)
 
     for entry in _walkstreamfiles(repo, matcher):
-        if entry.file_size:
-            ft = _fileappend
-            if entry.is_volatile:
-                ft = _filefull
-            entries.append(
-                (_srcstore, entry.unencoded_path, ft, entry.file_size)
-            )
-            totalfilesize += entry.file_size
+        for f in entry.files():
+            if f.file_size:
+                ft = _fileappend
+                if f.is_volatile:
+                    ft = _filefull
+                entries.append((_srcstore, f.unencoded_path, ft, f.file_size))
+                totalfilesize += f.file_size
     for name in _walkstreamfullstorefiles(repo):
         if repo.svfs.exists(name):
             totalfilesize += repo.svfs.lstat(name).st_size
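The `_v2_walk` hunk above follows the same shape as the others: the size filter and the append-versus-full decision now read from each `StoreFile` rather than from the entry itself. A condensed sketch of that per-file accounting loop is below; `plan_stream`, its arguments, and the two flag constants are hypothetical stand-ins for the real `_walkstreamfiles`/`_fileappend`/`_filefull` machinery, shown only to isolate the per-file logic.

_FILE_APPEND = 0  # stand-in for _fileappend: append-only, safe to stream live
_FILE_FULL = 1    # stand-in for _filefull: volatile, must be copied in full


def plan_stream(entries, src=b'store'):
    """Hypothetical helper mirroring the per-file loop in _v2_walk."""
    planned = []
    total_size = 0
    for entry in entries:
        for f in entry.files():
            if not f.file_size:
                continue  # empty files are skipped, as in the hunk above
            kind = _FILE_FULL if f.is_volatile else _FILE_APPEND
            planned.append((src, f.unencoded_path, kind, f.file_size))
            total_size += f.file_size
    return planned, total_size

It can be exercised directly against the `StoreEntry` sketch given earlier, e.g. `plan_stream([StoreEntry(unencoded_path=b'data/foo.i', file_size=42)])`.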
@@ -408,11 +408,12 @@ class verifier:
         revlogv1 = self.revlogv1
         undecodable = []
         for entry in repo.store.datafiles(undecodable=undecodable):
-            f = entry.unencoded_path
-            size = entry.file_size
-            if (size > 0 or not revlogv1) and f.startswith(b'meta/'):
-                storefiles.add(_normpath(f))
-                subdirs.add(os.path.dirname(f))
+            for file_ in entry.files():
+                f = file_.unencoded_path
+                size = file_.file_size
+                if (size > 0 or not revlogv1) and f.startswith(b'meta/'):
+                    storefiles.add(_normpath(f))
+                    subdirs.add(os.path.dirname(f))
         for f in undecodable:
             self._err(None, _(b"cannot decode filename '%s'") % f)
         subdirprogress = ui.makeprogress(
@@ -475,10 +476,11 @@ class verifier:
         storefiles = set()
         undecodable = []
         for entry in repo.store.datafiles(undecodable=undecodable):
-            size = entry.file_size
-            f = entry.unencoded_path
-            if (size > 0 or not revlogv1) and f.startswith(b'data/'):
-                storefiles.add(_normpath(f))
+            for file_ in entry.files():
+                size = file_.file_size
+                f = file_.unencoded_path
+                if (size > 0 or not revlogv1) and f.startswith(b'data/'):
+                    storefiles.add(_normpath(f))
         for f in undecodable:
             self._err(None, _(b"cannot decode filename '%s'") % f)
 