@@ -405,6 +405,156 @@ def setupclient(ui, repo):
     shallowrepo.wraprepo(repo)
     repo.store = shallowstore.wrapstore(repo.store)
 
+def storewrapper(orig, requirements, path, vfstype):
+    s = orig(requirements, path, vfstype)
+    if constants.SHALLOWREPO_REQUIREMENT in requirements:
+        s = shallowstore.wrapstore(s)
+
+    return s
+
+# prefetch files before update
+def applyupdates(orig, repo, actions, wctx, mctx, overwrite, labels=None):
+    if isenabled(repo):
+        manifest = mctx.manifest()
+        files = []
+        for f, args, msg in actions['g']:
+            files.append((f, hex(manifest[f])))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return orig(repo, actions, wctx, mctx, overwrite, labels=labels)
+
+# Prefetch merge checkunknownfiles
+def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
+                      *args, **kwargs):
+    if isenabled(repo):
+        files = []
+        sparsematch = repo.maybesparsematch(mctx.rev())
+        for f, (m, actionargs, msg) in actions.iteritems():
+            if sparsematch and not sparsematch(f):
+                continue
+            if m in ('c', 'dc', 'cm'):
+                files.append((f, hex(mctx.filenode(f))))
+            elif m == 'dg':
+                f2 = actionargs[0]
+                files.append((f2, hex(mctx.filenode(f2))))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
+
+# Prefetch files before status attempts to look at their size and contents
+def checklookup(orig, self, files):
+    repo = self._repo
+    if isenabled(repo):
+        prefetchfiles = []
+        for parent in self._parents:
+            for f in files:
+                if f in parent:
+                    prefetchfiles.append((f, hex(parent.filenode(f))))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(prefetchfiles)
+    return orig(self, files)
+
+# Prefetch the logic that compares added and removed files for renames
+def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
+    if isenabled(repo):
+        files = []
+        pmf = repo['.'].manifest()
+        for f in removed:
+            if f in pmf:
+                files.append((f, hex(pmf[f])))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return orig(repo, matcher, added, removed, *args, **kwargs)
+
+# prefetch files before pathcopies check
+def computeforwardmissing(orig, a, b, match=None):
+    missing = orig(a, b, match=match)
+    repo = a._repo
+    if isenabled(repo):
+        mb = b.manifest()
+
+        files = []
+        sparsematch = repo.maybesparsematch(b.rev())
+        if sparsematch:
+            sparsemissing = set()
+            for f in missing:
+                if sparsematch(f):
+                    files.append((f, hex(mb[f])))
+                    sparsemissing.add(f)
+            missing = sparsemissing
+
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return missing
+
+# close cache miss server connection after the command has finished
+def runcommand(orig, lui, repo, *args, **kwargs):
+    fileservice = None
+    # repo can be None when running in chg:
+    # - at startup, reposetup was called because serve is not norepo
+    # - a norepo command like "help" is called
+    if repo and isenabled(repo):
+        fileservice = repo.fileservice
+    try:
+        return orig(lui, repo, *args, **kwargs)
+    finally:
+        if fileservice:
+            fileservice.close()
+
+# prevent strip from stripping remotefilelogs
+def _collectbrokencsets(orig, repo, files, striprev):
+    if isenabled(repo):
+        files = list([f for f in files if not repo.shallowmatch(f)])
+    return orig(repo, files, striprev)
+
+# changectx wrappers
+def filectx(orig, self, path, fileid=None, filelog=None):
+    if fileid is None:
+        fileid = self.filenode(path)
+    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
+        return remotefilectx.remotefilectx(self._repo, path, fileid=fileid,
+                                           changectx=self, filelog=filelog)
+    return orig(self, path, fileid=fileid, filelog=filelog)
+
+def workingfilectx(orig, self, path, filelog=None):
+    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
+        return remotefilectx.remoteworkingfilectx(self._repo, path,
+                                                  workingctx=self,
+                                                  filelog=filelog)
+    return orig(self, path, filelog=filelog)
+
+# prefetch required revisions before a diff
+def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
+            copy, getfilectx, *args, **kwargs):
+    if isenabled(repo):
+        prefetch = []
+        mf1 = ctx1.manifest()
+        for fname in modified + added + removed:
+            if fname in mf1:
+                fnode = getfilectx(fname, ctx1).filenode()
+                # fnode can be None if it's a edited working ctx file
+                if fnode:
+                    prefetch.append((fname, hex(fnode)))
+            if fname not in removed:
+                fnode = getfilectx(fname, ctx2).filenode()
+                if fnode:
+                    prefetch.append((fname, hex(fnode)))
+
+        repo.fileservice.prefetch(prefetch)
+
+    return orig(repo, revs, ctx1, ctx2, modified, added, removed, copy,
+                getfilectx, *args, **kwargs)
+
+# Prevent verify from processing files
+# a stub for mercurial.hg.verify()
+def _verify(orig, repo, level=None):
+    lock = repo.lock()
+    try:
+        return shallowverifier.shallowverifier(repo).verify()
+    finally:
+        lock.release()
+
+
 clientonetime = False
 def onetimeclientsetup(ui):
     global clientonetime
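Note on the hunk above: every function it introduces follows the convention expected by Mercurial's extensions.wrapfunction -- the replacement receives the original callable as its first argument (orig) and decides when to delegate to it. The sketch below is a simplified, standalone illustration of that convention in plain Python; it is not Mercurial's actual extensions module, and fakelocalrepo/makestore are made-up stand-ins.

# Illustrative sketch only: a simplified stand-in for
# mercurial.extensions.wrapfunction, runnable without Mercurial.
import types

def wrapfunction(container, funcname, wrapper):
    # Replace container.funcname so every call goes through wrapper,
    # passing the original function as the first argument.
    orig = getattr(container, funcname)
    def wrapped(*args, **kwargs):
        return wrapper(orig, *args, **kwargs)
    setattr(container, funcname, wrapped)
    return orig

# Made-up module and wrapper mirroring the storewrapper pattern above.
fakelocalrepo = types.SimpleNamespace(
    makestore=lambda requirements, path: 'plain store at %s' % path)

def storewrapper(orig, requirements, path):
    s = orig(requirements, path)
    if 'remotefilelog' in requirements:  # stand-in for SHALLOWREPO_REQUIREMENT
        s = 'shallow(%s)' % s
    return s

wrapfunction(fakelocalrepo, 'makestore', storewrapper)
print(fakelocalrepo.makestore({'remotefilelog'}, '/repo'))
# -> shallow(plain store at /repo)

In the patch itself only the definitions move to module level; the extensions.wrapfunction registrations stay inside onetimeclientsetup, as the hunks below show.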
@@ -419,110 +569,21 @@ def onetimeclientsetup(ui):
     extensions.wrapfunction(
         changegroup, 'makechangegroup', shallowbundle.makechangegroup)
 
-    def storewrapper(orig, requirements, path, vfstype):
-        s = orig(requirements, path, vfstype)
-        if constants.SHALLOWREPO_REQUIREMENT in requirements:
-            s = shallowstore.wrapstore(s)
-
-        return s
     extensions.wrapfunction(localrepo, 'makestore', storewrapper)
 
     extensions.wrapfunction(exchange, 'pull', exchangepull)
 
-    # prefetch files before update
-    def applyupdates(orig, repo, actions, wctx, mctx, overwrite, labels=None):
-        if isenabled(repo):
-            manifest = mctx.manifest()
-            files = []
-            for f, args, msg in actions['g']:
-                files.append((f, hex(manifest[f])))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return orig(repo, actions, wctx, mctx, overwrite, labels=labels)
     extensions.wrapfunction(merge, 'applyupdates', applyupdates)
 
-    # Prefetch merge checkunknownfiles
-    def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
-                          *args, **kwargs):
-        if isenabled(repo):
-            files = []
-            sparsematch = repo.maybesparsematch(mctx.rev())
-            for f, (m, actionargs, msg) in actions.iteritems():
-                if sparsematch and not sparsematch(f):
-                    continue
-                if m in ('c', 'dc', 'cm'):
-                    files.append((f, hex(mctx.filenode(f))))
-                elif m == 'dg':
-                    f2 = actionargs[0]
-                    files.append((f2, hex(mctx.filenode(f2))))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
     extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
 
-    # Prefetch files before status attempts to look at their size and contents
-    def checklookup(orig, self, files):
-        repo = self._repo
-        if isenabled(repo):
-            prefetchfiles = []
-            for parent in self._parents:
-                for f in files:
-                    if f in parent:
-                        prefetchfiles.append((f, hex(parent.filenode(f))))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(prefetchfiles)
-        return orig(self, files)
     extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
 
-    # Prefetch the logic that compares added and removed files for renames
-    def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
-        if isenabled(repo):
-            files = []
-            pmf = repo['.'].manifest()
-            for f in removed:
-                if f in pmf:
-                    files.append((f, hex(pmf[f])))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return orig(repo, matcher, added, removed, *args, **kwargs)
     extensions.wrapfunction(scmutil, '_findrenames', findrenames)
 
-    # prefetch files before pathcopies check
-    def computeforwardmissing(orig, a, b, match=None):
-        missing = orig(a, b, match=match)
-        repo = a._repo
-        if isenabled(repo):
-            mb = b.manifest()
-
-            files = []
-            sparsematch = repo.maybesparsematch(b.rev())
-            if sparsematch:
-                sparsemissing = set()
-                for f in missing:
-                    if sparsematch(f):
-                        files.append((f, hex(mb[f])))
-                        sparsemissing.add(f)
-                missing = sparsemissing
-
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return missing
     extensions.wrapfunction(copies, '_computeforwardmissing',
                             computeforwardmissing)
 
-    # close cache miss server connection after the command has finished
-    def runcommand(orig, lui, repo, *args, **kwargs):
-        fileservice = None
-        # repo can be None when running in chg:
-        # - at startup, reposetup was called because serve is not norepo
-        # - a norepo command like "help" is called
-        if repo and isenabled(repo):
-            fileservice = repo.fileservice
-        try:
-            return orig(lui, repo, *args, **kwargs)
-        finally:
-            if fileservice:
-                fileservice.close()
     extensions.wrapfunction(dispatch, 'runcommand', runcommand)
 
     # disappointing hacks below
@@ -531,11 +592,6 @@ def onetimeclientsetup(ui):
     revset.symbols['filelog'] = revset.filelog
     extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs)
 
-    # prevent strip from stripping remotefilelogs
-    def _collectbrokencsets(orig, repo, files, striprev):
-        if isenabled(repo):
-            files = list([f for f in files if not repo.shallowmatch(f)])
-        return orig(repo, files, striprev)
     extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
 
     # Don't commit filelogs until we know the commit hash, since the hash
@@ -580,55 +636,12 @@ def onetimeclientsetup(ui):
         return node
     extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
 
-    # changectx wrappers
-    def filectx(orig, self, path, fileid=None, filelog=None):
-        if fileid is None:
-            fileid = self.filenode(path)
-        if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-            return remotefilectx.remotefilectx(self._repo, path,
-                fileid=fileid, changectx=self, filelog=filelog)
-        return orig(self, path, fileid=fileid, filelog=filelog)
     extensions.wrapfunction(context.changectx, 'filectx', filectx)
 
-    def workingfilectx(orig, self, path, filelog=None):
-        if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-            return remotefilectx.remoteworkingfilectx(self._repo,
-                path, workingctx=self, filelog=filelog)
-        return orig(self, path, filelog=filelog)
     extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
 
-    # prefetch required revisions before a diff
-    def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
-            copy, getfilectx, *args, **kwargs):
-        if isenabled(repo):
-            prefetch = []
-            mf1 = ctx1.manifest()
-            for fname in modified + added + removed:
-                if fname in mf1:
-                    fnode = getfilectx(fname, ctx1).filenode()
-                    # fnode can be None if it's a edited working ctx file
-                    if fnode:
-                        prefetch.append((fname, hex(fnode)))
-                if fname not in removed:
-                    fnode = getfilectx(fname, ctx2).filenode()
-                    if fnode:
-                        prefetch.append((fname, hex(fnode)))
-
-            repo.fileservice.prefetch(prefetch)
-
-        return orig(repo, revs, ctx1, ctx2, modified, added, removed,
-            copy, getfilectx, *args, **kwargs)
     extensions.wrapfunction(patch, 'trydiff', trydiff)
 
-    # Prevent verify from processing files
-    # a stub for mercurial.hg.verify()
-    def _verify(orig, repo, level=None):
-        lock = repo.lock()
-        try:
-            return shallowverifier.shallowverifier(repo).verify()
-        finally:
-            lock.release()
-
     extensions.wrapfunction(hg, 'verify', _verify)
 
     scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook)
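A pattern that repeats through the wrappers in this change is batching: each one collects (path, hex(filenode)) pairs and hands the whole list to repo.fileservice.prefetch() in a single call, so one request can cover everything the wrapped operation is about to read. The fileservice itself is not part of this diff; the sketch below is only a hypothetical illustration of the batching idea, with fetchbatch standing in for whatever transport the real service uses.

# Hypothetical illustration of batched prefetching; not the real
# remotefilelog fileservice, whose implementation is outside this diff.
def prefetch(files, cache, fetchbatch):
    # files: iterable of (path, hexnode) pairs, possibly with duplicates.
    wanted = set(files)
    missing = [key for key in sorted(wanted) if key not in cache]
    if not missing:
        return
    # One round trip for the whole batch instead of one per file.
    for key, data in fetchbatch(missing):
        cache[key] = data

cache = {}
def fetchbatch(keys):
    # Stand-in for a network fetch that answers a list of keys at once.
    return [(key, 'contents of %s@%s' % key) for key in keys]

prefetch([('foo.py', 'abc123'), ('foo.py', 'abc123'), ('bar.py', 'def456')],
         cache, fetchbatch)
print(sorted(cache))
# -> [('bar.py', 'def456'), ('foo.py', 'abc123')]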