diff --git a/rhodecode/__init__.py b/rhodecode/__init__.py --- a/rhodecode/__init__.py +++ b/rhodecode/__init__.py @@ -45,7 +45,7 @@ PYRAMID_SETTINGS = {} EXTENSIONS = {} __version__ = ('.'.join((str(each) for each in VERSION[:3]))) -__dbversion__ = 98 # defines current db version for migrations +__dbversion__ = 99 # defines current db version for migrations __platform__ = platform.system() __license__ = 'AGPLv3, and Commercial License' __author__ = 'RhodeCode GmbH' diff --git a/rhodecode/apps/repository/views/repo_commits.py b/rhodecode/apps/repository/views/repo_commits.py --- a/rhodecode/apps/repository/views/repo_commits.py +++ b/rhodecode/apps/repository/views/repo_commits.py @@ -57,9 +57,6 @@ def _update_with_GET(params, request): params[k] += request.GET.getall(k) - - - class RepoCommitsView(RepoAppView): def load_default_context(self): c = self._get_local_tmpl_context(include_app_defaults=True) @@ -93,6 +90,8 @@ class RepoCommitsView(RepoAppView): try: pre_load = ['affected_files', 'author', 'branch', 'date', 'message', 'parents'] + if self.rhodecode_vcs_repo.alias == 'hg': + pre_load += ['hidden', 'obsolete', 'phase'] if len(commit_range) == 2: commits = self.rhodecode_vcs_repo.get_commits( diff --git a/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py @@ -3667,7 +3667,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py @@ 
-3750,7 +3750,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py @@ -3900,7 +3900,7 @@ class PullRequest(Base, _PullRequestBase shadow_repository_path = vcs_obj._get_shadow_repository_path( self.target_repo.repo_id, workspace_id) if os.path.isdir(shadow_repository_path): - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py --- a/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py @@ -3974,7 +3974,7 @@ class PullRequest(Base, _PullRequestBase shadow_repository_path = vcs_obj._get_shadow_repository_path( self.target_repo.repo_id, workspace_id) if os.path.isdir(shadow_repository_path): - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py b/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py --- a/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py @@ -3975,7 +3975,7 @@ class PullRequest(Base, _PullRequestBase shadow_repository_path = vcs_obj._get_shadow_repository_path( self.target_repo.repo_id, workspace_id) if os.path.isdir(shadow_repository_path): - return 
vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py @@ -3343,7 +3343,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py --- a/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py @@ -3344,7 +3344,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py @@ -3602,7 +3602,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/versions/099_version_4_18_0.py b/rhodecode/lib/dbmigrate/versions/099_version_4_18_0.py new file mode 100644 --- 
/dev/null +++ b/rhodecode/lib/dbmigrate/versions/099_version_4_18_0.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- + +import logging + +from alembic.migration import MigrationContext +from alembic.operations import Operations +from sqlalchemy import Column, String + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption + + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. + Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_4_16_0_2 + + init_model_encryption(db_4_16_0_2) + + context = MigrationContext.configure(migrate_engine.connect()) + op = Operations(context) + + cache_key = db_4_16_0_2.CacheKey.__table__ + + with op.batch_alter_table(cache_key.name) as batch_op: + batch_op.add_column( + Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)) + + +def downgrade(migrate_engine): + pass diff --git a/rhodecode/lib/rc_cache/backends.py b/rhodecode/lib/rc_cache/backends.py --- a/rhodecode/lib/rc_cache/backends.py +++ b/rhodecode/lib/rc_cache/backends.py @@ -17,6 +17,7 @@ # This program is dual-licensed. 
If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ + import time import errno import logging diff --git a/rhodecode/lib/rc_cache/utils.py b/rhodecode/lib/rc_cache/utils.py --- a/rhodecode/lib/rc_cache/utils.py +++ b/rhodecode/lib/rc_cache/utils.py @@ -195,16 +195,18 @@ def clear_cache_namespace(cache_region, class ActiveRegionCache(object): - def __init__(self, context): + def __init__(self, context, cache_data): self.context = context + self.cache_data = cache_data def should_invalidate(self): return False class FreshRegionCache(object): - def __init__(self, context): + def __init__(self, context, cache_data): self.context = context + self.cache_data = cache_data def should_invalidate(self): return True @@ -267,7 +269,7 @@ class InvalidationContext(object): self.thread_id = threading.current_thread().ident self.cache_key = compute_key_from_params(uid) - self.cache_key = 'proc:{}_thread:{}_{}'.format( + self.cache_key = 'proc:{}|thread:{}|params:{}'.format( self.proc_id, self.thread_id, self.cache_key) self.compute_time = 0 @@ -284,21 +286,23 @@ class InvalidationContext(object): Test if current object is valid, and return CacheRegion function that does invalidation and calculation """ + log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace) # register or get a new key based on uid self.cache_obj = self.get_or_create_cache_obj(uid=self.uid) + cache_data = self.cache_obj.get_dict() self._start_time = time.time() if self.cache_obj.cache_active: # means our cache obj is existing and marked as it's # cache is not outdated, we return ActiveRegionCache self.skip_cache_active_change = True - return ActiveRegionCache(context=self) + return ActiveRegionCache(context=self, cache_data=cache_data) - # the key is either not existing or set to False, we return + # the key is either not existing or set to 
False, we return # the real invalidator which re-computes value. We additionally set # the flag to actually update the Database objects self.skip_cache_active_change = False - return FreshRegionCache(context=self) + return FreshRegionCache(context=self, cache_data=cache_data) def __exit__(self, exc_type, exc_val, exc_tb): # save compute time diff --git a/rhodecode/lib/vcs/backends/git/repository.py b/rhodecode/lib/vcs/backends/git/repository.py --- a/rhodecode/lib/vcs/backends/git/repository.py +++ b/rhodecode/lib/vcs/backends/git/repository.py @@ -63,7 +63,7 @@ class GitRepository(BaseRepository): self.path = safe_str(os.path.abspath(repo_path)) self.config = config if config else self.get_default_config() - self.with_wire = with_wire + self.with_wire = with_wire or {"cache": False} # default should not use cache self._init_repo(create, src_url, do_workspace_checkout, bare) @@ -72,7 +72,8 @@ class GitRepository(BaseRepository): @LazyProperty def _remote(self): - return connection.Git(self.path, self.config, with_wire=self.with_wire) + repo_id = self.path + return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire) @LazyProperty def bare(self): @@ -354,7 +355,6 @@ class GitRepository(BaseRepository): :raises TagAlreadyExistError: if tag with same name already exists """ - print self._refs if name in self.tags: raise TagAlreadyExistError("Tag %s already exists" % name) commit = self.get_commit(commit_id=commit_id) @@ -804,8 +804,8 @@ class GitRepository(BaseRepository): return heads - def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): - return GitRepository(shadow_repository_path) + def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): + return GitRepository(shadow_repository_path, with_wire={"cache": cache}) def _local_pull(self, repository_path, branch_name, ff_only=True): """ @@ -913,8 +913,8 @@ class GitRepository(BaseRepository): if not os.path.exists(shadow_repository_path): 
self._local_clone( shadow_repository_path, target_ref.name, source_ref.name) - log.debug( - 'Prepared shadow repository in %s', shadow_repository_path) + log.debug('Prepared %s shadow repository in %s', + self.alias, shadow_repository_path) return shadow_repository_path @@ -934,7 +934,7 @@ class GitRepository(BaseRepository): shadow_repository_path = self._maybe_prepare_merge_workspace( repo_id, workspace_id, target_ref, source_ref) - shadow_repo = self._get_shadow_instance(shadow_repository_path) + shadow_repo = self.get_shadow_instance(shadow_repository_path) # checkout source, if it's different. Otherwise we could not # fetch proper commits for merge testing @@ -952,7 +952,7 @@ class GitRepository(BaseRepository): # Need to reload repo to invalidate the cache, or otherwise we cannot # retrieve the last target commit. - shadow_repo = self._get_shadow_instance(shadow_repository_path) + shadow_repo = self.get_shadow_instance(shadow_repository_path) if target_ref.commit_id != shadow_repo.branches[target_ref.name]: log.warning('Shadow Target ref %s commit mismatch %s vs %s', target_ref, target_ref.commit_id, @@ -984,9 +984,9 @@ class GitRepository(BaseRepository): [source_ref.commit_id]) merge_possible = True - # Need to reload repo to invalidate the cache, or otherwise we + # Need to invalidate the cache, or otherwise we # cannot retrieve the merge commit. - shadow_repo = GitRepository(shadow_repository_path) + shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path) merge_commit_id = shadow_repo.branches[pr_branch] # Set a reference pointing to the merge commit. This reference may diff --git a/rhodecode/lib/vcs/backends/hg/commit.py b/rhodecode/lib/vcs/backends/hg/commit.py --- a/rhodecode/lib/vcs/backends/hg/commit.py +++ b/rhodecode/lib/vcs/backends/hg/commit.py @@ -236,7 +236,7 @@ class MercurialCommit(base.BaseCommit): Returns content of the file at given ``path``. 
""" path = self._get_filectx(path) - return self._remote.fctx_data(self.idx, path) + return self._remote.fctx_node_data(self.idx, path) def get_file_size(self, path): """ diff --git a/rhodecode/lib/vcs/backends/hg/repository.py b/rhodecode/lib/vcs/backends/hg/repository.py --- a/rhodecode/lib/vcs/backends/hg/repository.py +++ b/rhodecode/lib/vcs/backends/hg/repository.py @@ -79,7 +79,7 @@ class MercurialRepository(BaseRepository # special requirements self.config = config if config else self.get_default_config( default=[('extensions', 'largefiles', '1')]) - self.with_wire = with_wire + self.with_wire = with_wire or {"cache": False} # default should not use cache self._init_repo(create, src_url, do_workspace_checkout) @@ -88,7 +88,8 @@ class MercurialRepository(BaseRepository @LazyProperty def _remote(self): - return connection.Hg(self.path, self.config, with_wire=self.with_wire) + repo_id = self.path + return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire) @CachedProperty def commit_ids(self): @@ -185,7 +186,7 @@ class MercurialRepository(BaseRepository self._remote.invalidate_vcs_cache() # Reinitialize tags - self.tags = self._get_tags() + self._invalidate_prop_cache('tags') tag_id = self.tags[name] return self.get_commit(commit_id=tag_id) @@ -212,7 +213,7 @@ class MercurialRepository(BaseRepository self._remote.tag(name, nullid, message, local, user, date, tz) self._remote.invalidate_vcs_cache() - self.tags = self._get_tags() + self._invalidate_prop_cache('tags') @LazyProperty def bookmarks(self): @@ -359,7 +360,6 @@ class MercurialRepository(BaseRepository if create: os.makedirs(self.path, mode=0o755) - self._remote.localrepository(create) @LazyProperty @@ -738,7 +738,7 @@ class MercurialRepository(BaseRepository shadow_repository_path = self._maybe_prepare_merge_workspace( repo_id, workspace_id, target_ref, source_ref) - shadow_repo = self._get_shadow_instance(shadow_repository_path) + shadow_repo = 
self.get_shadow_instance(shadow_repository_path) log.debug('Pulling in target reference %s', target_ref) self._validate_pull_reference(target_ref) @@ -818,7 +818,7 @@ class MercurialRepository(BaseRepository shadow_repo.bookmark( target_ref.name, revision=merge_commit_id) try: - shadow_repo_with_hooks = self._get_shadow_instance( + shadow_repo_with_hooks = self.get_shadow_instance( shadow_repository_path, enable_hooks=True) # This is the actual merge action, we push from shadow @@ -854,11 +854,11 @@ class MercurialRepository(BaseRepository merge_possible, merge_succeeded, merge_ref, merge_failure_reason, metadata=metadata) - def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): + def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): config = self.config.copy() if not enable_hooks: config.clear_section('hooks') - return MercurialRepository(shadow_repository_path, config) + return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache}) def _validate_pull_reference(self, reference): if not (reference.name in self.bookmarks or diff --git a/rhodecode/lib/vcs/backends/svn/repository.py b/rhodecode/lib/vcs/backends/svn/repository.py --- a/rhodecode/lib/vcs/backends/svn/repository.py +++ b/rhodecode/lib/vcs/backends/svn/repository.py @@ -69,20 +69,21 @@ class SubversionRepository(base.BaseRepo contact = base.BaseRepository.DEFAULT_CONTACT description = base.BaseRepository.DEFAULT_DESCRIPTION - def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False, - **kwargs): + def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None, + bare=False, **kwargs): self.path = safe_str(os.path.abspath(repo_path)) self.config = config if config else self.get_default_config() + self.with_wire = with_wire or {"cache": False} # default should not use cache self._init_repo(create, src_url) # caches self._commit_ids = {} - @LazyProperty def _remote(self): - return 
connection.Svn(self.path, self.config) + repo_id = self.path + return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire) def _init_repo(self, create, src_url): if create and os.path.exists(self.path): diff --git a/rhodecode/lib/vcs/client_http.py b/rhodecode/lib/vcs/client_http.py --- a/rhodecode/lib/vcs/client_http.py +++ b/rhodecode/lib/vcs/client_http.py @@ -25,6 +25,7 @@ Client for the VCSServer implemented bas import copy import logging import threading +import time import urllib2 import urlparse import uuid @@ -39,7 +40,6 @@ import rhodecode from rhodecode.lib.system_info import get_cert_path from rhodecode.lib.vcs import exceptions, CurlSession - log = logging.getLogger(__name__) @@ -54,16 +54,14 @@ EXCEPTIONS_MAP = { class RepoMaker(object): def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): - self.url = urlparse.urljoin( - 'http://%s' % server_and_port, backend_endpoint) + self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) self._session_factory = session_factory self.backend_type = backend_type - def __call__(self, path, config, with_wire=None): - log.debug('RepoMaker call on %s', path) - return RemoteRepo( - path, config, self.url, self._session_factory(), - with_wire=with_wire) + def __call__(self, path, repo_id, config, with_wire=None): + log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path) + return RemoteRepo(path, repo_id, config, self.url, self._session_factory(), + with_wire=with_wire) def __getattr__(self, name): def f(*args, **kwargs): @@ -84,8 +82,7 @@ class RepoMaker(object): class ServiceConnection(object): def __init__(self, server_and_port, backend_endpoint, session_factory): - self.url = urlparse.urljoin( - 'http://%s' % server_and_port, backend_endpoint) + self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) self._session_factory = session_factory def __getattr__(self, name): @@ -107,21 +104,27 @@ class 
ServiceConnection(object): class RemoteRepo(object): - def __init__(self, path, config, url, session, with_wire=None): + def __init__(self, path, repo_id, config, url, session, with_wire=None): self.url = url self._session = session + with_wire = with_wire or {} + + repo_state_uid = with_wire.get('repo_state_uid') or 'state' self._wire = { - "path": path, + "path": path, # repo path + "repo_id": repo_id, "config": config, - "context": self._create_vcs_cache_context(), + "repo_state_uid": repo_state_uid, + "context": self._create_vcs_cache_context(path, repo_state_uid) } + if with_wire: self._wire.update(with_wire) + self._call_with_logging = False - # johbo: Trading complexity for performance. Avoiding the call to + # NOTE(johbo): Trading complexity for performance. Avoiding the call to # log.debug brings a few percent gain even if is is not active. if log.isEnabledFor(logging.DEBUG): - self._call = self._call_with_logging + self._call_with_logging = True self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__')) @@ -136,30 +139,35 @@ class RemoteRepo(object): # config object is being changed for hooking scenarios wire = copy.deepcopy(self._wire) wire["config"] = wire["config"].serialize() + wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) - wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) payload = { 'id': str(uuid.uuid4()), 'method': name, 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} } - return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session) - def _call_with_logging(self, name, *args, **kwargs): - context_uid = self._wire.get('context') - log.debug('Calling %s@%s with args:%.10240r. wire_context: %s', - self.url, name, args, context_uid) - return RemoteRepo._call(self, name, *args, **kwargs) + if self._call_with_logging: + start = time.time() + context_uid = wire.get('context') + log.debug('Calling %s@%s with args:%.10240r. 
wire_context: %s', + self.url, name, args, context_uid) + result = _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session) + if self._call_with_logging: + log.debug('Call %s@%s took: %.3fs. wire_context: %s', + self.url, name, time.time()-start, context_uid) + return result def __getitem__(self, key): return self.revision(key) - def _create_vcs_cache_context(self): + def _create_vcs_cache_context(self, *args): """ Creates a unique string which is passed to the VCSServer on every remote call. It is used as cache key in the VCSServer. """ - return str(uuid.uuid4()) + hash_key = '-'.join(map(str, args)) + return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key)) def invalidate_vcs_cache(self): """ @@ -167,7 +175,7 @@ class RemoteRepo(object): call to a remote method. It forces the VCSServer to create a fresh repository instance on the next call to a remote method. """ - self._wire['context'] = self._create_vcs_cache_context() + self._wire['context'] = str(uuid.uuid4()) class RemoteObject(object): @@ -254,8 +262,7 @@ class VcsHttpProxy(object): retries = Retry(total=5, connect=None, read=None, redirect=None) adapter = requests.adapters.HTTPAdapter(max_retries=retries) - self.base_url = urlparse.urljoin( - 'http://%s' % server_and_port, backend_endpoint) + self.base_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) self.session = requests.Session() self.session.mount('http://', adapter) diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py --- a/rhodecode/model/db.py +++ b/rhodecode/model/db.py @@ -29,6 +29,7 @@ import string import hashlib import logging import datetime +import uuid import warnings import ipaddress import functools @@ -2437,11 +2438,18 @@ class Repository(Base, BaseModel): # for repo2dbmapper config = kwargs.pop('config', None) cache = kwargs.pop('cache', None) - full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) + vcs_full_cache = kwargs.pop('vcs_full_cache', None) + if vcs_full_cache is not None: + # allows 
override global config + full_cache = vcs_full_cache + else: + full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) # if cache is NOT defined use default global, else we have a full # control over cache behaviour if cache is None and full_cache and not config: + log.debug('Initializing pure cached instance for %s', self.repo_path) return self._get_instance_cached() + # cache here is sent to the "vcs server" return self._get_instance(cache=bool(cache), config=config) @@ -2454,8 +2462,8 @@ class Repository(Base, BaseModel): region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) - def get_instance_cached(repo_id, context_id): - return self._get_instance() + def get_instance_cached(repo_id, context_id, _cache_state_uid): + return self._get_instance(repo_state_uid=_cache_state_uid) # we must use thread scoped cache here, # because each thread of gevent needs it's own not shared connection and cache @@ -2464,7 +2472,9 @@ class Repository(Base, BaseModel): uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace, thread_scoped=True) with inv_context_manager as invalidation_context: - args = (self.repo_id, inv_context_manager.cache_key) + cache_state_uid = invalidation_context.cache_data['cache_state_uid'] + args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid) + # re-compute and store cache if we get invalidate signal if invalidation_context.should_invalidate(): instance = get_instance_cached.refresh(*args) @@ -2474,10 +2484,13 @@ class Repository(Base, BaseModel): log.debug('Repo instance fetched in %.3fs', inv_context_manager.compute_time) return instance - def _get_instance(self, cache=True, config=None): + def _get_instance(self, cache=True, config=None, repo_state_uid=None): + log.debug('Initializing %s instance `%s` with cache flag set to: %s', + self.repo_type, self.repo_path, cache) config = config or self._config custom_wire = { 
- 'cache': cache # controls the vcs.remote cache + 'cache': cache, # controls the vcs.remote cache + 'repo_state_uid': repo_state_uid } repo = get_vcs_instance( repo_path=safe_str(self.repo_full_path), @@ -3497,12 +3510,15 @@ class CacheKey(Base, BaseModel): cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) + cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None) cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) - def __init__(self, cache_key, cache_args=''): + def __init__(self, cache_key, cache_args='', cache_state_uid=None): self.cache_key = cache_key self.cache_args = cache_args self.cache_active = False + # first key should be same for all entries, since all workers should share it + self.cache_state_uid = cache_state_uid or self.generate_new_state_uid(based_on=cache_args) def __unicode__(self): return u"<%s('%s:%s[%s]')>" % ( @@ -3531,6 +3547,13 @@ class CacheKey(Base, BaseModel): return self._cache_key_partition()[2] @classmethod + def generate_new_state_uid(cls, based_on=None): + if based_on: + return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on))) + else: + return str(uuid.uuid4()) + + @classmethod def delete_all_cache(cls): """ Delete all cache keys from database. 
@@ -3553,7 +3576,8 @@ class CacheKey(Base, BaseModel): log.debug('cache objects deleted for cache args %s', safe_str(cache_uid)) else: - qry.update({"cache_active": False}) + qry.update({"cache_active": False, + "cache_state_uid": cls.generate_new_state_uid()}) log.debug('cache objects marked as invalid for cache args %s', safe_str(cache_uid)) @@ -4166,7 +4190,7 @@ class PullRequest(Base, _PullRequestBase shadow_repository_path = vcs_obj._get_shadow_repository_path( self.target_repo.repo_id, workspace_id) if os.path.isdir(shadow_repository_path): - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/templates/admin/repos/repo_edit_caches.mako b/rhodecode/templates/admin/repos/repo_edit_caches.mako --- a/rhodecode/templates/admin/repos/repo_edit_caches.mako +++ b/rhodecode/templates/admin/repos/repo_edit_caches.mako @@ -37,12 +37,14 @@ + %for cache in c.rhodecode_db_repo.cache_keys: + diff --git a/rhodecode/tests/models/test_scm.py b/rhodecode/tests/models/test_scm.py --- a/rhodecode/tests/models/test_scm.py +++ b/rhodecode/tests/models/test_scm.py @@ -49,7 +49,7 @@ def test_scm_instance_config(backend): mocks['_get_instance_cached'].assert_called() -def test__get_instance_config(backend): +def test_get_instance_config(backend): repo = backend.create_repo() vcs_class = Mock() with patch.multiple('rhodecode.lib.vcs.backends', @@ -61,13 +61,13 @@ def test__get_instance_config(backend): repo._get_instance() vcs_class.assert_called_with( repo_path=repo.repo_full_path, config=config_mock, - create=False, with_wire={'cache': True}) + create=False, with_wire={'cache': True, 'repo_state_uid': None}) new_config = {'override': 'old_config'} repo._get_instance(config=new_config) vcs_class.assert_called_with( repo_path=repo.repo_full_path, config=new_config, create=False, - with_wire={'cache': True}) + with_wire={'cache': True, 
'repo_state_uid': None}) def test_mark_for_invalidation_config(backend): diff --git a/rhodecode/tests/server_utils.py b/rhodecode/tests/server_utils.py --- a/rhodecode/tests/server_utils.py +++ b/rhodecode/tests/server_utils.py @@ -133,8 +133,7 @@ class RcVCSServer(ServerBase): def __init__(self, config_file, log_file=None): super(RcVCSServer, self).__init__(config_file, log_file) - self._args = [ - 'gunicorn', '--paste', self.config_file] + self._args = ['gunicorn', '--paste', self.config_file] def start(self): env = os.environ.copy() @@ -145,6 +144,7 @@ class RcVCSServer(ServerBase): host_url = self.host_url() assert_no_running_instance(host_url) + print('rhodecode-vcsserver start command: {}'.format(' '.join(self._args))) print('rhodecode-vcsserver starting at: {}'.format(host_url)) print('rhodecode-vcsserver command: {}'.format(self.command)) print('rhodecode-vcsserver logfile: {}'.format(self.log_file)) diff --git a/rhodecode/tests/vcs/test_client_http.py b/rhodecode/tests/vcs/test_client_http.py --- a/rhodecode/tests/vcs/test_client_http.py +++ b/rhodecode/tests/vcs/test_client_http.py @@ -107,7 +107,7 @@ def test_repo_maker_uses_session_for_ins stub_session_factory, config): repo_maker = client_http.RepoMaker( 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory) - repo = repo_maker('stub_path', config) + repo = repo_maker('stub_path', 'stub_repo_id', config) repo.example_call() stub_session_factory().post.assert_called_with( 'http://server_and_port/endpoint', data=mock.ANY) @@ -127,7 +127,7 @@ def test_repo_maker_uses_session_that_th stub_session_failing_factory, config): repo_maker = client_http.RepoMaker( 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_failing_factory) - repo = repo_maker('stub_path', config) + repo = repo_maker('stub_path', 'stub_repo_id', config) with pytest.raises(exceptions.HttpVCSCommunicationError): repo.example_call() diff --git a/rhodecode/tests/vcs/test_hg.py b/rhodecode/tests/vcs/test_hg.py --- 
a/rhodecode/tests/vcs/test_hg.py +++ b/rhodecode/tests/vcs/test_hg.py @@ -751,15 +751,15 @@ class TestGetShadowInstance(object): return repo def test_passes_config(self, repo): - shadow = repo._get_shadow_instance(repo.path) + shadow = repo.get_shadow_instance(repo.path) assert shadow.config == repo.config.copy() def test_disables_hooks(self, repo): - shadow = repo._get_shadow_instance(repo.path) + shadow = repo.get_shadow_instance(repo.path) shadow.config.clear_section.assert_called_once_with('hooks') def test_allows_to_keep_hooks(self, repo): - shadow = repo._get_shadow_instance(repo.path, enable_hooks=True) + shadow = repo.get_shadow_instance(repo.path, enable_hooks=True) assert not shadow.config.clear_section.called diff --git a/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py b/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py --- a/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py +++ b/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py @@ -108,7 +108,7 @@ class TestMercurialRemoteRepoInvalidatio workspace_id = pr._workspace_id(pull_request) shadow_repository_path = target_vcs._maybe_prepare_merge_workspace( repo_id, workspace_id, target_ref, source_ref) - shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path) + shadow_repo = target_vcs.get_shadow_instance(shadow_repository_path, cache=True) # This will populate the cache of the mercurial repository object # inside of the VCSServer. 
@@ -127,32 +127,37 @@ class TestMercurialRemoteRepoInvalidatio from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError pull_request = pr_util.create_pull_request() + target_vcs = pull_request.target_repo.scm_instance() source_vcs = pull_request.source_repo.scm_instance() - shadow_repo, source_ref, target_ref = self._prepare_shadow_repo( - pull_request) + shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(pull_request) + + initial_cache_uid = shadow_repo._remote._wire['context'] + initial_commit_ids = shadow_repo._remote.get_all_commit_ids('visible') # Pull from target and source references but without invalidation of - # RemoteRepo objects and without VCSServer caching of mercurial - # repository objects. + # RemoteRepo objects and without VCSServer caching of mercurial repository objects. with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'): # NOTE: Do not use patch.dict() to disable the cache because it # restores the WHOLE dict and not only the patched keys. shadow_repo._remote._wire['cache'] = False shadow_repo._local_pull(target_vcs.path, target_ref) shadow_repo._local_pull(source_vcs.path, source_ref) - shadow_repo._remote._wire.pop('cache') + shadow_repo._remote._wire['cache'] = True # Try to lookup the target_ref in shadow repo. This should work because # the shadow repo is a clone of the target and always contains all off # it's commits in the initial cache. shadow_repo.get_commit(target_ref.commit_id) - # If we try to lookup the source_ref it should fail because the shadow + # we ensure that call context has not changed, this is what + # `invalidate_vcs_cache` does + assert initial_cache_uid == shadow_repo._remote._wire['context'] + + # If we try to lookup all commits, they should stay the same, because the shadow # repo commit cache doesn't get invalidated. (Due to patched # invalidation and caching above). 
- with pytest.raises(CommitDoesNotExistError): - shadow_repo.get_commit(source_ref.commit_id) + assert initial_commit_ids == shadow_repo._remote.get_all_commit_ids('visible') @pytest.mark.backends('hg') def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app): @@ -166,8 +172,7 @@ class TestMercurialRemoteRepoInvalidatio pull_request = pr_util.create_pull_request() target_vcs = pull_request.target_repo.scm_instance() source_vcs = pull_request.source_repo.scm_instance() - shadow_repo, source_ref, target_ref = self._prepare_shadow_repo( - pull_request) + shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(pull_request) # Pull from target and source references without without VCSServer # caching of mercurial repository objects but with active invalidation @@ -177,7 +182,7 @@ class TestMercurialRemoteRepoInvalidatio shadow_repo._remote._wire['cache'] = False shadow_repo._local_pull(target_vcs.path, target_ref) shadow_repo._local_pull(source_vcs.path, source_ref) - shadow_repo._remote._wire.pop('cache') + shadow_repo._remote._wire['cache'] = True # Try to lookup the target and source references in shadow repo. This # should work because the RemoteRepo object gets invalidated during the diff --git a/rhodecode/tests/vcsserver_http.ini b/rhodecode/tests/vcsserver_http.ini --- a/rhodecode/tests/vcsserver_http.ini +++ b/rhodecode/tests/vcsserver_http.ini @@ -65,7 +65,7 @@ propagate = 1 [handler_console] class = StreamHandler args = (sys.stderr,) -level = INFO +level = DEBUG formatter = generic ################
${_('Key')}${_('State UID')} ${_('Namespace')} ${_('Active')}
${cache.cache_key}${cache.cache_state_uid} ${cache.cache_args} ${h.bool2icon(cache.cache_active)}