caching_query.py
298 lines
| 10.9 KiB
| text/x-python
|
PythonLexer
r1 | # -*- coding: utf-8 -*- | |||
r3363 | # Copyright (C) 2010-2019 RhodeCode GmbH | |||
r1 | # | |||
# This program is free software: you can redistribute it and/or modify | ||||
# it under the terms of the GNU Affero General Public License, version 3 | ||||
# (only), as published by the Free Software Foundation. | ||||
# | ||||
# This program is distributed in the hope that it will be useful, | ||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||||
# GNU General Public License for more details. | ||||
# | ||||
# You should have received a copy of the GNU Affero General Public License | ||||
# along with this program. If not, see <http://www.gnu.org/licenses/>. | ||||
# | ||||
# This program is dual-licensed. If you wish to learn more about the | ||||
# RhodeCode Enterprise Edition, including its added features, Support services, | ||||
# and proprietary license terms, please see https://rhodecode.com/licenses/ | ||||
r2883 | """caching_query.py | |||
r1 | ||||
r2883 | Represent functions and classes | |||
which allow the usage of Dogpile caching with SQLAlchemy. | ||||
Introduces a query option called FromCache. | ||||
r1 | ||||
The four new concepts introduced here are:

* CachingQuery - a Query subclass that caches and
  retrieves results in/from dogpile.cache.
* FromCache - a query option that establishes caching
  parameters on a Query
* RelationshipCache - a variant of FromCache which is specific
  to a query invoked during a lazy load.
* _key_from_query - derives a cache key from
  a Query.
The rest of what's here are standard SQLAlchemy and | ||||
r2883 | dogpile.cache constructs. | |||
r1 | ||||
""" | ||||
from sqlalchemy.orm.interfaces import MapperOption | ||||
from sqlalchemy.orm.query import Query | ||||
from sqlalchemy.sql import visitors | ||||
r2883 | from dogpile.cache.api import NO_VALUE | |||
r1 | ||||
from rhodecode.lib.utils2 import safe_str | ||||
class CachingQuery(Query):
    """A Query subclass which optionally loads full results from a dogpile
    cache region.

    The CachingQuery optionally stores additional state that allows it to consult
    a dogpile.cache cache before accessing the database, in the form
    of a FromCache or RelationshipCache object.  Each of these objects
    refer to the name of a :class:`dogpile.cache.Region` that's been configured
    and stored in a lookup dictionary.  When such an object has associated
    itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
    is used to locate a cached result.  If none is present, then the
    Query is invoked normally, the results being cached.

    The FromCache and RelationshipCache mapper options below represent
    the "public" method of configuring this state upon the CachingQuery.
    """

    def _get_region(self):
        """Return the process-wide mapping of configured dogpile regions.

        The import is deferred to avoid a circular import between this
        module and ``rhodecode.lib.rc_cache`` at module load time.
        """
        from rhodecode.lib.rc_cache import region_meta
        return region_meta.dogpile_cache_regions

    def __init__(self, regions, *args, **kw):
        # When no explicit region mapping is supplied (or it is falsy),
        # fall back to the globally registered dogpile cache regions.
        self.cache_regions = regions or self._get_region()
        Query.__init__(self, *args, **kw)

    def __iter__(self):
        """override __iter__ to pull results from dogpile
        if particular attributes have been configured.

        Note that this approach does *not* detach the loaded objects from
        the current session. If the cache backend is an in-process cache
        (like "memory") and lives beyond the scope of the current session's
        transaction, those objects may be expired. The method here can be
        modified to first expunge() each loaded item from the current
        session before returning the list of items, so that the items
        in the cache are not the same ones in the current Session.
        """
        super_ = super(CachingQuery, self)
        # ``_cache_region`` is attached by FromCache/RelationshipCache via
        # their process_query* hooks; its presence enables the cache path.
        if hasattr(self, '_cache_region'):
            return self.get_value(createfunc=lambda: list(super_.__iter__()))
        else:
            return super_.__iter__()

    def _execute_and_instances(self, context):
        """override _execute_and_instances to pull results from dogpile
        if the query is invoked directly from an external context.

        This method is necessary in order to maintain compatibility
        with the "baked query" system now used by default in some
        relationship loader scenarios.  Note also the
        RelationshipCache._generate_cache_key method which enables
        the baked query to be used within lazy loads.

        .. versionadded:: 1.2.7
        """
        super_ = super(CachingQuery, self)

        # ``context.query is not self`` indicates the call comes from the
        # baked-query machinery rather than from our own __iter__ (which
        # already handled caching for the direct-invocation case).
        if context.query is not self and hasattr(self, '_cache_region'):
            # special logic called when the Query._execute_and_instances()
            # method is called directly from the baked query
            return self.get_value(
                createfunc=lambda: list(
                    super_._execute_and_instances(context)
                )
            )
        else:
            return super_._execute_and_instances(context)

    def _get_cache_plus_key(self):
        """Return a cache region plus key."""
        dogpile_region = self.cache_regions[self._cache_region.region]
        # An explicit cache_key on the option wins; otherwise derive the
        # key from the query's compiled SQL and bound parameter values.
        if self._cache_region.cache_key:
            key = self._cache_region.cache_key
        else:
            key = _key_from_query(self)
        return dogpile_region, key

    def invalidate(self):
        """Invalidate the cache value represented by this Query."""

        dogpile_region, cache_key = self._get_cache_plus_key()
        dogpile_region.delete(cache_key)

    def get_value(self, merge=True, createfunc=None,
                  expiration_time=None, ignore_expiration=False):
        """Return the value from the cache for this query.

        :param merge: merge cached ORM objects into the current Session
         (``load=False``) so they behave as session-bound instances.
        :param createfunc: zero-argument callable producing the value when
         the cache has none (or it is expired).
        :param expiration_time: per-call expiration override, passed through
         to the dogpile region.
        :param ignore_expiration: return a cached value even if expired.

        Raise KeyError if no value present and no
        createfunc specified.
        """
        dogpile_region, cache_key = self._get_cache_plus_key()
        # ignore_expiration means, if the value is in the cache
        # but is expired, return it anyway. This doesn't make sense
        # with createfunc, which says, if the value is expired, generate
        # a new value.
        assert not ignore_expiration or not createfunc, \
            "Can't ignore expiration and also provide createfunc"
        if ignore_expiration or not createfunc:
            cached_value = dogpile_region.get(cache_key,
                                              expiration_time=expiration_time,
                                              ignore_expiration=ignore_expiration)
        else:
            cached_value = dogpile_region.get_or_create(
                cache_key,
                createfunc,
                expiration_time=expiration_time
            )
        if cached_value is NO_VALUE:
            raise KeyError(cache_key)
        if merge:
            cached_value = self.merge_result(cached_value, load=False)
        return cached_value

    def set_value(self, value):
        """Set the value in the cache for this query."""
        dogpile_region, cache_key = self._get_cache_plus_key()
        dogpile_region.set(cache_key, value)
r1 | ||||
def query_callable(regions=None, query_cls=CachingQuery):
    """Return a factory producing ``query_cls`` instances bound to *regions*.

    The returned callable is suitable for passing as the ``query_cls``
    argument of SQLAlchemy's ``sessionmaker``/``Session``.
    """
    def _make_query(*args, **kwargs):
        return query_cls(regions, *args, **kwargs)
    return _make_query
def _key_from_query(query, qualifier=None):
    """Given a Query, create a cache key.

    There are many approaches to this; here we use the simplest,
    which is to create an md5 hash of the text of the SQL statement,
    combined with stringified versions of all the bound parameters
    within it. There's a bit of a performance hit with
    compiling out "query.statement" here; other approaches include
    setting up an explicit cache key with a particular Query,
    then combining that with the bound parameter values.
    """
    compiled = query.with_labels().statement.compile()
    params = compiled.params

    # here we return the key as a long string. our "key mangler"
    # set up with the region will boil it down to an md5.
    key_parts = [safe_str(compiled)]
    key_parts.extend(safe_str(params[name]) for name in sorted(params))
    return " ".join(key_parts)
r1 | ||||
class FromCache(MapperOption):
    """Specifies that a Query should load results from a cache."""

    # Only applies to the query it is attached to, not to lazy loads
    # spawned from it.
    propagate_to_loaders = False

    def __init__(self, region="sql_cache_short", cache_key=None):
        """Construct a new FromCache.

        :param region: the cache region. Should be a
         region configured in the dictionary of dogpile
         regions.

        :param cache_key: optional. A string cache key
         that will serve as the key to the query. Use this
         if your query has a huge amount of parameters (such
         as when using in_()) which correspond more simply to
         some other identifier.
        """
        self.region, self.cache_key = region, cache_key

    def process_query(self, query):
        """Process a Query during normal loading operation."""
        # Attaching the option is what flips CachingQuery into cache mode.
        query._cache_region = self
r1 | ||||
class RelationshipCache(MapperOption):
    """Specifies that a Query as called within a "lazy load"
    should load results from a cache."""

    # Must propagate so the option reaches the lazy-load queries.
    propagate_to_loaders = True

    def __init__(self, attribute, region="sql_cache_short", cache_key=None):
        """Construct a new RelationshipCache.

        :param attribute: A Class.attribute which
         indicates a particular class relationship() whose
         lazy loader should be pulled from the cache.

        :param region: name of the cache region.

        :param cache_key: optional. A string cache key
         that will serve as the key to the query, bypassing
         the usual means of forming a key from the Query itself.
        """
        self.region = region
        self.cache_key = cache_key
        prop = attribute.property
        # Map (mapped class, relationship name) -> this option so a lazy
        # load for that relationship can locate its cache settings.
        self._relationship_options = {
            (prop.parent.class_, prop.key): self,
        }

    def _generate_cache_key(self, path):
        """Indicate to the lazy-loader strategy that a "baked" query
        may be used by returning ``None``.

        If this method is omitted, the default implementation of
        :class:`.MapperOption._generate_cache_key` takes place, which
        returns ``False`` to disable the "baked" query from being used.

        .. versionadded:: 1.2.7
        """
        return None

    def process_query_conditionally(self, query):
        """Process a Query that is used within a lazy loader.

        (the process_query_conditionally() method is a SQLAlchemy
        hook invoked only within lazyload.)
        """
        path = query._current_path
        if not path:
            return

        mapper, prop = path[-2:]
        options = self._relationship_options
        # Walk the MRO so an option registered against a base class also
        # applies when the relationship is loaded from a subclass.
        for klass in mapper.class_.__mro__:
            option = options.get((klass, prop.key))
            if option is not None:
                query._cache_region = option
                break

    def and_(self, option):
        """Chain another RelationshipCache option to this one.

        While many RelationshipCache objects can be specified on a single
        Query separately, chaining them together allows for a more efficient
        lookup during load.
        """
        for lookup_key, cached_option in option._relationship_options.items():
            self._relationship_options[lookup_key] = cached_option
        return self