@@ -24,190 +24,129 @@ Represent functions and classes
 which allow the usage of Dogpile caching with SQLAlchemy.
 Introduces a query option called FromCache.
 
+.. versionchanged:: 1.4 the caching approach has been altered to work
+   based on a session event.
+
+
 The three new concepts introduced here are:
 
- * CachingQuery - a Query subclass that caches and
+ * ORMCache - an extension for an ORM :class:`.Session`
    retrieves results in/from dogpile.cache.
  * FromCache - a query option that establishes caching
    parameters on a Query
  * RelationshipCache - a variant of FromCache which is specific
    to a query invoked during a lazy load.
- * _params_from_query - extracts value parameters from
-   a Query.
 
 The rest of what's here are standard SQLAlchemy and
 dogpile.cache constructs.
 
 """
-from sqlalchemy.orm.interfaces import MapperOption
-from sqlalchemy.orm.query import Query
-from sqlalchemy.sql import visitors
 from dogpile.cache.api import NO_VALUE
 
-from rhodecode.lib.utils2 import safe_str
+from sqlalchemy import event
+from sqlalchemy.orm import loading
+from sqlalchemy.orm.interfaces import UserDefinedOption
+
+
+DEFAULT_REGION = "sql_cache_short"
 
 
-class CachingQuery(Query):
-    """A Query subclass which optionally loads full results from a dogpile
-    cache region.
+class ORMCache:
 
-    The CachingQuery optionally stores additional state that allows it to consult
-    a dogpile.cache cache before accessing the database, in the form
-    of a FromCache or RelationshipCache object. Each of these objects
-    refer to the name of a :class:`dogpile.cache.Region` that's been configured
-    and stored in a lookup dictionary. When such an object has associated
-    itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
-    is used to locate a cached result. If none is present, then the
-    Query is invoked normally, the results being cached.
+    """An add-on for an ORM :class:`.Session` optionally loads full results
+    from a dogpile cache region.
 
-    The FromCache and RelationshipCache mapper options below represent
-    the "public" method of configuring this state upon the CachingQuery.
+    cache = ORMCache(regions={})
+    cache.listen_on_session(Session)
 
     """
-    def _get_region(self):
+
+    def __init__(self, regions):
+        self.cache_regions = regions or self._get_region()
+        self._statement_cache = {}
+
+    @classmethod
+    def _get_region(cls):
         from rhodecode.lib.rc_cache import region_meta
         return region_meta.dogpile_cache_regions
 
-    def __init__(self, regions, *args, **kw):
-        self.cache_regions = regions or self._get_region()
-        Query.__init__(self, *args, **kw)
+    def listen_on_session(self, session_factory):
+        event.listen(session_factory, "do_orm_execute", self._do_orm_execute)
 
-    def __iter__(self):
-        """override __iter__ to pull results from dogpile
-        if particular attributes have been configured.
+    def _do_orm_execute(self, orm_context):
 
-        Note that this approach does *not* detach the loaded objects from
-        the current session. If the cache backend is an in-process cache
-        (like "memory") and lives beyond the scope of the current session's
-        transaction, those objects may be expired. The method here can be
-        modified to first expunge() each loaded item from the current
-        session before returning the list of items, so that the items
-        in the cache are not the same ones in the current Session.
+        for opt in orm_context.user_defined_options:
+            if isinstance(opt, RelationshipCache):
+                opt = opt._process_orm_context(orm_context)
+                if opt is None:
+                    continue
 
-        """
-        super_ = super(CachingQuery, self)
+            if isinstance(opt, FromCache):
+                dogpile_region = self.cache_regions[opt.region]
 
-        if hasattr(self, '_cache_region'):
-            return self.get_value(createfunc=lambda: list(super_.__iter__()))
-        else:
-            return super_.__iter__()
-
-    def _execute_and_instances(self, context):
-        """override _execute_and_instances to pull results from dogpile
-        if the query is invoked directly from an external context.
-
-        This method is necessary in order to maintain compatibility
-        with the "baked query" system now used by default in some
-        relationship loader scenarios. Note also the
-        RelationshipCache._generate_cache_key method which enables
-        the baked query to be used within lazy loads.
+                if opt.cache_key:
+                    our_cache_key = f'SQL_CACHE_{opt.cache_key}'
+                else:
+                    our_cache_key = opt._generate_cache_key(
+                        orm_context.statement, orm_context.parameters, self
+                    )
 
-        .. versionadded:: 1.2.7
-        """
-        super_ = super(CachingQuery, self)
-
-        if context.query is not self and hasattr(self, '_cache_region'):
-            # special logic called when the Query._execute_and_instances()
-            # method is called directly from the baked query
-            return self.get_value(
-                createfunc=lambda: list(
-                    super_._execute_and_instances(context)
-                )
-            )
-        else:
-            return super_._execute_and_instances(context)
+                if opt.ignore_expiration:
+                    cached_value = dogpile_region.get(
+                        our_cache_key,
+                        expiration_time=opt.expiration_time,
+                        ignore_expiration=opt.ignore_expiration,
+                    )
+                else:
+
+                    def createfunc():
+                        return orm_context.invoke_statement().freeze()
+
+                    cached_value = dogpile_region.get_or_create(
+                        our_cache_key,
+                        createfunc,
+                        expiration_time=opt.expiration_time,
+                    )
 
-    def _get_cache_plus_key(self):
-        """Return a cache region plus key."""
-        dogpile_region = self.cache_regions[self._cache_region.region]
-        if self._cache_region.cache_key:
-            key = self._cache_region.cache_key
-        else:
-            key = _key_from_query(self)
-        return dogpile_region, key
+                if cached_value is NO_VALUE:
+                    # keyerror? this is bigger than a keyerror...
+                    raise KeyError()
+
+                orm_result = loading.merge_frozen_result(
+                    orm_context.session,
+                    orm_context.statement,
+                    cached_value,
+                    load=False,
+                )
+                return orm_result()
+
+        else:
+            return None
+
+    def invalidate(self, statement, parameters, opt):
+        """Invalidate the cache value represented by a statement."""
 
-    def invalidate(self):
-        """Invalidate the cache value represented by this Query."""
-
-        dogpile_region, cache_key = self._get_cache_plus_key()
+        statement = statement.__clause_element__()
+
+        dogpile_region = self.cache_regions[opt.region]
+
+        cache_key = opt._generate_cache_key(statement, parameters, self)
+
         dogpile_region.delete(cache_key)
 
-    def get_value(self, merge=True, createfunc=None,
-                  expiration_time=None, ignore_expiration=False):
-        """Return the value from the cache for this query.
-
-        Raise KeyError if no value present and no
-        createfunc specified.
-
-        """
-        dogpile_region, cache_key = self._get_cache_plus_key()
-
-        # ignore_expiration means, if the value is in the cache
-        # but is expired, return it anyway. This doesn't make sense
-        # with createfunc, which says, if the value is expired, generate
-        # a new value.
-        assert not ignore_expiration or not createfunc, \
-            "Can't ignore expiration and also provide createfunc"
-
-        if ignore_expiration or not createfunc:
-            cached_value = dogpile_region.get(cache_key,
-                expiration_time=expiration_time,
-                ignore_expiration=ignore_expiration)
-        else:
-            cached_value = dogpile_region.get_or_create(
-                cache_key,
-                createfunc,
-                expiration_time=expiration_time
-            )
-        if cached_value is NO_VALUE:
-            raise KeyError(cache_key)
-        if merge:
-            cached_value = self.merge_result(cached_value, load=False)
-        return cached_value
-
-    def set_value(self, value):
-        """Set the value in the cache for this query."""
-
-        dogpile_region, cache_key = self._get_cache_plus_key()
-        dogpile_region.set(cache_key, value)
-
-
-def query_callable(regions=None, query_cls=CachingQuery):
-    def query(*arg, **kw):
-        return query_cls(regions, *arg, **kw)
-    return query
-
-
-def _key_from_query(query, qualifier=None):
-    """Given a Query, create a cache key.
-
-    There are many approaches to this; here we use the simplest,
-    which is to create an md5 hash of the text of the SQL statement,
-    combined with stringified versions of all the bound parameters
-    within it. There's a bit of a performance hit with
-    compiling out "query.statement" here; other approaches include
-    setting up an explicit cache key with a particular Query,
-    then combining that with the bound parameter values.
-
-    """
-
-    stmt = query.with_labels().statement
-    compiled = stmt.compile()
-    params = compiled.params
-
-    # here we return the key as a long string. our "key mangler"
-    # set up with the region will boil it down to an md5.
-    return " ".join(
-        [safe_str(compiled)] +
-        [safe_str(params[k]) for k in sorted(params)])
-
-
-class FromCache(MapperOption):
+
+class FromCache(UserDefinedOption):
     """Specifies that a Query should load results from a cache."""
 
     propagate_to_loaders = False
 
-    def __init__(self, region="sql_cache_short", cache_key=None):
+    def __init__(
+        self,
+        region=DEFAULT_REGION,
+        cache_key=None,
+        expiration_time=None,
+        ignore_expiration=False,
+    ):
         """Construct a new FromCache.
 
         :param region: the cache region. Should be a
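
A minimal usage sketch of the API after this hunk, assuming the wiring shown in the ORMCache docstring above; the session factory and model names (Session, MyModel) are placeholders, not part of this change:

    from sqlalchemy.orm import sessionmaker

    Session = sessionmaker()

    # wire the cache onto the session factory once, at startup; ORMCache
    # subscribes to the "do_orm_execute" session event added in this hunk
    cache = ORMCache(regions={})   # an empty dict falls back to the rc_cache regions
    cache.listen_on_session(Session)

    session = Session()

    # any ORM statement carrying a FromCache option is answered from the
    # named dogpile region when a cached (frozen) result exists
    q = session.query(MyModel).options(FromCache("sql_cache_short"))
    rows = q.all()

    # drop the cached result for the same statement/parameters
    cache.invalidate(q, {}, FromCache("sql_cache_short"))

The invalidate() call follows the upstream dogpile_caching example: it recomputes the key from the statement and parameters, so it only matches entries that were keyed the same way during execution.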
@@ -223,19 +162,45 @@ class FromCache(MapperOption):
         """
         self.region = region
         self.cache_key = cache_key
+        self.expiration_time = expiration_time
+        self.ignore_expiration = ignore_expiration
 
-    def process_query(self, query):
-        """Process a Query during normal loading operation."""
-        query._cache_region = self
+    # this is not needed as of SQLAlchemy 1.4.28;
+    # UserDefinedOption classes no longer participate in the SQL
+    # compilation cache key
+    def _gen_cache_key(self, anon_map, bindparams):
+        return None
+
+    def _generate_cache_key(self, statement, parameters, orm_cache):
+        """generate a cache key with which to key the results of a statement.
+
+        This leverages the use of the SQL compilation cache key which is
+        repurposed as a SQL results key.
+
+        """
+        statement_cache_key = statement._generate_cache_key()
+
+        key = statement_cache_key.to_offline_string(
+            orm_cache._statement_cache, statement, parameters
+        ) + repr(self.cache_key)
+        # print("here's our key...%s" % key)
+        return key
 
 
-class RelationshipCache(MapperOption):
+class RelationshipCache(FromCache):
     """Specifies that a Query as called within a "lazy load"
     should load results from a cache."""
 
     propagate_to_loaders = True
 
-    def __init__(self, attribute, region="sql_cache_short", cache_key=None):
+    def __init__(
+        self,
+        attribute,
+        region=DEFAULT_REGION,
+        cache_key=None,
+        expiration_time=None,
+        ignore_expiration=False,
+    ):
         """Construct a new RelationshipCache.
 
         :param attribute: A Class.attribute which
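
For context, a sketch of how the two keying modes introduced in this hunk behave, with placeholder model names:

    # 1) explicit cache_key: _do_orm_execute stores the result under
    #    'SQL_CACHE_<cache_key>' in the chosen region, giving the entry a
    #    stable, human-readable name
    session.query(MyModel).options(
        FromCache("sql_cache_short", cache_key="all_my_models")
    ).all()

    # 2) no explicit cache_key: _generate_cache_key() reuses the statement's
    #    SQL compilation cache key via to_offline_string(), so identical SQL
    #    with identical bound parameters maps to the same cache entry
    session.query(MyModel).filter(MyModel.name == some_name).options(
        FromCache("sql_cache_short", expiration_time=60)
    ).all()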
@@ -251,39 +216,25 @@ class RelationshipCache(MapperOption):
         """
         self.region = region
         self.cache_key = cache_key
+        self.expiration_time = expiration_time
+        self.ignore_expiration = ignore_expiration
         self._relationship_options = {
             (attribute.property.parent.class_, attribute.property.key): self
         }
 
-    def _generate_cache_key(self, path):
-        """Indicate to the lazy-loader strategy that a "baked" query
-        may be used by returning ``None``.
-
-        If this method is omitted, the default implementation of
-        :class:`.MapperOption._generate_cache_key` takes place, which
-        returns ``False`` to disable the "baked" query from being used.
-
-        .. versionadded:: 1.2.7
+    def _process_orm_context(self, orm_context):
+        current_path = orm_context.loader_strategy_path
 
-        """
-        return None
-
-    def process_query_conditionally(self, query):
-        """Process a Query that is used within a lazy loader.
-
-        (the process_query_conditionally() method is a SQLAlchemy
-        hook invoked only within lazyload.)
-
-        """
-        if query._current_path:
-            mapper, prop = query._current_path[-2:]
+        if current_path:
+            mapper, prop = current_path[-2:]
             key = prop.key
 
             for cls in mapper.class_.__mro__:
                 if (cls, key) in self._relationship_options:
-                    relationship_option = self._relationship_options[(cls, key)]
-                    query._cache_region = relationship_option
-                    break
+                    relationship_option = self._relationship_options[
+                        (cls, key)
+                    ]
+                    return relationship_option
 
     def and_(self, option):
         """Chain another RelationshipCache option to this one.
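
A sketch of RelationshipCache usage after this change, following the pattern of the upstream dogpile_caching example; the models and relationships (MyModel.children, Child.parent) are placeholders:

    # because propagate_to_loaders is True, the option travels into the
    # lazy-load queries, and _process_orm_context() looks up the matching
    # entry for the attribute being lazy-loaded
    related_cache = RelationshipCache(MyModel.children, "sql_cache_short").and_(
        RelationshipCache(Child.parent, "sql_cache_short")
    )

    q = session.query(MyModel).options(
        FromCache("sql_cache_short"), related_cache
    )
    rows = q.all()   # subsequent lazy loads of .children / .parent hit the cache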