@@ -0,0 +1,267 @@
"""caching_query.py

Represent persistence structures which allow the usage of
Beaker caching with SQLAlchemy.

The four new concepts introduced here are:

* CachingQuery - a Query subclass that caches and
  retrieves results in/from Beaker.
* FromCache - a query option that establishes caching
  parameters on a Query
* RelationshipCache - a variant of FromCache which is specific
  to a query invoked during a lazy load.
* _params_from_query - extracts value parameters from
  a Query.

The rest of what's here are standard SQLAlchemy and
Beaker constructs.

"""
from sqlalchemy.orm.interfaces import MapperOption
from sqlalchemy.orm.query import Query
from sqlalchemy.sql import visitors

class CachingQuery(Query):
    """A Query subclass which optionally loads full results from a Beaker
    cache region.

    The CachingQuery stores additional state that allows it to consult
    a Beaker cache before accessing the database:

    * A "region", which is a cache region argument passed to a
      Beaker CacheManager, specifies a particular cache configuration
      (including backend implementation, expiration times, etc.)
    * A "namespace", which is a qualifying name that identifies a
      group of keys within the cache.  A query that filters on a name
      might use the name "by_name", a query that filters on a date range
      to a joined table might use the name "related_date_range".

    When the above state is present, a Beaker cache is retrieved.

    The "namespace" name is first concatenated with
    a string composed of the individual entities and columns the Query
    requests, i.e. such as ``Query(User.id, User.name)``.

    The Beaker cache is then loaded from the cache manager based
    on the region and composed namespace.  The key within the cache
    itself is then constructed against the bind parameters specified
    by this query, which are usually literals defined in the
    WHERE clause.

    The FromCache and RelationshipCache mapper options below represent
    the "public" method of configuring this state upon the CachingQuery.

    """

    def __init__(self, manager, *args, **kw):
        self.cache_manager = manager
        Query.__init__(self, *args, **kw)

    def __iter__(self):
        """override __iter__ to pull results from Beaker
        if particular attributes have been configured.

        Note that this approach does *not* detach the loaded objects from
        the current session. If the cache backend is an in-process cache
        (like "memory") and lives beyond the scope of the current session's
        transaction, those objects may be expired. The method here can be
        modified to first expunge() each loaded item from the current
        session before returning the list of items, so that the items
        in the cache are not the same ones in the current Session.

        """
        if hasattr(self, '_cache_parameters'):
            return self.get_value(createfunc=lambda: list(Query.__iter__(self)))
        else:
            return Query.__iter__(self)

    def invalidate(self):
        """Invalidate the value represented by this Query."""

        cache, cache_key = _get_cache_parameters(self)
        cache.remove(cache_key)

    def get_value(self, merge=True, createfunc=None):
        """Return the value from the cache for this query.

        Raise KeyError if no value present and no
        createfunc specified.

        """
        cache, cache_key = _get_cache_parameters(self)
        ret = cache.get_value(cache_key, createfunc=createfunc)
        if merge:
            ret = self.merge_result(ret, load=False)
        return ret

    def set_value(self, value):
        """Set the value in the cache for this query."""

        cache, cache_key = _get_cache_parameters(self)
        cache.put(cache_key, value)

def query_callable(manager):
    def query(*arg, **kw):
        return CachingQuery(manager, *arg, **kw)
    return query

def _get_cache_parameters(query):
    """For a query with cache_region and cache_namespace configured,
    return the corresponding Cache instance and cache key, based
    on this query's current criterion and parameter values.

    """
    if not hasattr(query, '_cache_parameters'):
        raise ValueError("This Query does not have caching parameters configured.")

    region, namespace, cache_key = query._cache_parameters

    namespace = _namespace_from_query(namespace, query)

    if cache_key is None:
        # cache key - the value arguments from this query's parameters.
        args = _params_from_query(query)
        cache_key = " ".join([str(x) for x in args])

    # get cache
    cache = query.cache_manager.get_cache_region(namespace, region)

    # optional - hash the cache_key too for consistent length
    # import uuid
    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))

    return cache, cache_key

def _namespace_from_query(namespace, query):
    # cache namespace - the token handed in by the
    # option + class we're querying against
    namespace = " ".join([namespace] + [str(x) for x in query._entities])

    # memcached wants this
    namespace = namespace.replace(' ', '_')

    return namespace

def _set_cache_parameters(query, region, namespace, cache_key):

    if hasattr(query, '_cache_parameters'):
        region, namespace, cache_key = query._cache_parameters
        raise ValueError("This query is already configured "
                         "for region %r namespace %r" %
                         (region, namespace)
                         )
    query._cache_parameters = region, namespace, cache_key

class FromCache(MapperOption):
    """Specifies that a Query should load results from a cache."""

    propagate_to_loaders = False

    def __init__(self, region, namespace, cache_key=None):
        """Construct a new FromCache.

        :param region: the cache region.  Should be a
          region configured in the Beaker CacheManager.

        :param namespace: the cache namespace.  Should
          be a name uniquely describing the target Query's
          lexical structure.

        :param cache_key: optional.  A string cache key
          that will serve as the key to the query.  Use this
          if your query has a huge amount of parameters (such
          as when using in_()) which correspond more simply to
          some other identifier.

        """
        self.region = region
        self.namespace = namespace
        self.cache_key = cache_key

    def process_query(self, query):
        """Process a Query during normal loading operation."""

        _set_cache_parameters(query, self.region, self.namespace, self.cache_key)

class RelationshipCache(MapperOption):
    """Specifies that a Query as called within a "lazy load"
    should load results from a cache."""

    propagate_to_loaders = True

    def __init__(self, region, namespace, attribute):
        """Construct a new RelationshipCache.

        :param region: the cache region.  Should be a
          region configured in the Beaker CacheManager.

        :param namespace: the cache namespace.  Should
          be a name uniquely describing the target Query's
          lexical structure.

        :param attribute: A Class.attribute which
          indicates a particular class relationship() whose
          lazy loader should be pulled from the cache.

        """
        self.region = region
        self.namespace = namespace
        self._relationship_options = {
            (attribute.property.parent.class_, attribute.property.key) : self
        }

    def process_query_conditionally(self, query):
        """Process a Query that is used within a lazy loader.

        (the process_query_conditionally() method is a SQLAlchemy
        hook invoked only within lazyload.)

        """
        if query._current_path:
            mapper, key = query._current_path[-2:]

            for cls in mapper.class_.__mro__:
                if (cls, key) in self._relationship_options:
                    relationship_option = self._relationship_options[(cls, key)]
                    _set_cache_parameters(
                            query,
                            relationship_option.region,
                            relationship_option.namespace,
                            None)

    def and_(self, option):
        """Chain another RelationshipCache option to this one.

        While many RelationshipCache objects can be specified on a single
        Query separately, chaining them together allows for a more efficient
        lookup during load.

        """
        self._relationship_options.update(option._relationship_options)
        return self


def _params_from_query(query):
    """Pull the bind parameter values from a query.

    This takes into account any scalar attribute bindparam set up.

    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))
    would return [5, 7].

    """
    v = []
    def visit_bindparam(bind):
        value = query._params.get(bind.key, bind.value)

        # lazyloader may dig a callable in here, intended
        # to late-evaluate params after autoflush is called.
        # convert to a scalar value.
        if callable(value):
            value = value()

        v.append(value)
    if query._criterion is not None:
        visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
    return v
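
For reviewers, here is a small usage sketch of the API this file introduces. Everything other than query_callable, FromCache and the region wiring is an assumption made for illustration (the User model, the SQLite engine, the inline region settings); the Session setup mirrors what meta.py does further down in this changeset.

# Standalone sketch, not part of the changeset. Assumed names: User, 'users'
# table, in-memory SQLite engine, inline 'sql_cache_short' region.
from beaker import cache
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

from pylons_app.model.caching_query import query_callable, FromCache

cache_manager = cache.CacheManager()
cache_manager.regions['sql_cache_short'] = {'type': 'memory', 'expire': 10}

engine = create_engine('sqlite://')
Session = scoped_session(sessionmaker(bind=engine,
                                      query_cls=query_callable(cache_manager)))
Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

Base.metadata.create_all(engine)
Session.add(User(name='marcin'))
Session.commit()

# The first execution populates the Beaker cache; a repeated query with the
# same namespace and bind parameters is served from the cache.
q = Session.query(User).filter(User.name == 'marcin').\
        options(FromCache('sql_cache_short', 'user_by_name'))
print(q.all())

# Drop the cached value so the next execution hits the database again.
q.invalidate()

RelationshipCache works the same way, but is attached to a mapped relationship attribute so that lazy loads of that relationship consult the cache instead of the database.
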
@@ -43,16 +43,14 @@ except ImportError:
     raise Exception('Unable to import vcs')
 
 def _get_repos_cached_initial(app_globals, initial):
-    """
-    return cached dict with repos
+    """return cached dict with repos
     """
     g = app_globals
     return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui, initial)
 
 @cache_region('long_term', 'cached_repo_list')
 def _get_repos_cached():
-    """
-    return cached dict with repos
+    """return cached dict with repos
     """
     log.info('getting all repositories list')
     from pylons import app_globals as g
@@ -62,7 +60,8 @@ def _get_repos_cached():
 def _get_repos_switcher_cached(cached_repo_list):
     repos_lst = []
     for repo in sorted(x for x in cached_repo_list.values()):
-        if HasRepoPermissionAny('repository.write', 'repository.read', 'repository.admin')(repo.name.lower(), 'main page check'):
+        if HasRepoPermissionAny('repository.write', 'repository.read',
+                                'repository.admin')(repo.name.lower(), 'main page check'):
             repos_lst.append((repo.name.lower(), repo.dbrepo.private,))
 
     return repos_lst
@@ -73,14 +72,11 @@ def _full_changelog_cached(repo_name):
     return list(reversed(list(HgModel().get_repo(repo_name))))
 
 class HgModel(object):
-    """
-    Mercurial Model
+    """Mercurial Model
     """
 
     def __init__(self):
-        """
-        Constructor
-        """
+        pass
 
     @staticmethod
     def repo_scan(repos_prefix, repos_path, baseui, initial=False):
@@ -92,8 +88,7 @@ class HgModel(object):
         """
         sa = meta.Session()
         def check_repo_dir(path):
-            """
-            Checks the repository
+            """Checks the repository
             :param path:
             """
             repos_path = path.split('/')
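
The @cache_region decorator used on _get_repos_cached() above is Beaker's region-decorator API: it caches a function's return value under a region plus key. Below is a hedged sketch of that pattern based on Beaker's documented usage; the inline region configuration and the stand-in function body are illustrative only and not part of this changeset.

# Sketch only. In the application, the 'long_term' region comes from the
# Beaker/Pylons configuration rather than being declared inline like this.
from beaker.cache import cache_regions, cache_region, region_invalidate

cache_regions.update({
    'long_term': {'type': 'memory', 'expire': 3600},
})

@cache_region('long_term', 'cached_repo_list')
def get_repos_cached():
    # stands in for the real repository scan; runs only on a cache miss
    print('scanning repositories...')
    return {'repo_a': 1, 'repo_b': 2}

repos = get_repos_cached()   # first call executes the scan and caches it
repos = get_repos_cached()   # second call is served from the 'long_term' region

# after repositories change, drop the cached value so the next call rescans
region_invalidate(get_repos_cached, None, 'cached_repo_list')
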
@@ -1,15 +1,58 @@
 """SQLAlchemy Metadata and Session object"""
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import scoped_session, sessionmaker
+from pylons_app.model import caching_query
+from beaker import cache
+import os
+from os.path import join as jn, dirname as dn, abspath
+import time
+
+# Beaker CacheManager.  A home base for cache configurations.
+cache_manager = cache.CacheManager()
 
 __all__ = ['Base', 'Session']
 #
 # SQLAlchemy session manager. Updated by model.init_model()
 #
-Session = scoped_session(sessionmaker())
-#
+Session = scoped_session(
+                sessionmaker(
+                    query_cls=caching_query.query_callable(cache_manager)
+                )
+          )
 
 # The declarative Base
 Base = declarative_base()
 #For another db...
 #Base2 = declarative_base()
+
+#===============================================================================
+# CACHE OPTIONS
+#===============================================================================
+cache_dir = jn(dn(dn(dn(abspath(__file__)))), 'data', 'cache')
+if not os.path.isdir(cache_dir):
+    os.mkdir(cache_dir)
+# set start_time to current time
+# to re-cache everything
+# upon application startup
+start_time = time.time()
+# configure the "sqlalchemy" cache regions.
+cache_manager.regions['sql_cache_short'] = {
+        'type':'memory',
+        'data_dir':cache_dir,
+        'expire':10,
+        'start_time':start_time
+    }
+cache_manager.regions['sql_cache_med'] = {
+        'type':'memory',
+        'data_dir':cache_dir,
+        'expire':360,
+        'start_time':start_time
+    }
+cache_manager.regions['sql_cache_long'] = {
+        'type':'file',
+        'data_dir':cache_dir,
+        'expire':3600,
+        'start_time':start_time
+    }
+#to use cache use this in query
+#.options(FromCache("sqlalchemy_cache_type", "cachekey"))
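
A note for reviewers: the region name in the closing comment ("sqlalchemy_cache_type") is only a placeholder; the regions actually configured above are 'sql_cache_short', 'sql_cache_med' and 'sql_cache_long'. A hedged sketch of how queries would use them, where User and User.repositories are assumed mapped attributes and not part of this file:

# Sketch only, using the Session constructed above.
from pylons_app.model.caching_query import FromCache, RelationshipCache

# result cached for 10 seconds in the in-memory 'sql_cache_short' region
users = Session.query(User).\
        options(FromCache('sql_cache_short', 'get_all_users')).\
        all()

# lazy loads of User.repositories served from the file-backed
# 'sql_cache_long' region (cached for an hour)
q = Session.query(User).\
        options(RelationshipCache('sql_cache_long', 'by_user',
                                  User.repositories))
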