file-store: don't respond with cookies on file-store download.
marcink
r4236:a948e8d8 stable
@@ -1,174 +1,177 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 import logging

 from pyramid.view import view_config
 from pyramid.response import FileResponse
 from pyramid.httpexceptions import HTTPFound, HTTPNotFound

 from rhodecode.apps._base import BaseAppView
 from rhodecode.apps.file_store import utils
 from rhodecode.apps.file_store.exceptions import (
     FileNotAllowedException, FileOverSizeException)

 from rhodecode.lib import helpers as h
 from rhodecode.lib import audit_logger
 from rhodecode.lib.auth import (
     CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny,
     LoginRequired)
 from rhodecode.model.db import Session, FileStore, UserApiKeys

 log = logging.getLogger(__name__)


 class FileStoreView(BaseAppView):
     upload_key = 'store_file'

     def load_default_context(self):
         c = self._get_local_tmpl_context()
         self.storage = utils.get_file_storage(self.request.registry.settings)
         return c

     def _serve_file(self, file_uid):

         if not self.storage.exists(file_uid):
             store_path = self.storage.store_path(file_uid)
             log.debug('File with FID:%s not found in the store under `%s`',
                       file_uid, store_path)
             raise HTTPNotFound()

         db_obj = FileStore().query().filter(FileStore.file_uid == file_uid).scalar()
         if not db_obj:
             raise HTTPNotFound()

         # private upload for user
         if db_obj.check_acl and db_obj.scope_user_id:
             log.debug('Artifact: checking scope access for bound artifact user: `%s`',
                       db_obj.scope_user_id)
             user = db_obj.user
             if self._rhodecode_db_user.user_id != user.user_id:
                 log.warning('Access to file store object forbidden')
                 raise HTTPNotFound()

         # scoped to repository permissions
         if db_obj.check_acl and db_obj.scope_repo_id:
             log.debug('Artifact: checking scope access for bound artifact repo: `%s`',
                       db_obj.scope_repo_id)
             repo = db_obj.repo
             perm_set = ['repository.read', 'repository.write', 'repository.admin']
             has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check')
             if not has_perm:
                 log.warning('Access to file store object `%s` forbidden', file_uid)
                 raise HTTPNotFound()

         # scoped to repository group permissions
         if db_obj.check_acl and db_obj.scope_repo_group_id:
             log.debug('Artifact: checking scope access for bound artifact repo group: `%s`',
                       db_obj.scope_repo_group_id)
             repo_group = db_obj.repo_group
             perm_set = ['group.read', 'group.write', 'group.admin']
             has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check')
             if not has_perm:
                 log.warning('Access to file store object `%s` forbidden', file_uid)
                 raise HTTPNotFound()

         FileStore.bump_access_counter(file_uid)

         file_path = self.storage.store_path(file_uid)
+        # For file store we don't submit any session data, this logic tells the
+        # Session lib to skip it
+        setattr(self.request, '_file_response', True)
         return FileResponse(file_path)

     @LoginRequired()
     @NotAnonymous()
     @CSRFRequired()
     @view_config(route_name='upload_file', request_method='POST', renderer='json_ext')
     def upload_file(self):
         self.load_default_context()
         file_obj = self.request.POST.get(self.upload_key)

         if file_obj is None:
             return {'store_fid': None,
                     'access_path': None,
                     'error': '{} data field is missing'.format(self.upload_key)}

         if not hasattr(file_obj, 'filename'):
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'filename cannot be read from the data field'}

         filename = file_obj.filename

         metadata = {
             'user_uploaded': {'username': self._rhodecode_user.username,
                               'user_id': self._rhodecode_user.user_id,
                               'ip': self._rhodecode_user.ip_addr}}
         try:
             store_uid, metadata = self.storage.save_file(
                 file_obj.file, filename, extra_metadata=metadata)
         except FileNotAllowedException:
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'File {} is not allowed.'.format(filename)}

         except FileOverSizeException:
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'File {} is exceeding allowed limit.'.format(filename)}

         try:
             entry = FileStore.create(
                 file_uid=store_uid, filename=metadata["filename"],
                 file_hash=metadata["sha256"], file_size=metadata["size"],
                 file_description=u'upload attachment',
                 check_acl=False, user_id=self._rhodecode_user.user_id
             )
             Session().add(entry)
             Session().commit()
             log.debug('Stored upload in DB as %s', entry)
         except Exception:
             log.exception('Failed to store file %s', filename)
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'File {} failed to store in DB.'.format(filename)}

         return {'store_fid': store_uid,
                 'access_path': h.route_path('download_file', fid=store_uid)}

     # ACL is checked by scopes, if no scope the file is accessible to all
     @view_config(route_name='download_file')
     def download_file(self):
         self.load_default_context()
         file_uid = self.request.matchdict['fid']
         log.debug('Requesting FID:%s from store %s', file_uid, self.storage)
         return self._serve_file(file_uid)

     # in addition to @LoginRequired ACL is checked by scopes
     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD])
     @NotAnonymous()
     @view_config(route_name='download_file_by_token')
     def download_file_by_token(self):
         """
         Special view that allows to access the download file by special URL that
         is stored inside the URL.

         http://example.com/_file_store/token-download/TOKEN/FILE_UID
         """
         self.load_default_context()
         file_uid = self.request.matchdict['fid']
         return self._serve_file(file_uid)
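The view-side change above only flags the request; the actual suppression of the Set-Cookie header happens in the patched Beaker session glue shown in the second hunk below. As a rough, hypothetical illustration (the route pattern, pytest fixture names, and the test itself are assumptions, not part of this changeset), the new behaviour could be verified with a WebTest-style functional test:

import webtest


def test_download_does_not_set_session_cookie(pyramid_wsgi_app, stored_file_uid):
    # wrap the configured WSGI app; both fixture names here are hypothetical
    app = webtest.TestApp(pyramid_wsgi_app)

    # assumed route pattern for the 'download_file' route
    response = app.get('/_file_store/download/%s' % stored_file_uid)

    assert response.status_code == 200
    # the point of the change: artifact downloads must not emit a session cookie
    assert 'Set-Cookie' not in response.headers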
@@ -1,200 +1,204 @@
 # Copyright (c) 2010 Agendaless Consulting and Contributors.
 # (http://www.agendaless.com), All Rights Reserved
 # License: BSD-derived (http://www.repoze.org/LICENSE.txt)
 # With Patches from RhodeCode GmBH


 import os

 from beaker import cache
 from beaker.session import SessionObject
 from beaker.util import coerce_cache_params
 from beaker.util import coerce_session_params

 from pyramid.interfaces import ISession
 from pyramid.settings import asbool
 from zope.interface import implementer

 from binascii import hexlify


 def BeakerSessionFactoryConfig(**options):
     """ Return a Pyramid session factory using Beaker session settings
     supplied directly as ``**options``"""

     class PyramidBeakerSessionObject(SessionObject):
         _options = options
         _cookie_on_exception = _options.pop('cookie_on_exception', True)
         _constant_csrf_token = _options.pop('constant_csrf_token', False)

         def __init__(self, request):
             SessionObject.__init__(self, request.environ, **self._options)

             def session_callback(request, response):
                 exception = getattr(request, 'exception', None)
-                if (exception is None or self._cookie_on_exception) and self.accessed():
+                file_response = getattr(request, '_file_response', None)
+
+                if file_response is None \
+                        and (exception is None or self._cookie_on_exception) \
+                        and self.accessed():
                     self.persist()
                     headers = self.__dict__['_headers']
-                    if headers['set_cookie'] and headers['cookie_out']:
+                    if headers.get('set_cookie') and headers.get('cookie_out'):
                         response.headerlist.append(('Set-Cookie', headers['cookie_out']))
             request.add_response_callback(session_callback)

         # ISession API

         @property
         def id(self):
             # this is as inspected in SessionObject.__init__
             if self.__dict__['_params'].get('type') != 'cookie':
                 return self._session().id
             return None

         @property
         def new(self):
             return self.last_accessed is None

         changed = SessionObject.save

         # modifying dictionary methods

         @call_save
         def clear(self):
             return self._session().clear()

         @call_save
         def update(self, d, **kw):
             return self._session().update(d, **kw)

         @call_save
         def setdefault(self, k, d=None):
             return self._session().setdefault(k, d)

         @call_save
         def pop(self, k, d=None):
             return self._session().pop(k, d)

         @call_save
         def popitem(self):
             return self._session().popitem()

         __setitem__ = call_save(SessionObject.__setitem__)
         __delitem__ = call_save(SessionObject.__delitem__)

         # Flash API methods
         def flash(self, msg, queue='', allow_duplicate=True):
             storage = self.setdefault('_f_' + queue, [])
             if allow_duplicate or (msg not in storage):
                 storage.append(msg)

         def pop_flash(self, queue=''):
             storage = self.pop('_f_' + queue, [])
             return storage

         def peek_flash(self, queue=''):
             storage = self.get('_f_' + queue, [])
             return storage

         # CSRF API methods
         def new_csrf_token(self):
             token = (self._constant_csrf_token
                      or hexlify(os.urandom(20)).decode('ascii'))
             self['_csrft_'] = token
             return token

         def get_csrf_token(self):
             token = self.get('_csrft_', None)
             if token is None:
                 token = self.new_csrf_token()
             return token

     return implementer(ISession)(PyramidBeakerSessionObject)


 def call_save(wrapped):
     """ By default, in non-auto-mode beaker badly wants people to
     call save even though it should know something has changed when
     a mutating method is called. This hack should be removed if
     Beaker ever starts to do this by default. """
     def save(session, *arg, **kw):
         value = wrapped(session, *arg, **kw)
         session.save()
         return value
     save.__doc__ = wrapped.__doc__
     return save


 def session_factory_from_settings(settings):
     """ Return a Pyramid session factory using Beaker session settings
     supplied from a Paste configuration file"""
     prefixes = ('session.', 'beaker.session.')
     options = {}

     # Pull out any config args meant for beaker session. if there are any
     for k, v in settings.items():
         for prefix in prefixes:
             if k.startswith(prefix):
                 option_name = k[len(prefix):]
                 if option_name == 'cookie_on_exception':
                     v = asbool(v)
                 options[option_name] = v

     options = coerce_session_params(options)
     return BeakerSessionFactoryConfig(**options)


 def set_cache_regions_from_settings(settings):
     """ Add cache support to the Pylons application.

     The ``settings`` passed to the configurator are used to setup
     the cache options. Cache options in the settings should start
     with either 'beaker.cache.' or 'cache.'.

     """
     cache_settings = {'regions': []}
     for key in settings.keys():
         for prefix in ['beaker.cache.', 'cache.']:
             if key.startswith(prefix):
                 name = key.split(prefix)[1].strip()
                 cache_settings[name] = settings[key].strip()

     if ('expire' in cache_settings
             and isinstance(cache_settings['expire'], basestring)
             and cache_settings['expire'].lower() in ['none', 'no']):
         cache_settings['expire'] = None

     coerce_cache_params(cache_settings)

     if 'enabled' not in cache_settings:
         cache_settings['enabled'] = True

     regions = cache_settings['regions']
     if regions:
         for region in regions:
             if not region:
                 continue

             region_settings = {
                 'data_dir': cache_settings.get('data_dir'),
                 'lock_dir': cache_settings.get('lock_dir'),
                 'expire': cache_settings.get('expire', 60),
                 'enabled': cache_settings['enabled'],
                 'key_length': cache_settings.get('key_length', 250),
                 'type': cache_settings.get('type'),
                 'url': cache_settings.get('url'),
             }
             region_prefix = '%s.' % region
             region_len = len(region_prefix)
             for key in list(cache_settings.keys()):
                 if key.startswith(region_prefix):
                     region_settings[key[region_len:]] = cache_settings.pop(key)

             if (isinstance(region_settings['expire'], basestring)
                     and region_settings['expire'].lower() in ['none', 'no']):
                 region_settings['expire'] = None
             coerce_cache_params(region_settings)
             cache.cache_regions[region] = region_settings


 def includeme(config):
     session_factory = session_factory_from_settings(config.registry.settings)
     config.set_session_factory(session_factory)
     set_cache_regions_from_settings(config.registry.settings)
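The mechanism both hunks rely on is Pyramid's request.add_response_callback: the callback fires after the view has returned, so an attribute the view sets on the request is still visible when the response headers are assembled. A minimal, self-contained sketch of that pattern (plain Pyramid with hypothetical names, no Beaker involved) might look roughly like this:

from wsgiref.simple_server import make_server

from pyramid.config import Configurator
from pyramid.events import NewRequest
from pyramid.response import Response


def add_cookie_unless_file_response(request, response):
    # mirrors session_callback above: skip the cookie for flagged responses
    if getattr(request, '_file_response', None) is None:
        response.set_cookie('session_id', 'abc123')


def register_cookie_callback(event):
    # run the cookie logic once the response for this request is ready
    event.request.add_response_callback(add_cookie_unless_file_response)


def download_view(request):
    # flag the request *before* returning, just like _serve_file() does
    setattr(request, '_file_response', True)
    return Response(body=b'binary artifact data',
                    content_type='application/octet-stream')


if __name__ == '__main__':
    with Configurator() as config:
        config.add_subscriber(register_cookie_callback, NewRequest)
        config.add_route('download', '/download')
        config.add_view(download_view, route_name='download')
        app = config.make_wsgi_app()
    make_server('127.0.0.1', 6543, app).serve_forever()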