elasticsearch: support multiple ES nodes
ergo
r5:36e6039f
@@ -1,285 +1,285 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # App Enlight Enterprise Edition, including its added features, Support
19 19 # services, and proprietary license terms, please see
20 20 # https://rhodecode.com/licenses/
21 21
22 22 import datetime
23 23 import logging
24 24 import pyelasticsearch
25 25 import redis
26 26 import os
27 27 from pkg_resources import iter_entry_points
28 28
29 29 import appenlight.lib.jinja2_filters as jinja2_filters
30 30 import appenlight.lib.encryption as encryption
31 31
32 32 from authomatic.providers import oauth2, oauth1
33 33 from authomatic import Authomatic
34 34 from pyramid.config import Configurator, PHASE3_CONFIG
35 35 from pyramid.authentication import AuthTktAuthenticationPolicy
36 36 from pyramid.authorization import ACLAuthorizationPolicy
37 37 from pyramid_mailer.mailer import Mailer
38 38 from pyramid.renderers import JSON
39 39 from pyramid_redis_sessions import session_factory_from_settings
40 40 from pyramid.settings import asbool, aslist
41 41 from pyramid.security import AllPermissionsList
42 42 from pyramid_authstack import AuthenticationStackPolicy
43 43 from redlock import Redlock
44 44 from sqlalchemy import engine_from_config
45 45
46 46 from appenlight.celery import configure_celery
47 47 from appenlight.lib import cache_regions
48 48 from appenlight.lib.ext_json import json
49 49 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
50 50
51 51 json_renderer = JSON(serializer=json.dumps, indent=4)
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 def datetime_adapter(obj, request):
57 57 return obj.isoformat()
58 58
59 59
60 60 def all_permissions_adapter(obj, request):
61 61 return '__all_permissions__'
62 62
63 63
64 64 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
65 65 json_renderer.add_adapter(AllPermissionsList, all_permissions_adapter)
66 66
67 67
68 68 def main(global_config, **settings):
69 69 """ This function returns a Pyramid WSGI application.
70 70 """
71 71 auth_tkt_policy = AuthTktAuthenticationPolicy(
72 72 settings['authtkt.secret'],
73 73 hashalg='sha512',
74 74 callback=groupfinder,
75 75 max_age=2592000,
76 76 secure=asbool(settings.get('authtkt.secure', 'false')))
77 77 auth_token_policy = AuthTokenAuthenticationPolicy(
78 78 callback=groupfinder
79 79 )
80 80 authorization_policy = ACLAuthorizationPolicy()
81 81 authentication_policy = AuthenticationStackPolicy()
82 82 authentication_policy.add_policy('auth_tkt', auth_tkt_policy)
83 83 authentication_policy.add_policy('auth_token', auth_token_policy)
84 84 # set crypto key
85 85 encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
86 86 # import this later so encryption key can be monkeypatched
87 87 from appenlight.models import DBSession, register_datastores
88 88 # update config with cometd info
89 89 settings['cometd_servers'] = {'server': settings['cometd.server'],
90 90 'secret': settings['cometd.secret']}
91 91
92 92 # Create the Pyramid Configurator.
93 93 settings['_mail_url'] = settings['mailing.app_url']
94 94 config = Configurator(settings=settings,
95 95 authentication_policy=authentication_policy,
96 96 authorization_policy=authorization_policy,
97 97 root_factory='appenlight.security.RootFactory',
98 98 default_permission='view')
99 99 config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN')
100 100 config.add_view_deriver('appenlight.predicates.csrf_view',
101 101 name='csrf_view')
102 102
103 103
104 104 # later, when config is available
105 105 dogpile_config = {'url': settings['redis.url'],
106 106 "redis_expiration_time": 86400,
107 107 "redis_distributed_lock": True}
108 108 cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
109 109 config.registry.cache_regions = cache_regions.regions
110 110 engine = engine_from_config(settings, 'sqlalchemy.',
111 111 json_serializer=json.dumps)
112 112 DBSession.configure(bind=engine)
113 113
114 114 # json renderer that serializes datetime
115 115 config.add_renderer('json', json_renderer)
116 116 config.set_request_property('appenlight.lib.request.es_conn', 'es_conn')
117 117 config.set_request_property('appenlight.lib.request.get_user', 'user',
118 118 reify=True)
119 119 config.set_request_property('appenlight.lib.request.get_csrf_token',
120 120 'csrf_token', reify=True)
121 121 config.set_request_property('appenlight.lib.request.safe_json_body',
122 122 'safe_json_body', reify=True)
123 123 config.set_request_property('appenlight.lib.request.unsafe_json_body',
124 124 'unsafe_json_body', reify=True)
125 125 config.add_request_method('appenlight.lib.request.add_flash_to_headers',
126 126 'add_flash_to_headers')
127 127
128 128 config.include('pyramid_redis_sessions')
129 129 config.include('pyramid_tm')
130 130 config.include('pyramid_jinja2')
131 131 config.include('appenlight_client.ext.pyramid_tween')
132 132 config.include('ziggurat_foundations.ext.pyramid.sign_in')
133 133 config.registry.es_conn = pyelasticsearch.ElasticSearch(
134 settings['elasticsearch.nodes'])
134 aslist(settings['elasticsearch.nodes']))
135 135 config.registry.redis_conn = redis.StrictRedis.from_url(
136 136 settings['redis.url'])
137 137
138 138 config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url'], ],
139 139 retry_count=0, retry_delay=0)
140 140 # mailer
141 141 config.registry.mailer = Mailer.from_settings(settings)
142 142
143 143 # Configure sessions
144 144 session_factory = session_factory_from_settings(settings)
145 145 config.set_session_factory(session_factory)
146 146
147 147 # Configure renderers and event subscribers
148 148 config.add_jinja2_extension('jinja2.ext.loopcontrols')
149 149 config.add_jinja2_search_path('appenlight:templates')
150 150 # event subscribers
151 151 config.add_subscriber("appenlight.subscribers.application_created",
152 152 "pyramid.events.ApplicationCreated")
153 153 config.add_subscriber("appenlight.subscribers.add_renderer_globals",
154 154 "pyramid.events.BeforeRender")
155 155 config.add_subscriber('appenlight.subscribers.new_request',
156 156 'pyramid.events.NewRequest')
157 157 config.add_view_predicate('context_type_class',
158 158 'appenlight.predicates.contextTypeClass')
159 159
160 160 register_datastores(es_conn=config.registry.es_conn,
161 161 redis_conn=config.registry.redis_conn,
162 162 redis_lockmgr=config.registry.redis_lockmgr)
163 163
164 164 # base stuff and scan
165 165
166 166 # need to ensure webassets exists otherwise config.override_asset()
167 167 # throws exception
168 168 if not os.path.exists(settings['webassets.dir']):
169 169 os.mkdir(settings['webassets.dir'])
170 170 config.add_static_view(path='appenlight:webassets',
171 171 name='static', cache_max_age=3600)
172 172 config.override_asset(to_override='appenlight:webassets/',
173 173 override_with=settings['webassets.dir'])
174 174
175 175 config.include('appenlight.views')
176 176 config.include('appenlight.views.admin')
177 177 config.scan(ignore=['appenlight.migrations',
178 178 'appenlight.scripts',
179 179 'appenlight.tests'])
180 180
181 181 # authomatic social auth
182 182 authomatic_conf = {
183 183 # callback http://yourapp.com/social_auth/twitter
184 184 'twitter': {
185 185 'class_': oauth1.Twitter,
186 186 'consumer_key': settings.get('authomatic.pr.twitter.key', 'X'),
187 187 'consumer_secret': settings.get('authomatic.pr.twitter.secret',
188 188 'X'),
189 189 },
190 190 # callback http://yourapp.com/social_auth/facebook
191 191 'facebook': {
192 192 'class_': oauth2.Facebook,
193 193 'consumer_key': settings.get('authomatic.pr.facebook.app_id', 'X'),
194 194 'consumer_secret': settings.get('authomatic.pr.facebook.secret',
195 195 'X'),
196 196 'scope': ['email'],
197 197 },
198 198 # callback http://yourapp.com/social_auth/google
199 199 'google': {
200 200 'class_': oauth2.Google,
201 201 'consumer_key': settings.get('authomatic.pr.google.key', 'X'),
202 202 'consumer_secret': settings.get(
203 203 'authomatic.pr.google.secret', 'X'),
204 204 'scope': ['profile', 'email'],
205 205 },
206 206 'github': {
207 207 'class_': oauth2.GitHub,
208 208 'consumer_key': settings.get('authomatic.pr.github.key', 'X'),
209 209 'consumer_secret': settings.get(
210 210 'authomatic.pr.github.secret', 'X'),
211 211 'scope': ['repo', 'public_repo', 'user:email'],
212 212 'access_headers': {'User-Agent': 'AppEnlight'},
213 213 },
214 214 'bitbucket': {
215 215 'class_': oauth1.Bitbucket,
216 216 'consumer_key': settings.get('authomatic.pr.bitbucket.key', 'X'),
217 217 'consumer_secret': settings.get(
218 218 'authomatic.pr.bitbucket.secret', 'X')
219 219 }
220 220 }
221 221 config.registry.authomatic = Authomatic(
222 222 config=authomatic_conf, secret=settings['authomatic.secret'])
223 223
224 224 # resource type information
225 225 config.registry.resource_types = ['resource', 'application']
226 226
227 227 # plugin information
228 228 config.registry.appenlight_plugins = {}
229 229
230 230 def register_appenlight_plugin(config, plugin_name, plugin_config):
231 231 def register():
232 232 log.warning('Registering plugin: {}'.format(plugin_name))
233 233 if plugin_name not in config.registry.appenlight_plugins:
234 234 config.registry.appenlight_plugins[plugin_name] = {
235 235 'javascript': None,
236 236 'static': None,
237 237 'css': None,
238 238 'top_nav': None,
239 239 'celery_tasks': None,
240 240 'celery_beats': None,
241 241 'fulltext_indexer': None,
242 242 'sqlalchemy_migrations': None,
243 243 'default_values_setter': None,
244 244 'resource_types': [],
245 245 'url_gen': None
246 246 }
247 247 config.registry.appenlight_plugins[plugin_name].update(
248 248 plugin_config)
249 249 # inform AE what kind of resource types we have available
250 250 # so we can avoid failing when a plugin is removed but data
251 251 # is still present in the db
252 252 if plugin_config.get('resource_types'):
253 253 config.registry.resource_types.extend(
254 254 plugin_config['resource_types'])
255 255
256 256 config.action('appenlight_plugin={}'.format(plugin_name), register)
257 257
258 258 config.add_directive('register_appenlight_plugin',
259 259 register_appenlight_plugin)
260 260
261 261 for entry_point in iter_entry_points(group='appenlight.plugins'):
262 262 plugin = entry_point.load()
263 263 plugin.includeme(config)
264 264
265 265 # include other appenlight plugins explicitly if needed
266 266 includes = aslist(settings.get('appenlight.includes', []))
267 267 for inc in includes:
268 268 config.include(inc)
269 269
270 270 # run this after everything registers in configurator
271 271
272 272 def pre_commit():
273 273 jinja_env = config.get_jinja2_environment()
274 274 jinja_env.filters['tojson'] = json.dumps
275 275 jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe
276 276
277 277 config.action(None, pre_commit, order=PHASE3_CONFIG + 999)
278 278
279 279 def wrap_config_celery():
280 280 configure_celery(config.registry)
281 281
282 282 config.action(None, wrap_config_celery, order=PHASE3_CONFIG + 999)
283 283
284 284 app = config.make_wsgi_app()
285 285 return app
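
The functional change in this hunk is wrapping `settings['elasticsearch.nodes']` in `aslist()`, so the setting can carry several node URLs instead of exactly one. A minimal sketch of the resulting wiring, using illustrative node URLs that are not part of the shipped configuration:

```python
from pyramid.settings import aslist
import pyelasticsearch

# Illustrative settings dict; the committed ini files list a single node.
settings = {'elasticsearch.nodes': 'http://10.0.0.1:9200 http://10.0.0.2:9200'}

# aslist() turns the whitespace-separated value into a list of URLs, and
# pyelasticsearch.ElasticSearch accepts either a single URL or a list,
# so the same call covers one node or many.
es_conn = pyelasticsearch.ElasticSearch(aslist(settings['elasticsearch.nodes']))
```

With a list of URLs, pyelasticsearch can distribute requests across the listed nodes instead of pinning every call to a single host, which is what the commit title refers to.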
@@ -1,135 +1,135 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # App Enlight Enterprise Edition, including its added features, Support
19 19 # services, and proprietary license terms, please see
20 20 # https://rhodecode.com/licenses/
21 21
22 22 import sqlalchemy as sa
23 23 import logging
24 24 import hashlib
25 25
26 26 from datetime import datetime
27 27 from appenlight.models import Base
28 28 from appenlight.lib.utils import convert_es_type
29 29 from appenlight.lib.enums import LogLevel
30 30 from sqlalchemy.dialects.postgresql import JSON
31 31 from ziggurat_foundations.models.base import BaseModel
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 class Log(Base, BaseModel):
37 37 __tablename__ = 'logs'
38 38 __table_args__ = {'implicit_returning': False}
39 39
40 40 log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
41 41 resource_id = sa.Column(sa.Integer(),
42 42 sa.ForeignKey('applications.resource_id',
43 43 onupdate='CASCADE',
44 44 ondelete='CASCADE'),
45 45 nullable=False,
46 46 index=True)
47 47 log_level = sa.Column(sa.Unicode, nullable=False, index=True,
48 48 default='INFO')
49 49 message = sa.Column(sa.UnicodeText(), default='')
50 50 timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
51 51 server_default=sa.func.now())
52 52 request_id = sa.Column(sa.Unicode())
53 53 namespace = sa.Column(sa.Unicode())
54 54 primary_key = sa.Column(sa.Unicode())
55 55
56 56 tags = sa.Column(JSON(), default={})
57 57 permanent = sa.Column(sa.Boolean(), nullable=False, default=False)
58 58
59 59 def __str__(self):
60 60 return self.__unicode__().encode('utf8')
61 61
62 62 def __unicode__(self):
63 63 return '<Log id:%s, lv:%s, ns:%s >' % (
64 64 self.log_id, self.log_level, self.namespace)
65 65
66 66 def set_data(self, data, resource):
67 67 level = data.get('log_level').upper()
68 68 self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN)
69 69 self.message = data.get('message', '')
70 70 server_name = data.get('server', '').lower() or 'unknown'
71 71 self.tags = {
72 72 'server_name': server_name
73 73 }
74 74 if data.get('tags'):
75 75 for tag_tuple in data['tags']:
76 76 self.tags[tag_tuple[0]] = tag_tuple[1]
77 77 self.timestamp = data['date']
78 78 r_id = data.get('request_id', '')
79 79 if not r_id:
80 80 r_id = ''
81 81 self.request_id = r_id.replace('-', '')
82 82 self.resource_id = resource.resource_id
83 83 self.namespace = data.get('namespace') or ''
84 84 self.permanent = data.get('permanent')
85 85 self.primary_key = data.get('primary_key')
86 86 if self.primary_key is not None:
87 87 self.tags['appenlight_primary_key'] = self.primary_key
88 88
89 89 def get_dict(self):
90 90 instance_dict = super(Log, self).get_dict()
91 91 instance_dict['log_level'] = LogLevel.key_from_value(self.log_level)
92 92 instance_dict['resource_name'] = self.application.resource_name
93 93 return instance_dict
94 94
95 95 @property
96 96 def delete_hash(self):
97 97 if not self.primary_key:
98 98 return None
99 99
100 100 to_hash = '{}_{}_{}'.format(self.resource_id, self.primary_key,
101 self.namespace)
101 self.namespace)
102 102 return hashlib.sha1(to_hash.encode('utf8')).hexdigest()
103 103
104 104 def es_doc(self):
105 105 tags = {}
106 106 tag_list = []
107 107 for name, value in self.tags.items():
108 108 # replace dot in indexed tag name
109 109 name = name.replace('.', '_')
110 110 tag_list.append(name)
111 111 tags[name] = {
112 112 "values": convert_es_type(value),
113 113 "numeric_values": value if (
114 114 isinstance(value, (int, float)) and
115 115 not isinstance(value, bool)) else None
116 116 }
117 117 return {
118 118 'pg_id': str(self.log_id),
119 119 'delete_hash': self.delete_hash,
120 120 'resource_id': self.resource_id,
121 121 'request_id': self.request_id,
122 122 'log_level': LogLevel.key_from_value(self.log_level),
123 123 'timestamp': self.timestamp,
124 124 'message': self.message if self.message else '',
125 125 'namespace': self.namespace if self.namespace else '',
126 126 'tags': tags,
127 127 'tag_list': tag_list
128 128 }
129 129
130 130 @property
131 131 def partition_id(self):
132 132 if self.permanent:
133 133 return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m')
134 134 else:
135 135 return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m_%d')
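
The `es_doc()` / `partition_id` pair above decides what gets indexed and into which time-partitioned index, while `delete_hash` gives a stable identifier derived from the resource, primary key, and namespace (presumably used to locate earlier documents submitted with the same primary key). A standalone restatement of that routing logic, with made-up values for illustration:

```python
import hashlib
from datetime import datetime

# Mirror of Log.partition_id: daily indices for transient logs,
# monthly indices for permanent ones.
timestamp = datetime(2016, 5, 17, 12, 0)
permanent = False
fmt = '%Y_%m' if permanent else '%Y_%m_%d'
index_name = 'rcae_l_%s' % timestamp.strftime(fmt)   # 'rcae_l_2016_05_17'

# Mirror of Log.delete_hash; the resource_id/primary_key/namespace values
# here are illustrative, not taken from the source.
resource_id, primary_key, namespace = 42, 'task-123', 'myapp.tasks'
to_hash = '{}_{}_{}'.format(resource_id, primary_key, namespace)
delete_hash = hashlib.sha1(to_hash.encode('utf8')).hexdigest()
print(index_name, delete_hash)
```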
@@ -1,196 +1,196 b''
1 1 [app:appenlight]
2 2 use = egg:appenlight
3 3 reload_templates = true
4 4 debug_authorization = true
5 5 debug_notfound = true
6 6 debug_routematch = true
7 7 debug_templates = true
8 8 default_locale_name = en
9 9 sqlalchemy.url = postgresql://test:test@localhost/appenlight
10 10 sqlalchemy.pool_size = 10
11 11 sqlalchemy.max_overflow = 50
12 12 sqlalchemy.echo = false
13 13 jinja2.directories = appenlight:templates
14 14 jinja2.filters = nl2br = appenlight.lib.jinja2_filters.nl2br
15 15
16 16
17 17 pyramid.includes = pyramid_debugtoolbar
18 18
19 19 debugtoolbar.panels =
20 20 pyramid_debugtoolbar.panels.versions.VersionDebugPanel
21 21 pyramid_debugtoolbar.panels.settings.SettingsDebugPanel
22 22 pyramid_debugtoolbar.panels.headers.HeaderDebugPanel
23 23 pyramid_debugtoolbar.panels.renderings.RenderingsDebugPanel
24 24 pyramid_debugtoolbar.panels.logger.LoggingPanel
25 25 pyramid_debugtoolbar.panels.performance.PerformanceDebugPanel
26 26 pyramid_debugtoolbar.panels.routes.RoutesDebugPanel
27 27 pyramid_debugtoolbar.panels.sqla.SQLADebugPanel
28 28 pyramid_debugtoolbar.panels.tweens.TweensDebugPanel
29 29 pyramid_debugtoolbar.panels.introspection.IntrospectionDebugPanel
30 30
31 31 appenlight.includes =
32 32
33 33 # encryption
34 34 encryption_secret = oEOikr_T98wTh_xLH3w8Se3kmbgAQYSM4poZvPosya0=
35 35
36 36 #redis
37 37 redis.url = redis://localhost:6379/0
38 38 redis.redlock.url = redis://localhost:6379/3
39 39
40 #solr
40 #elasticsearch
41 41 elasticsearch.nodes = http://127.0.0.1:9200
42 42
43 43 #dirs
44 44 webassets.dir = %(here)s/webassets/
45 45
46 46 #authtkt
47 47 authtkt.secure = false
48 48 authtkt.secret = SECRET
49 49 # session settings
50 50 redis.sessions.secret = SECRET
51 51 redis.sessions.timeout = 86400
52 52
53 53 # session cookie settings
54 54 redis.sessions.cookie_name = appenlight
55 55 redis.sessions.cookie_max_age = 2592000
56 56 redis.sessions.cookie_path = /
57 57 redis.sessions.cookie_domain =
58 58 redis.sessions.cookie_secure = False
59 59 redis.sessions.cookie_httponly = False
60 60 redis.sessions.cookie_on_exception = True
61 61 redis.sessions.prefix = appenlight:session:
62 62
63 63
64 64 #cache
65 65 cache.regions = default_term, second, short_term, long_term
66 66 cache.type = ext:memcached
67 67 cache.url = 127.0.0.1:11211
68 68 cache.lock_dir = %(here)s/data/cache/lock
69 69 cache.second.expire = 1
70 70 cache.short_term.expire = 60
71 71 cache.default_term.expire = 300
72 72
73 73 #mailing
74 74 mailing.app_url = http://localhost:6543
75 75 mailing.from_name = App Enlight LOCAL
76 76 mailing.from_email = no-reply@appenlight.local
77 77
78 78
79 79 ###
80 80 # Authomatic configuration
81 81 ###
82 82
83 83 authomatic.secret = SECRET
84 84 authomatic.pr.facebook.app_id =
85 85 authomatic.pr.facebook.secret =
86 86 authomatic.pr.twitter.key =
87 87 authomatic.pr.twitter.secret =
88 88 authomatic.pr.google.key =
89 89 authomatic.pr.google.secret =
90 90 authomatic.pr.github.key =
91 91 authomatic.pr.github.secret =
92 92 authomatic.pr.github.scope = repo, public_repo, user:email
93 93 authomatic.pr.bitbucket.key =
94 94 authomatic.pr.bitbucket.secret =
95 95
96 96 #ziggurat
97 97 ziggurat_foundations.model_locations.User = appenlight.models.user:User
98 98 ziggurat_foundations.sign_in.username_key = sign_in_user_name
99 99 ziggurat_foundations.sign_in.password_key = sign_in_user_password
100 100 ziggurat_foundations.sign_in.came_from_key = came_from
101 101
102 102 #cometd
103 103 cometd.server = http://127.0.0.1:8088/
104 104 cometd.secret = secret
105 105 cometd.ws_url = http://127.0.0.1:8088/
106 106
107 107
108 108 # for celery
109 109 appenlight.api_key =
110 110 appenlight.transport_config =
111 111 appenlight.public_api_key =
112 112
113 113 celery.broker_type = redis
114 114 celery.broker_url = redis://localhost:6379/3
115 115 celery.concurrency = 4
116 116 celery.timezone = UTC
117 117
118 118
119 119 [filter:paste_prefix]
120 120 use = egg:PasteDeploy#prefix
121 121
122 122
123 123 [filter:appenlight_client]
124 124 use = egg:appenlight_client
125 125 appenlight.api_key =
126 126 appenlight.transport_config =
127 127 appenlight.report_local_vars = true
128 128 appenlight.report_404 = true
129 129 appenlight.logging.level = DEBUG
130 130 appenlight.timing.dbapi2_psycopg2 = 0.3
131 131
132 132
133 133 [pipeline:main]
134 134 pipeline =
135 135 paste_prefix
136 136 appenlight_client
137 137 appenlight
138 138
139 139
140 140
141 141 [server:main]
142 142 use = egg:waitress
143 143 host = 0.0.0.0
144 144 port = 6543
145 145
146 146 [server:main_prod]
147 147 use = egg:gunicorn#main
148 148 host = 0.0.0.0:6543, unix:/tmp/appenlight.sock
149 149 workers = 6
150 150 timeout = 90
151 151 #max_requests = 1000
152 152
153 153
154 154 # Begin logging configuration
155 155
156 156 [loggers]
157 157 keys = root, appenlight, sqlalchemy, elasticsearch
158 158
159 159 [handlers]
160 160 keys = console
161 161
162 162 [formatters]
163 163 keys = generic
164 164
165 165 [logger_root]
166 166 level = INFO
167 167 handlers = console
168 168
169 169 [logger_appenlight]
170 170 level = INFO
171 171 handlers =
172 172 qualname = appenlight
173 173
174 174 [logger_elasticsearch]
175 175 level = WARN
176 176 handlers =
177 177 qualname = elasticsearch
178 178
179 179 [logger_sqlalchemy]
180 180 level = WARN
181 181 handlers =
182 182 qualname = sqlalchemy.engine
183 183 # "level = INFO" logs SQL queries.
184 184 # "level = DEBUG" logs SQL queries and results.
185 185 # "level = WARN" logs neither. (Recommended for production systems.)
186 186
187 187 [handler_console]
188 188 class = StreamHandler
189 189 args = (sys.stderr,)
190 190 level = NOTSET
191 191 formatter = generic
192 192
193 193 [formatter_generic]
194 194 format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s
195 195
196 196 # End logging configuration
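
With the `aslist()` change in the first hunk, the `elasticsearch.nodes` value above can now name more than one node; the committed file still lists a single one. A small sketch, assuming hypothetical hostnames, of the value layouts Pyramid's `aslist()` accepts (space-separated on one line, or one URL per line):

```python
from pyramid.settings import aslist

# Hypothetical multi-node values; the committed ini files list only
# elasticsearch.nodes = http://127.0.0.1:9200
one_line = 'http://es1:9200 http://es2:9200'
multi_line = 'http://es1:9200\nhttp://es2:9200\nhttp://es3:9200'

# aslist() splits on lines and then on whitespace, so both layouts
# flatten to a plain list of node URLs.
print(aslist(one_line))    # ['http://es1:9200', 'http://es2:9200']
print(aslist(multi_line))  # ['http://es1:9200', 'http://es2:9200', 'http://es3:9200']
```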
@@ -1,182 +1,182 b''
1 1 [app:appenlight]
2 2 use = egg:appenlight
3 3 reload_templates = true
4 4 debug_authorization = true
5 5 debug_notfound = true
6 6 debug_routematch = true
7 7 debug_templates = true
8 8 default_locale_name = en
9 9 sqlalchemy.url = postgresql://test:test@localhost/appenlight_test
10 10 sqlalchemy.pool_size = 2
11 11 sqlalchemy.max_overflow = 5
12 12 sqlalchemy.echo = false
13 13 jinja2.directories = appenlight:templates
14 14 jinja2.filters = nl2br = appenlight.lib.jinja2_filters.nl2br
15 15
16 16
17 17 pyramid.includes = pyramid_debugtoolbar
18 18
19 19 appenlight.includes =
20 20
21 21 # encryption
22 22 encryption_secret = oEOikr_T98wTh_xLH3w8Se3kmbgAQYSM4poZvPosya0=
23 23
24 24 #redis
25 25 redis.url = redis://localhost:6379/0
26 26 redis.redlock.url = redis://localhost:6379/3
27 27
28 #solr
28 #elasticsearch
29 29 elasticsearch.nodes = http://127.0.0.1:9200
30 30
31 31 #dirs
32 32 webassets.dir = %(here)s/webassets/
33 33
34 34 #authtkt
35 35 authtkt.secure = false
36 36 authtkt.secret = SECRET
37 37 # session settings
38 38 redis.sessions.secret = SECRET
39 39 redis.sessions.timeout = 3600
40 40
41 41 # session cookie settings
42 42 redis.sessions.cookie_name = appenlight
43 43 redis.sessions.cookie_max_age = 2592000
44 44 redis.sessions.cookie_path = /
45 45 redis.sessions.cookie_domain =
46 46 redis.sessions.cookie_secure = False
47 47 redis.sessions.cookie_httponly = False
48 48 redis.sessions.cookie_on_exception = True
49 49 redis.sessions.prefix = appenlight:session:
50 50
51 51
52 52 #cache
53 53 cache.regions = default_term, second, short_term, long_term
54 54 cache.type = ext:memcached
55 55 cache.url = 127.0.0.1:11211
56 56 cache.lock_dir = %(here)s/data/cache/lock
57 57 cache.second.expire = 1
58 58 cache.short_term.expire = 60
59 59 cache.default_term.expire = 300
60 60
61 61 #mailing
62 62 mailing.app_url = https://appenlight.com
63 63 mailing.from_name = App Enlight LOCAL
64 64 mailing.from_email = no-reply@status.appenlight.com
65 65
66 66
67 67 ###
68 68 # Authomatic configuration
69 69 ###
70 70
71 71 authomatic.secret = secret
72 72 authomatic.pr.facebook.app_id =
73 73 authomatic.pr.facebook.secret =
74 74 authomatic.pr.twitter.key =
75 75 authomatic.pr.twitter.secret =
76 76 authomatic.pr.google.key =
77 77 authomatic.pr.google.secret =
78 78 authomatic.pr.github.key =
79 79 authomatic.pr.github.secret =
80 80 authomatic.pr.github.scope = repo, public_repo, user:email
81 81 authomatic.pr.bitbucket.key =
82 82 authomatic.pr.bitbucket.secret =
83 83
84 84 #ziggurat
85 85 ziggurat_foundations.model_locations.User = appenlight.models.user:User
86 86 ziggurat_foundations.sign_in.username_key = sign_in_user_name
87 87 ziggurat_foundations.sign_in.password_key = sign_in_user_password
88 88 ziggurat_foundations.sign_in.came_from_key = came_from
89 89
90 90 #cometd
91 91 cometd.server = http://127.0.0.1:8088/
92 92 cometd.secret = secret
93 93 cometd.ws_url = wss://127.0.0.1:8088/
94 94
95 95
96 96 # for celery
97 97 appenlight.api_key =
98 98 appenlight.transport_config = http://127.0.0.1:6543
99 99
100 100 celery.broker_type = redis
101 101 celery.broker_url = redis://localhost:6379/4
102 102 celery.concurrency = 4
103 103 celery.timezone = UTC
104 104 celery.always_eager = true
105 105
106 106 [filter:paste_prefix]
107 107 use = egg:PasteDeploy#prefix
108 108
109 109
110 110 [filter:appenlight_client]
111 111 use = egg:appenlight_client
112 112 appenlight.api_key =
113 113 appenlight.transport_config = http://127.0.0.1:6543
114 114 appenlight.report_local_vars = true
115 115 appenlight.report_404 = true
116 116 appenlight.timing.dbapi2_psycopg2 = 0.3
117 117
118 118
119 119 [pipeline:main]
120 120 pipeline =
121 121 paste_prefix
122 122 appenlight_client
123 123 appenlight
124 124
125 125
126 126
127 127 [server:main]
128 128 use = egg:waitress
129 129 host = 0.0.0.0
130 130 port = 6543
131 131
132 132 [server:main_prod]
133 133 use = egg:gunicorn#main
134 134 host = 0.0.0.0:6543, unix:/tmp/appenlight.sock
135 135 workers = 6
136 136 timeout = 90
137 137 #max_requests = 1000
138 138
139 139
140 140 # Begin logging configuration
141 141
142 142 [loggers]
143 143 keys = root, appenlight, sqlalchemy, elasticsearch
144 144
145 145 [handlers]
146 146 keys = console
147 147
148 148 [formatters]
149 149 keys = generic
150 150
151 151 [logger_root]
152 152 level = INFO
153 153 handlers = console
154 154
155 155 [logger_appenlight]
156 156 level = INFO
157 157 handlers =
158 158 qualname = appenlight
159 159
160 160 [logger_elasticsearch]
161 161 level = WARN
162 162 handlers =
163 163 qualname = elasticsearch
164 164
165 165 [logger_sqlalchemy]
166 166 level = WARN
167 167 handlers =
168 168 qualname = sqlalchemy.engine
169 169 # "level = INFO" logs SQL queries.
170 170 # "level = DEBUG" logs SQL queries and results.
171 171 # "level = WARN" logs neither. (Recommended for production systems.)
172 172
173 173 [handler_console]
174 174 class = StreamHandler
175 175 args = (sys.stderr,)
176 176 level = NOTSET
177 177 formatter = generic
178 178
179 179 [formatter_generic]
180 180 format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s
181 181
182 182 # End logging configuration