black: reformat source
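
This commit is a pure formatting pass: the black code formatter was run over the AppEnlight sources. Three of its conventions account for nearly every hunk below: string quotes are normalized to double quotes, a call or literal that fits within black's 88-column limit is collapsed onto one line, and one that does not fit is exploded to one element per line with a trailing ("magic") comma. One before/after pair taken from the diff itself:

    # before
    settings['cometd_servers'] = {'server': settings['cometd.server'],
                                  'secret': settings['cometd.secret']}

    # after
    settings["cometd_servers"] = {
        "server": settings["cometd.server"],
        "secret": settings["cometd.secret"],
    }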
--- a/setup.py
+++ b/setup.py
@@ -4,12 +4,12 @@ import re
 from setuptools import setup, find_packages

 here = os.path.abspath(os.path.dirname(__file__))
-README = open(os.path.join(here, 'README.rst')).read()
-CHANGES = open(os.path.join(here, 'CHANGELOG.rst')).read()
+README = open(os.path.join(here, "README.rst")).read()
+CHANGES = open(os.path.join(here, "CHANGELOG.rst")).read()

-REQUIREMENTS = open(os.path.join(here, 'requirements.txt')).readlines()
+REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines()

-compiled = re.compile('([^=><]*).*')
+compiled = re.compile("([^=><]*).*")


 def parse_req(req):
@@ -21,7 +21,8 @@ requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]

 def _get_meta_var(name, data, callback_handler=None):
     import re
-    matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
+
+    matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
     if matches:
         if not callable(callback_handler):
             callback_handler = lambda v: v
@@ -29,53 +30,60 @@ def _get_meta_var(name, data, callback_handler=None):
         return callback_handler(eval(matches.groups()[0]))


-with open(os.path.join(here, 'src', 'appenlight', '__init__.py'), 'r') as _meta:
+with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta:
     _metadata = _meta.read()

-with open(os.path.join(here, 'VERSION'), 'r') as _meta_version:
+with open(os.path.join(here, "VERSION"), "r") as _meta_version:
     __version__ = _meta_version.read().strip()

-__license__ = _get_meta_var('__license__', _metadata)
-__author__ = _get_meta_var('__author__', _metadata)
-__url__ = _get_meta_var('__url__', _metadata)
+__license__ = _get_meta_var("__license__", _metadata)
+__author__ = _get_meta_var("__author__", _metadata)
+__url__ = _get_meta_var("__url__", _metadata)

-found_packages = find_packages('src')
-found_packages.append('appenlight.migrations.versions')
-setup(name='appenlight',
-      description='appenlight',
-      long_description=README + '\n\n' + CHANGES,
-      classifiers=[
-          "Programming Language :: Python",
-          "Framework :: Pylons",
-          "Topic :: Internet :: WWW/HTTP",
-          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
-      ],
-      version=__version__,
-      license=__license__,
-      author=__author__,
-      url=__url__,
-      keywords='web wsgi bfg pylons pyramid',
-      package_dir={'': 'src'},
-      packages=found_packages,
-      include_package_data=True,
-      zip_safe=False,
-      test_suite='appenlight',
-      install_requires=requires,
-      extras_require={
-          "dev": ["coverage", "pytest", "pyramid", "tox", "mock", "pytest-mock", "webtest"],
-          "lint": ["black"],
-      },
-      entry_points={
-          'paste.app_factory': [
-              'main = appenlight:main'
-          ],
-          'console_scripts': [
-              'appenlight-cleanup = appenlight.scripts.cleanup:main',
-              'appenlight-initializedb = appenlight.scripts.initialize_db:main',
-              'appenlight-migratedb = appenlight.scripts.migratedb:main',
-              'appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main',
-              'appenlight-static = appenlight.scripts.static:main',
-              'appenlight-make-config = appenlight.scripts.make_config:main',
-          ]
-      }
-)
+found_packages = find_packages("src")
+found_packages.append("appenlight.migrations.versions")
+setup(
+    name="appenlight",
+    description="appenlight",
+    long_description=README + "\n\n" + CHANGES,
+    classifiers=[
+        "Programming Language :: Python",
+        "Framework :: Pylons",
+        "Topic :: Internet :: WWW/HTTP",
+        "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
+    ],
+    version=__version__,
+    license=__license__,
+    author=__author__,
+    url=__url__,
+    keywords="web wsgi bfg pylons pyramid",
+    package_dir={"": "src"},
+    packages=found_packages,
+    include_package_data=True,
+    zip_safe=False,
+    test_suite="appenlight",
+    install_requires=requires,
+    extras_require={
+        "dev": [
+            "coverage",
+            "pytest",
+            "pyramid",
+            "tox",
+            "mock",
+            "pytest-mock",
+            "webtest",
+        ],
+        "lint": ["black"],
+    },
+    entry_points={
+        "paste.app_factory": ["main = appenlight:main"],
+        "console_scripts": [
+            "appenlight-cleanup = appenlight.scripts.cleanup:main",
+            "appenlight-initializedb = appenlight.scripts.initialize_db:main",
+            "appenlight-migratedb = appenlight.scripts.migratedb:main",
+            "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main",
+            "appenlight-static = appenlight.scripts.static:main",
+            "appenlight-make-config = appenlight.scripts.make_config:main",
+        ],
+    },
+)
--- a/src/appenlight/__init__.py
+++ b/src/appenlight/__init__.py
@@ -38,15 +38,17 @@ from redlock import Redlock
 from sqlalchemy import engine_from_config

 from appenlight.celery import configure_celery
-from appenlight.lib.configurator import (CythonCompatConfigurator,
-                                         register_appenlight_plugin)
+from appenlight.lib.configurator import (
+    CythonCompatConfigurator,
+    register_appenlight_plugin,
+)
 from appenlight.lib import cache_regions
 from appenlight.lib.ext_json import json
 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy

-__license__ = 'Apache 2.0'
-__author__ = 'RhodeCode GmbH'
-__url__ = 'http://rhodecode.com'
+__license__ = "Apache 2.0"
+__author__ = "RhodeCode GmbH"
+__url__ = "http://rhodecode.com"
 __version__ = pkg_resources.get_distribution("appenlight").parsed_version

 json_renderer = JSON(serializer=json.dumps, indent=4)
@@ -59,7 +61,7 @@ def datetime_adapter(obj, request):


 def all_permissions_adapter(obj, request):
-    return '__all_permissions__'
+    return "__all_permissions__"


 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
@@ -70,91 +72,109 @@ def main(global_config, **settings):
     """ This function returns a Pyramid WSGI application.
     """
     auth_tkt_policy = AuthTktAuthenticationPolicy(
-        settings['authtkt.secret'],
-        hashalg='sha512',
+        settings["authtkt.secret"],
+        hashalg="sha512",
         callback=groupfinder,
         max_age=2592000,
-        secure=asbool(settings.get('authtkt.secure', 'false')))
-    auth_token_policy = AuthTokenAuthenticationPolicy(
-        callback=groupfinder
-    )
+        secure=asbool(settings.get("authtkt.secure", "false")),
+    )
+    auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder)
     authorization_policy = ACLAuthorizationPolicy()
     authentication_policy = AuthenticationStackPolicy()
-    authentication_policy.add_policy('auth_tkt', auth_tkt_policy)
-    authentication_policy.add_policy('auth_token', auth_token_policy)
+    authentication_policy.add_policy("auth_tkt", auth_tkt_policy)
+    authentication_policy.add_policy("auth_token", auth_token_policy)
     # set crypto key
-    encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
+    encryption.ENCRYPTION_SECRET = settings.get("encryption_secret")
     # import this later so encyption key can be monkeypatched
     from appenlight.models import DBSession, register_datastores

     # registration
-    settings['appenlight.disable_registration'] = asbool(
-        settings.get('appenlight.disable_registration'))
+    settings["appenlight.disable_registration"] = asbool(
+        settings.get("appenlight.disable_registration")
+    )

     # update config with cometd info
-    settings['cometd_servers'] = {'server': settings['cometd.server'],
-                                  'secret': settings['cometd.secret']}
+    settings["cometd_servers"] = {
+        "server": settings["cometd.server"],
+        "secret": settings["cometd.secret"],
+    }

     # Create the Pyramid Configurator.
-    settings['_mail_url'] = settings['mailing.app_url']
+    settings["_mail_url"] = settings["mailing.app_url"]
     config = CythonCompatConfigurator(
         settings=settings,
         authentication_policy=authentication_policy,
         authorization_policy=authorization_policy,
-        root_factory='appenlight.security.RootFactory',
-        default_permission='view')
+        root_factory="appenlight.security.RootFactory",
+        default_permission="view",
+    )
     # custom registry variables

     # resource type information
-    config.registry.resource_types = ['resource', 'application']
+    config.registry.resource_types = ["resource", "application"]
     # plugin information
     config.registry.appenlight_plugins = {}

-    config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN')
-    config.add_view_deriver('appenlight.predicates.csrf_view',
-                            name='csrf_view')
+    config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN")
+    config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view")

     # later, when config is available
-    dogpile_config = {'url': settings['redis.url'],
-                      "redis_expiration_time": 86400,
-                      "redis_distributed_lock": True}
+    dogpile_config = {
+        "url": settings["redis.url"],
+        "redis_expiration_time": 86400,
+        "redis_distributed_lock": True,
+    }
     cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
     config.registry.cache_regions = cache_regions.regions
-    engine = engine_from_config(settings, 'sqlalchemy.',
-                                json_serializer=json.dumps)
+    engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps)
     DBSession.configure(bind=engine)

     # json rederer that serializes datetime
-    config.add_renderer('json', json_renderer)
-    config.add_request_method('appenlight.lib.request.es_conn', 'es_conn', property=True)
-    config.add_request_method('appenlight.lib.request.get_user', 'user',
-                              reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.get_csrf_token',
-                              'csrf_token', reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.safe_json_body',
-                              'safe_json_body', reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.unsafe_json_body',
-                              'unsafe_json_body', reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.add_flash_to_headers',
-                              'add_flash_to_headers')
-    config.add_request_method('appenlight.lib.request.get_authomatic',
-                              'authomatic', reify=True)
+    config.add_renderer("json", json_renderer)
+    config.add_request_method(
+        "appenlight.lib.request.es_conn", "es_conn", property=True
+    )
+    config.add_request_method(
+        "appenlight.lib.request.get_user", "user", reify=True, property=True
+    )
+    config.add_request_method(
+        "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True
+    )
+    config.add_request_method(
+        "appenlight.lib.request.safe_json_body",
+        "safe_json_body",
+        reify=True,
+        property=True,
+    )
+    config.add_request_method(
+        "appenlight.lib.request.unsafe_json_body",
+        "unsafe_json_body",
+        reify=True,
+        property=True,
+    )
+    config.add_request_method(
+        "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers"
+    )
+    config.add_request_method(
+        "appenlight.lib.request.get_authomatic", "authomatic", reify=True
+    )

-    config.include('pyramid_redis_sessions')
-    config.include('pyramid_tm')
-    config.include('pyramid_jinja2')
-    config.include('pyramid_mailer')
-    config.include('appenlight_client.ext.pyramid_tween')
-    config.include('ziggurat_foundations.ext.pyramid.sign_in')
-    es_server_list = aslist(settings['elasticsearch.nodes'])
-    redis_url = settings['redis.url']
-    log.warning('Elasticsearch server list: {}'.format(es_server_list))
-    log.warning('Redis server: {}'.format(redis_url))
+    config.include("pyramid_redis_sessions")
+    config.include("pyramid_tm")
+    config.include("pyramid_jinja2")
+    config.include("pyramid_mailer")
+    config.include("appenlight_client.ext.pyramid_tween")
+    config.include("ziggurat_foundations.ext.pyramid.sign_in")
+    es_server_list = aslist(settings["elasticsearch.nodes"])
+    redis_url = settings["redis.url"]
+    log.warning("Elasticsearch server list: {}".format(es_server_list))
+    log.warning("Redis server: {}".format(redis_url))
     config.registry.es_conn = Elasticsearch(es_server_list)
     config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)

-    config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url'], ],
-                                            retry_count=0, retry_delay=0)
+    config.registry.redis_lockmgr = Redlock(
+        [settings["redis.redlock.url"]], retry_count=0, retry_delay=0
+    )
     # mailer bw compat
     config.registry.mailer = config.registry.getUtility(IMailer)

@@ -163,47 +183,56 @@ def main(global_config, **settings):
     config.set_session_factory(session_factory)

     # Configure renderers and event subscribers
-    config.add_jinja2_extension('jinja2.ext.loopcontrols')
-    config.add_jinja2_search_path('appenlight:templates')
+    config.add_jinja2_extension("jinja2.ext.loopcontrols")
+    config.add_jinja2_search_path("appenlight:templates")
     # event subscribers
-    config.add_subscriber("appenlight.subscribers.application_created",
-                          "pyramid.events.ApplicationCreated")
-    config.add_subscriber("appenlight.subscribers.add_renderer_globals",
-                          "pyramid.events.BeforeRender")
-    config.add_subscriber('appenlight.subscribers.new_request',
-                          'pyramid.events.NewRequest')
-    config.add_view_predicate('context_type_class',
-                              'appenlight.predicates.contextTypeClass')
+    config.add_subscriber(
+        "appenlight.subscribers.application_created",
+        "pyramid.events.ApplicationCreated",
+    )
+    config.add_subscriber(
+        "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender"
+    )
+    config.add_subscriber(
+        "appenlight.subscribers.new_request", "pyramid.events.NewRequest"
+    )
+    config.add_view_predicate(
+        "context_type_class", "appenlight.predicates.contextTypeClass"
+    )

-    register_datastores(es_conn=config.registry.es_conn,
-                        redis_conn=config.registry.redis_conn,
-                        redis_lockmgr=config.registry.redis_lockmgr)
+    register_datastores(
+        es_conn=config.registry.es_conn,
+        redis_conn=config.registry.redis_conn,
+        redis_lockmgr=config.registry.redis_lockmgr,
+    )

     # base stuff and scan

     # need to ensure webassets exists otherwise config.override_asset()
     # throws exception
-    if not os.path.exists(settings['webassets.dir']):
-        os.mkdir(settings['webassets.dir'])
-    config.add_static_view(path='appenlight:webassets',
-                           name='static', cache_max_age=3600)
-    config.override_asset(to_override='appenlight:webassets/',
-                          override_with=settings['webassets.dir'])
+    if not os.path.exists(settings["webassets.dir"]):
+        os.mkdir(settings["webassets.dir"])
+    config.add_static_view(
+        path="appenlight:webassets", name="static", cache_max_age=3600
+    )
+    config.override_asset(
+        to_override="appenlight:webassets/", override_with=settings["webassets.dir"]
+    )

-    config.include('appenlight.views')
-    config.include('appenlight.views.admin')
-    config.scan(ignore=['appenlight.migrations', 'appenlight.scripts',
-                        'appenlight.tests'])
+    config.include("appenlight.views")
+    config.include("appenlight.views.admin")
+    config.scan(
+        ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"]
+    )

-    config.add_directive('register_appenlight_plugin',
-                         register_appenlight_plugin)
+    config.add_directive("register_appenlight_plugin", register_appenlight_plugin)

-    for entry_point in iter_entry_points(group='appenlight.plugins'):
+    for entry_point in iter_entry_points(group="appenlight.plugins"):
         plugin = entry_point.load()
         plugin.includeme(config)

     # include other appenlight plugins explictly if needed
-    includes = aslist(settings.get('appenlight.includes', []))
+    includes = aslist(settings.get("appenlight.includes", []))
     for inc in includes:
         config.include(inc)

@@ -211,8 +240,8 @@ def main(global_config, **settings):

     def pre_commit():
         jinja_env = config.get_jinja2_environment()
-        jinja_env.filters['tojson'] = json.dumps
-        jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe
+        jinja_env.filters["tojson"] = json.dumps
+        jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe

     config.action(None, pre_commit, order=PHASE3_CONFIG + 999)

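The add_request_method hunks above show the two shapes black emits: an argument list that fits within the 88-column limit stays on a single indented line (the es_conn and get_user calls), while one that does not fit gets one argument per line plus a trailing comma (safe_json_body, unsafe_json_body). When a region must keep its hand alignment through such a pass, black honors format pragmas; a minimal sketch, not from this codebase:

    # fmt: off
    MATRIX = [
        1, 0,
        0, 1,
    ]
    # fmt: on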
--- a/src/appenlight/celery/__init__.py
+++ b/src/appenlight/celery/__init__.py
@@ -34,15 +34,23 @@ from appenlight_client.ext.celery import register_signals

 log = logging.getLogger(__name__)

-register('date_json', json_dumps, json_loads,
-         content_type='application/x-date_json',
-         content_encoding='utf-8')
+register(
+    "date_json",
+    json_dumps,
+    json_loads,
+    content_type="application/x-date_json",
+    content_encoding="utf-8",
+)

 celery = Celery()

-celery.user_options['preload'].add(
-    Option('--ini', dest='ini', default=None,
-           help='Specifies pyramid configuration file location.')
+celery.user_options["preload"].add(
+    Option(
+        "--ini",
+        dest="ini",
+        default=None,
+        help="Specifies pyramid configuration file location.",
+    )
 )


@@ -51,19 +59,21 @@ def on_preload_parsed(options, **kwargs):
     """
     This actually configures celery from pyramid config file
     """
-    celery.conf['INI_PYRAMID'] = options['ini']
+    celery.conf["INI_PYRAMID"] = options["ini"]
     import appenlight_client.client as e_client
-    ini_location = options['ini']
+
+    ini_location = options["ini"]
     if not ini_location:
-        raise Exception('You need to pass pyramid ini location using '
-                        '--ini=filename.ini argument to the worker')
+        raise Exception(
+            "You need to pass pyramid ini location using "
+            "--ini=filename.ini argument to the worker"
+        )
     env = bootstrap(ini_location[0])
-    api_key = env['request'].registry.settings['appenlight.api_key']
-    tr_config = env['request'].registry.settings.get(
-        'appenlight.transport_config')
-    CONFIG = e_client.get_config({'appenlight.api_key': api_key})
+    api_key = env["request"].registry.settings["appenlight.api_key"]
+    tr_config = env["request"].registry.settings.get("appenlight.transport_config")
+    CONFIG = e_client.get_config({"appenlight.api_key": api_key})
     if tr_config:
-        CONFIG['appenlight.transport_config'] = tr_config
+        CONFIG["appenlight.transport_config"] = tr_config
     APPENLIGHT_CLIENT = e_client.Client(CONFIG)
     # log.addHandler(APPENLIGHT_CLIENT.log_handler)
     register_signals(APPENLIGHT_CLIENT)
@@ -71,101 +81,101 @@ def on_preload_parsed(options, **kwargs):


 celery_config = {
-    'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
-    'CELERYD_TASK_TIME_LIMIT': 60,
-    'CELERYD_MAX_TASKS_PER_CHILD': 1000,
-    'CELERY_IGNORE_RESULT': True,
-    'CELERY_ACCEPT_CONTENT': ['date_json'],
-    'CELERY_TASK_SERIALIZER': 'date_json',
-    'CELERY_RESULT_SERIALIZER': 'date_json',
-    'BROKER_URL': None,
-    'CELERYD_CONCURRENCY': None,
-    'CELERY_TIMEZONE': None,
-    'CELERYBEAT_SCHEDULE': {
-        'alerting_reports': {
-            'task': 'appenlight.celery.tasks.alerting_reports',
-            'schedule': timedelta(seconds=60)
+    "CELERY_IMPORTS": ["appenlight.celery.tasks"],
+    "CELERYD_TASK_TIME_LIMIT": 60,
+    "CELERYD_MAX_TASKS_PER_CHILD": 1000,
+    "CELERY_IGNORE_RESULT": True,
+    "CELERY_ACCEPT_CONTENT": ["date_json"],
+    "CELERY_TASK_SERIALIZER": "date_json",
+    "CELERY_RESULT_SERIALIZER": "date_json",
+    "BROKER_URL": None,
+    "CELERYD_CONCURRENCY": None,
+    "CELERY_TIMEZONE": None,
+    "CELERYBEAT_SCHEDULE": {
+        "alerting_reports": {
+            "task": "appenlight.celery.tasks.alerting_reports",
+            "schedule": timedelta(seconds=60),
         },
-        'close_alerts': {
-            'task': 'appenlight.celery.tasks.close_alerts',
-            'schedule': timedelta(seconds=60)
-        }
-    }
+        "close_alerts": {
+            "task": "appenlight.celery.tasks.close_alerts",
+            "schedule": timedelta(seconds=60),
+        },
+    },
 }
 celery.config_from_object(celery_config)


 def configure_celery(pyramid_registry):
     settings = pyramid_registry.settings
-    celery_config['BROKER_URL'] = settings['celery.broker_url']
-    celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
-    celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
+    celery_config["BROKER_URL"] = settings["celery.broker_url"]
+    celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"]
+    celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"]

-    notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
+    notifications_seconds = int(
+        settings.get("tasks.notifications_reports.interval", 60)
+    )

-    celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
-        'task': 'appenlight.celery.tasks.notifications_reports',
-        'schedule': timedelta(seconds=notifications_seconds)
+    celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = {
+        "task": "appenlight.celery.tasks.notifications_reports",
+        "schedule": timedelta(seconds=notifications_seconds),
     }

-    celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
-        'task': 'appenlight.celery.tasks.daily_digest',
-        'schedule': crontab(minute=1, hour='4,12,20')
+    celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = {
+        "task": "appenlight.celery.tasks.daily_digest",
+        "schedule": crontab(minute=1, hour="4,12,20"),
     }

-    if asbool(settings.get('celery.always_eager')):
-        celery_config['CELERY_ALWAYS_EAGER'] = True
-        celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
+    if asbool(settings.get("celery.always_eager")):
+        celery_config["CELERY_ALWAYS_EAGER"] = True
+        celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True

     for plugin in pyramid_registry.appenlight_plugins.values():
-        if plugin.get('celery_tasks'):
-            celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
-        if plugin.get('celery_beats'):
-            for name, config in plugin['celery_beats']:
-                celery_config['CELERYBEAT_SCHEDULE'][name] = config
+        if plugin.get("celery_tasks"):
+            celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"])
+        if plugin.get("celery_beats"):
+            for name, config in plugin["celery_beats"]:
+                celery_config["CELERYBEAT_SCHEDULE"][name] = config
     celery.config_from_object(celery_config)


 @task_prerun.connect
 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         env = celery.pyramid
-        env = prepare(registry=env['request'].registry)
-        proper_base_url = env['request'].registry.settings['mailing.app_url']
-        tmp_req = Request.blank('/', base_url=proper_base_url)
+        env = prepare(registry=env["request"].registry)
+        proper_base_url = env["request"].registry.settings["mailing.app_url"]
+        tmp_req = Request.blank("/", base_url=proper_base_url)
         # ensure tasks generate url for right domain from config
-        env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
-        env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
-        env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
-        env['request'].environ['wsgi.url_scheme'] = \
-            tmp_req.environ['wsgi.url_scheme']
+        env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"]
+        env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"]
+        env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"]
+        env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"]
     get_current_request().tm.begin()


 @task_success.connect
 def task_success_signal(result, **kwargs):
     get_current_request().tm.commit()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()


 @task_retry.connect
 def task_retry_signal(request, reason, einfo, **kwargs):
     get_current_request().tm.abort()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()


 @task_failure.connect
-def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
-                        **kwaargs):
+def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs):
     get_current_request().tm.abort()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()


 @task_revoked.connect
 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
     get_current_request().tm.abort()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()
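
One contract worth spelling out from configure_celery above: each registered plugin may expose celery_beats as an iterable of (name, schedule-dict) pairs, which are merged verbatim into CELERYBEAT_SCHEDULE. A hypothetical plugin entry matching the shape of the built-in schedules (all names invented for illustration):

    celery_beats = [
        (
            "my_plugin_cleanup",  # hypothetical beat name
            {
                "task": "appenlight_my_plugin.celery.tasks.cleanup",
                "schedule": timedelta(minutes=60),
            },
        )
    ]

Relatedly, on_preload_parsed makes the worker-side requirement explicit: starting a worker without --ini=filename.ini raises immediately, because the Pyramid environment is bootstrapped from that file.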
--- a/src/appenlight/lib/ext_json.py
+++ b/src/appenlight/lib/ext_json.py
@@ -17,38 +17,29 @@
 import json
 from datetime import datetime, date, timedelta

-DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
+DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"


 class DateEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, datetime):
-            return {
-                '__type__': '__datetime__',
-                'iso': obj.strftime(DATE_FORMAT)
-            }
+            return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
         elif isinstance(obj, date):
-            return {
-                '__type__': '__date__',
-                'iso': obj.strftime(DATE_FORMAT)
-            }
+            return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)}
         elif isinstance(obj, timedelta):
-            return {
-                '__type__': '__timedelta__',
-                'seconds': obj.total_seconds()
-            }
+            return {"__type__": "__timedelta__", "seconds": obj.total_seconds()}
         else:
             return json.JSONEncoder.default(self, obj)


 def date_decoder(dct):
-    if '__type__' in dct:
-        if dct['__type__'] == '__datetime__':
-            return datetime.strptime(dct['iso'], DATE_FORMAT)
-        elif dct['__type__'] == '__date__':
-            return datetime.strptime(dct['iso'], DATE_FORMAT).date()
-        elif dct['__type__'] == '__timedelta__':
-            return timedelta(seconds=dct['seconds'])
+    if "__type__" in dct:
+        if dct["__type__"] == "__datetime__":
+            return datetime.strptime(dct["iso"], DATE_FORMAT)
+        elif dct["__type__"] == "__date__":
+            return datetime.strptime(dct["iso"], DATE_FORMAT).date()
+        elif dct["__type__"] == "__timedelta__":
+            return timedelta(seconds=dct["seconds"])
     return dct


@@ -57,4 +48,4 @@ def json_dumps(obj):


 def json_loads(obj):
-    return json.loads(obj.decode('utf8'), object_hook=date_decoder)
+    return json.loads(obj.decode("utf8"), object_hook=date_decoder)
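
The encoder/decoder pair above backs the date_json kombu serializer registered in appenlight/celery/__init__.py, which is what lets datetime and timedelta values cross the broker intact. A minimal round-trip sketch, assuming it runs inside this module:

    value = {"when": datetime(2019, 1, 1, 12, 30), "window": timedelta(hours=2)}
    payload = json.dumps(value, cls=DateEncoder)
    restored = json.loads(payload, object_hook=date_decoder)
    assert restored == value  # both values come back with their original types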
--- a/src/appenlight/celery/tasks.py
+++ b/src/appenlight/celery/tasks.py
@@ -51,9 +51,11 @@ from appenlight.lib.enums import ReportType

 log = get_task_logger(__name__)

-sample_boundries = list(range(100, 1000, 100)) + \
-                   list(range(1000, 10000, 1000)) + \
-                   list(range(10000, 100000, 5000))
+sample_boundries = (
+    list(range(100, 1000, 100))
+    + list(range(1000, 10000, 1000))
+    + list(range(10000, 100000, 5000))
+)


 def pick_sample(total_occurences, report_type=None):
@@ -70,9 +72,9 @@ def pick_sample(total_occurences, report_type=None):

 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
 def test_exception_task():
-    log.error('test celery log', extra={'location': 'celery'})
-    log.warning('test celery log', extra={'location': 'celery'})
-    raise Exception('Celery exception test')
+    log.error("test celery log", extra={"location": "celery"})
+    log.warning("test celery log", extra={"location": "celery"})
+    raise Exception("Celery exception test")


 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
@@ -81,9 +83,9 @@ def test_retry_exception_task():
         import time

         time.sleep(1.3)
-        log.error('test retry celery log', extra={'location': 'celery'})
-        log.warning('test retry celery log', extra={'location': 'celery'})
-        raise Exception('Celery exception test')
+        log.error("test retry celery log", extra={"location": "celery"})
+        log.warning("test retry celery log", extra={"location": "celery"})
+        raise Exception("Celery exception test")
     except Exception as exc:
         if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
             raise
@@ -92,7 +94,7 @@ def test_retry_exception_task():

 @celery.task(queue="reports", default_retry_delay=600, max_retries=144)
 def add_reports(resource_id, request_params, dataset, **kwargs):
-    proto_version = parse_proto(request_params.get('protocol_version', ''))
+    proto_version = parse_proto(request_params.get("protocol_version", ""))
     current_time = datetime.utcnow().replace(second=0, microsecond=0)
     try:
         # we will store solr docs here for single insert
@@ -114,22 +116,26 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
             report_group = ReportGroupService.by_hash_and_resource(
                 report.resource_id,
                 report.grouping_hash,
-                since_when=datetime.utcnow().date().replace(day=1)
+                since_when=datetime.utcnow().date().replace(day=1),
             )
-            occurences = report_data.get('occurences', 1)
+            occurences = report_data.get("occurences", 1)
             if not report_group:
                 # total reports will be +1 moment later
-                report_group = ReportGroup(grouping_hash=report.grouping_hash,
-                                           occurences=0, total_reports=0,
-                                           last_report=0,
-                                           priority=report.priority,
-                                           error=report.error,
-                                           first_timestamp=report.start_time)
+                report_group = ReportGroup(
+                    grouping_hash=report.grouping_hash,
+                    occurences=0,
+                    total_reports=0,
+                    last_report=0,
+                    priority=report.priority,
+                    error=report.error,
+                    first_timestamp=report.start_time,
+                )
                 report_group._skip_ft_index = True
                 report_group.report_type = report.report_type
             report.report_group_time = report_group.first_timestamp
-            add_sample = pick_sample(report_group.occurences,
-                                     report_type=report_group.report_type)
+            add_sample = pick_sample(
+                report_group.occurences, report_type=report_group.report_type
+            )
             if add_sample:
                 resource.report_groups.append(report_group)
                 report_group.reports.append(report)
@@ -144,28 +150,26 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
                 for s_call in slow_calls:
                     if s_call.partition_id not in es_slow_calls_docs:
                         es_slow_calls_docs[s_call.partition_id] = []
-                    es_slow_calls_docs[s_call.partition_id].append(
-                        s_call.es_doc())
+                    es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc())
                 # try generating new stat rows if needed
             else:
                 # required for postprocessing to not fail later
                 report.report_group = report_group

-            stat_row = ReportService.generate_stat_rows(
-                report, resource, report_group)
+            stat_row = ReportService.generate_stat_rows(report, resource, report_group)
             if stat_row.partition_id not in es_reports_stats_rows:
                 es_reports_stats_rows[stat_row.partition_id] = []
-            es_reports_stats_rows[stat_row.partition_id].append(
-                stat_row.es_doc())
+            es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc())

             # see if we should mark 10th occurence of report
             last_occurences_10 = int(math.floor(report_group.occurences / 10))
-            curr_occurences_10 = int(math.floor(
-                (report_group.occurences + report.occurences) / 10))
-            last_occurences_100 = int(
-                math.floor(report_group.occurences / 100))
-            curr_occurences_100 = int(math.floor(
-                (report_group.occurences + report.occurences) / 100))
+            curr_occurences_10 = int(
+                math.floor((report_group.occurences + report.occurences) / 10)
+            )
+            last_occurences_100 = int(math.floor(report_group.occurences / 100))
+            curr_occurences_100 = int(
+                math.floor((report_group.occurences + report.occurences) / 100)
+            )
             notify_occurences_10 = last_occurences_10 != curr_occurences_10
             notify_occurences_100 = last_occurences_100 != curr_occurences_100
             report_group.occurences = ReportGroup.occurences + occurences
@@ -178,39 +182,47 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
             if added_details:
                 report_group.total_reports = ReportGroup.total_reports + 1
                 report_group.last_report = report.id
-            report_group.set_notification_info(notify_10=notify_occurences_10,
-                                               notify_100=notify_occurences_100)
+            report_group.set_notification_info(
+                notify_10=notify_occurences_10, notify_100=notify_occurences_100
+            )
             DBSession.flush()
             report_group.get_report().notify_channel(report_group)
             if report_group.partition_id not in es_report_group_docs:
                 es_report_group_docs[report_group.partition_id] = []
             es_report_group_docs[report_group.partition_id].append(
-                report_group.es_doc())
+                report_group.es_doc()
+            )

-        action = 'REPORT'
-        log_msg = '%s: %s %s, client: %s, proto: %s' % (
+        action = "REPORT"
+        log_msg = "%s: %s %s, client: %s, proto: %s" % (
             action,
-            report_data.get('http_status', 'unknown'),
+            report_data.get("http_status", "unknown"),
             str(resource),
-            report_data.get('client'),
-            proto_version)
+            report_data.get("client"),
+            proto_version,
+        )
         log.info(log_msg)
         total_reports = len(dataset)
         redis_pipeline = Datastores.redis.pipeline(transaction=False)
-        key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
+        key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
         redis_pipeline.incr(key, total_reports)
         redis_pipeline.expire(key, 3600 * 24)
-        key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
-            resource.owner_user_id, current_time)
+        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
+            resource.owner_user_id, current_time
+        )
         redis_pipeline.incr(key, total_reports)
         redis_pipeline.expire(key, 3600)
-        key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format(
-            resource_id, current_time.replace(minute=0))
+        key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
+            resource_id, current_time.replace(minute=0)
+        )
         redis_pipeline.incr(key, total_reports)
         redis_pipeline.expire(key, 3600 * 24 * 7)
         redis_pipeline.sadd(
-            REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
-                current_time.replace(minute=0)), resource_id)
+            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
+                current_time.replace(minute=0)
+            ),
+            resource_id,
+        )
         redis_pipeline.execute()

         add_reports_es(es_report_group_docs, es_report_docs)
@@ -227,11 +239,11 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_reports_es(report_group_docs, report_docs):
     for k, v in report_group_docs.items():
-        to_update = {'_index': k, '_type': 'report_group'}
+        to_update = {"_index": k, "_type": "report_group"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)
     for k, v in report_docs.items():
-        to_update = {'_index': k, '_type': 'report'}
+        to_update = {"_index": k, "_type": "report"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)

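Every *_es task in this file follows the same bulk pattern: stamp each document with its routing metadata (_index, _type), then hand the whole list to the helper so the batch is sent in a single request. What elasticsearch.helpers.bulk receives after the update step, with an illustrative index name:

    docs = [
        {"_index": "rcae_r_2019_01", "_type": "report", "report_id": 1},
        {"_index": "rcae_r_2019_01", "_type": "report", "report_id": 2},
    ]
    elasticsearch.helpers.bulk(Datastores.es, docs)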
@@ -239,7 +251,7 @@ def add_reports_es(report_group_docs, report_docs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_reports_slow_calls_es(es_docs):
     for k, v in es_docs.items():
-        to_update = {'_index': k, '_type': 'log'}
+        to_update = {"_index": k, "_type": "log"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)

@@ -247,14 +259,14 @@ def add_reports_slow_calls_es(es_docs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_reports_stats_rows_es(es_docs):
     for k, v in es_docs.items():
-        to_update = {'_index': k, '_type': 'log'}
+        to_update = {"_index": k, "_type": "log"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)


 @celery.task(queue="logs", default_retry_delay=600, max_retries=144)
 def add_logs(resource_id, request_params, dataset, **kwargs):
-    proto_version = request_params.get('protocol_version')
+    proto_version = request_params.get("protocol_version")
     current_time = datetime.utcnow().replace(second=0, microsecond=0)

     try:
@@ -264,16 +276,15 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
         ns_pairs = []
         for entry in dataset:
             # gather pk and ns so we can remove older versions of row later
-            if entry['primary_key'] is not None:
-                ns_pairs.append({"pk": entry['primary_key'],
-                                 "ns": entry['namespace']})
+            if entry["primary_key"] is not None:
+                ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]})
             log_entry = Log()
             log_entry.set_data(entry, resource=resource)
             log_entry._skip_ft_index = True
             resource.logs.append(log_entry)
             DBSession.flush()
             # insert non pk rows first
-            if entry['primary_key'] is None:
+            if entry["primary_key"] is None:
                 es_docs[log_entry.partition_id].append(log_entry.es_doc())

         # 2nd pass to delete all log entries from db foe same pk/ns pair
@@ -282,7 +293,8 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
             es_docs = collections.defaultdict(list)
             es_docs_to_delete = collections.defaultdict(list)
             found_pkey_logs = LogService.query_by_primary_key_and_namespace(
-                list_of_pairs=ns_pairs)
+                list_of_pairs=ns_pairs
+            )
             log_dict = {}
             for log_entry in found_pkey_logs:
                 log_key = (log_entry.primary_key, log_entry.namespace)
@@ -299,51 +311,58 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
                     ids_to_delete.append(e.log_id)
                     es_docs_to_delete[e.partition_id].append(e.delete_hash)

-                es_docs_to_delete[log_entry.partition_id].append(
-                    log_entry.delete_hash)
+                es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash)

                 es_docs[log_entry.partition_id].append(log_entry.es_doc())

             if ids_to_delete:
-                query = DBSession.query(Log).filter(
-                    Log.log_id.in_(ids_to_delete))
+                query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete))
                 query.delete(synchronize_session=False)
             if es_docs_to_delete:
                 # batch this to avoid problems with default ES bulk limits
                 for es_index in es_docs_to_delete.keys():
                     for batch in in_batches(es_docs_to_delete[es_index], 20):
-                        query = {"query": {'terms': {'delete_hash': batch}}}
+                        query = {"query": {"terms": {"delete_hash": batch}}}

                         try:
                             Datastores.es.transport.perform_request(
-                                "DELETE", '/{}/{}/_query'.format(es_index, 'log'), body=query)
+                                "DELETE",
+                                "/{}/{}/_query".format(es_index, "log"),
+                                body=query,
+                            )
                         except elasticsearch.exceptions.NotFoundError as exc:
-                            msg = 'skipping index {}'.format(es_index)
+                            msg = "skipping index {}".format(es_index)
                             log.info(msg)

         total_logs = len(dataset)

-        log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
+        log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % (
             str(resource),
             total_logs,
-            proto_version)
+            proto_version,
+        )
         log.info(log_msg)
         # mark_changed(session)
         redis_pipeline = Datastores.redis.pipeline(transaction=False)
-        key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
+        key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
         redis_pipeline.incr(key, total_logs)
         redis_pipeline.expire(key, 3600 * 24)
-        key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
-            resource.owner_user_id, current_time)
+        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
+            resource.owner_user_id, current_time
+        )
         redis_pipeline.incr(key, total_logs)
         redis_pipeline.expire(key, 3600)
-        key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format(
-            resource_id, current_time.replace(minute=0))
+        key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format(
+            resource_id, current_time.replace(minute=0)
+        )
         redis_pipeline.incr(key, total_logs)
         redis_pipeline.expire(key, 3600 * 24 * 7)
         redis_pipeline.sadd(
-            REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
-                current_time.replace(minute=0)), resource_id)
+            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
+                current_time.replace(minute=0)
+            ),
+            resource_id,
+        )
         redis_pipeline.execute()
         add_logs_es(es_docs)
         return True
@@ -357,7 +376,7 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_logs_es(es_docs):
     for k, v in es_docs.items():
-        to_update = {'_index': k, '_type': 'log'}
+        to_update = {"_index": k, "_type": "log"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)

@@ -371,45 +390,51 @@ def add_metrics(resource_id, request_params, dataset, proto_version):
         es_docs = []
         rows = []
         for metric in dataset:
-            tags = dict(metric['tags'])
-            server_n = tags.get('server_name', metric['server_name']).lower()
-            tags['server_name'] = server_n or 'unknown'
+            tags = dict(metric["tags"])
+            server_n = tags.get("server_name", metric["server_name"]).lower()
+            tags["server_name"] = server_n or "unknown"
             new_metric = Metric(
-                timestamp=metric['timestamp'],
+                timestamp=metric["timestamp"],
                 resource_id=resource.resource_id,
-                namespace=metric['namespace'],
-                tags=tags)
+                namespace=metric["namespace"],
+                tags=tags,
+            )
             rows.append(new_metric)
             es_docs.append(new_metric.es_doc())
         session = DBSession()
         session.bulk_save_objects(rows)
         session.flush()

-        action = 'METRICS'
-        metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
+        action = "METRICS"
+        metrics_msg = "%s: %s, metrics: %s, proto:%s" % (
             action,
             str(resource),
             len(dataset),
-            proto_version
+            proto_version,
         )
         log.info(metrics_msg)

         mark_changed(session)
         redis_pipeline = Datastores.redis.pipeline(transaction=False)
-        key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
+        key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
         redis_pipeline.incr(key, len(rows))
         redis_pipeline.expire(key, 3600 * 24)
-        key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
-            resource.owner_user_id, current_time)
+        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
+            resource.owner_user_id, current_time
+        )
         redis_pipeline.incr(key, len(rows))
         redis_pipeline.expire(key, 3600)
-        key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format(
-            resource_id, current_time.replace(minute=0))
+        key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format(
+            resource_id, current_time.replace(minute=0)
+        )
         redis_pipeline.incr(key, len(rows))
         redis_pipeline.expire(key, 3600 * 24 * 7)
         redis_pipeline.sadd(
-            REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
-                current_time.replace(minute=0)), resource_id)
+            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
+                current_time.replace(minute=0)
+            ),
+            resource_id,
+        )
         redis_pipeline.execute()
         add_metrics_es(es_docs)
         return True
@@ -423,8 +448,8 @@ def add_metrics(resource_id, request_params, dataset, proto_version):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_metrics_es(es_docs):
     for doc in es_docs:
-        partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d')
-        Datastores.es.index(partition, 'log', doc)
+        partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d")
+        Datastores.es.index(partition, "log", doc)


 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
@@ -435,10 +460,12 @@ def check_user_report_notifications(resource_id):
         application = ApplicationService.by_id(resource_id)
         if not application:
             return
-        error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
-            ReportType.error, resource_id)
-        slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
-            ReportType.slow, resource_id)
+        error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
+            ReportType.error, resource_id
+        )
+        slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
+            ReportType.slow, resource_id
+        )
         error_group_ids = Datastores.redis.smembers(error_key)
         slow_group_ids = Datastores.redis.smembers(slow_key)
         Datastores.redis.delete(error_key)
@@ -448,8 +475,7 @@ def check_user_report_notifications(resource_id):
         group_ids = err_gids + slow_gids
         occurence_dict = {}
         for g_id in group_ids:
-            key = REDIS_KEYS['counters']['report_group_occurences'].format(
-                g_id)
+            key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id)
             val = Datastores.redis.get(key)
             Datastores.redis.delete(key)
             if val:
@@ -460,14 +486,23 @@ def check_user_report_notifications(resource_id):
         report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))

         ApplicationService.check_for_groups_alert(
-            application, 'alert', report_groups=report_groups,
-            occurence_dict=occurence_dict)
-        users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
+            application,
+            "alert",
+            report_groups=report_groups,
+            occurence_dict=occurence_dict,
+        )
+        users = set(
+            [p.user for p in ResourceService.users_for_perm(application, "view")]
+        )
         report_groups = report_groups.all()
         for user in users:
-            UserService.report_notify(user, request, application,
-                                      report_groups=report_groups,
-                                      occurence_dict=occurence_dict)
+            UserService.report_notify(
+                user,
+                request,
+                application,
+                report_groups=report_groups,
+                occurence_dict=occurence_dict,
+            )
         for group in report_groups:
             # marks report_groups as notified
             if not group.notified:
@@ -485,12 +520,12 @@ def check_alerts(resource_id):
         application = ApplicationService.by_id(resource_id)
         if not application:
             return
-        error_key = REDIS_KEYS[
-            'reports_to_notify_per_type_per_app_alerting'].format(
-            ReportType.error, resource_id)
-        slow_key = REDIS_KEYS[
-            'reports_to_notify_per_type_per_app_alerting'].format(
-            ReportType.slow, resource_id)
+        error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
+            ReportType.error, resource_id
+        )
+        slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
+            ReportType.slow, resource_id
+        )
         error_group_ids = Datastores.redis.smembers(error_key)
         slow_group_ids = Datastores.redis.smembers(slow_key)
         Datastores.redis.delete(error_key)
@@ -500,9 +535,9 @@ def check_alerts(resource_id):
         group_ids = err_gids + slow_gids
         occurence_dict = {}
         for g_id in group_ids:
-            key = REDIS_KEYS['counters'][
-                'report_group_occurences_alerting'].format(
-                g_id)
+            key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(
+                g_id
+            )
             val = Datastores.redis.get(key)
             Datastores.redis.delete(key)
             if val:
@@ -513,8 +548,12 @@ def check_alerts(resource_id):
         report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))

         ApplicationService.check_for_groups_alert(
-            application, 'alert', report_groups=report_groups,
-            occurence_dict=occurence_dict, since_when=since_when)
+            application,
+            "alert",
+            report_groups=report_groups,
+            occurence_dict=occurence_dict,
+            since_when=since_when,
+        )
     except Exception as exc:
         print_traceback(log)
         raise
@@ -522,21 +561,21 @@ def check_alerts(resource_id):

 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
 def close_alerts():
-    log.warning('Checking alerts')
+    log.warning("Checking alerts")
     since_when = datetime.utcnow()
     try:
-        event_types = [Event.types['error_report_alert'],
-                       Event.types['slow_report_alert'], ]
-        statuses = [Event.statuses['active']]
+        event_types = [
+            Event.types["error_report_alert"],
569 Event.types["slow_report_alert"],
570 ]
571 statuses = [Event.statuses["active"]]
531 # get events older than 5 min
572 # get events older than 5 min
532 events = EventService.by_type_and_status(
573 events = EventService.by_type_and_status(
533 event_types,
574 event_types, statuses, older_than=(since_when - timedelta(minutes=5))
534 statuses,
575 )
535 older_than=(since_when - timedelta(minutes=5)))
536 for event in events:
576 for event in events:
537 # see if we can close them
577 # see if we can close them
538 event.validate_or_close(
578 event.validate_or_close(since_when=(since_when - timedelta(minutes=1)))
539 since_when=(since_when - timedelta(minutes=1)))
540 except Exception as exc:
579 except Exception as exc:
541 print_traceback(log)
580 print_traceback(log)
542 raise
581 raise
@@ -545,12 +584,18 b' def close_alerts():'
545 @celery.task(queue="default", default_retry_delay=600, max_retries=144)
584 @celery.task(queue="default", default_retry_delay=600, max_retries=144)
546 def update_tag_counter(tag_name, tag_value, count):
585 def update_tag_counter(tag_name, tag_value, count):
547 try:
586 try:
548 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
587 query = (
549 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
588 DBSession.query(Tag)
550 sa.types.TEXT))
589 .filter(Tag.name == tag_name)
551 query.update({'times_seen': Tag.times_seen + count,
590 .filter(
552 'last_timestamp': datetime.utcnow()},
591 sa.cast(Tag.value, sa.types.TEXT)
553 synchronize_session=False)
592 == sa.cast(json.dumps(tag_value), sa.types.TEXT)
593 )
594 )
595 query.update(
596 {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()},
597 synchronize_session=False,
598 )
554 session = DBSession()
599 session = DBSession()
555 mark_changed(session)
600 mark_changed(session)
556 return True
601 return True
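
update_tag_counter compares the JSON-typed Tag.value column by casting both sides to TEXT, which presumably sidesteps the need for native JSON equality in the database, then issues a bulk UPDATE without loading rows. The same idiom sketched against a simplified Tag model (names assumed, not the app's real module layout):

import json
from datetime import datetime

import sqlalchemy as sa

def bump_tag(session, Tag, tag_name, tag_value, count):
    query = (
        session.query(Tag)
        .filter(Tag.name == tag_name)
        .filter(
            # compare serialized JSON as plain text
            sa.cast(Tag.value, sa.types.TEXT)
            == sa.cast(json.dumps(tag_value), sa.types.TEXT)
        )
    )
    query.update(
        {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()},
        synchronize_session=False,  # bulk UPDATE; skip syncing the identity map
    )
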
@@ -566,8 +611,8 b' def update_tag_counters():'
566 """
611 """
567 Sets task to update counters for application tags
612 Sets task to update counters for application tags
568 """
613 """
569 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
614 tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1)
570 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
615 Datastores.redis.delete(REDIS_KEYS["seen_tag_list"])
571 c = collections.Counter(tags)
616 c = collections.Counter(tags)
572 for t_json, count in c.items():
617 for t_json, count in c.items():
573 tag_info = json.loads(t_json)
618 tag_info = json.loads(t_json)
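
The tags drained from the seen_tag_list Redis list arrive as raw JSON strings; collections.Counter collapses duplicates so each distinct tag gets a single update with an aggregate count. With made-up values:

import collections
import json

# what LRANGE on the seen-tag list might return (illustrative values)
tags = [
    b'{"name": "view", "value": "home"}',
    b'{"name": "view", "value": "home"}',
    b'{"name": "user_id", "value": 42}',
]
for t_json, count in collections.Counter(tags).items():
    tag_info = json.loads(t_json.decode("utf8"))  # redis returns bytes
    print(tag_info["name"], tag_info["value"], count)
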
@@ -580,28 +625,34 b' def daily_digest():'
580 Sends daily digest with top 50 error reports
625 Sends daily digest with top 50 error reports
581 """
626 """
582 request = get_current_request()
627 request = get_current_request()
583 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
628 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
584 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
629 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
585 since_when = datetime.utcnow() - timedelta(hours=8)
630 since_when = datetime.utcnow() - timedelta(hours=8)
586 log.warning('Generating daily digests')
631 log.warning("Generating daily digests")
587 for resource_id in apps:
632 for resource_id in apps:
588 resource_id = resource_id.decode('utf8')
633 resource_id = resource_id.decode("utf8")
589 end_date = datetime.utcnow().replace(microsecond=0, second=0)
634 end_date = datetime.utcnow().replace(microsecond=0, second=0)
590 filter_settings = {'resource': [resource_id],
635 filter_settings = {
591 'tags': [{'name': 'type',
636 "resource": [resource_id],
592 'value': ['error'], 'op': None}],
637 "tags": [{"name": "type", "value": ["error"], "op": None}],
593 'type': 'error', 'start_date': since_when,
638 "type": "error",
594 'end_date': end_date}
639 "start_date": since_when,
640 "end_date": end_date,
641 }
595
642
596 reports = ReportGroupService.get_trending(
643 reports = ReportGroupService.get_trending(
597 request, filter_settings=filter_settings, limit=50)
644 request, filter_settings=filter_settings, limit=50
645 )
598
646
599 application = ApplicationService.by_id(resource_id)
647 application = ApplicationService.by_id(resource_id)
600 if application:
648 if application:
601 users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
649 users = set(
650 [p.user for p in ResourceService.users_for_perm(application, "view")]
651 )
602 for user in users:
652 for user in users:
603 user.send_digest(request, application, reports=reports,
653 user.send_digest(
604 since_when=since_when)
654 request, application, reports=reports, since_when=since_when
655 )
605
656
606
657
607 @celery.task(queue="default")
658 @celery.task(queue="default")
@@ -610,11 +661,12 b' def notifications_reports():'
610 Loop that checks redis for info and then issues new tasks to celery to
661 Loop that checks redis for info and then issues new tasks to celery to
611 issue notifications
662 issue notifications
612 """
663 """
613 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
664 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
614 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
665 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
615 for app in apps:
666 for app in apps:
616 log.warning('Notify for app: %s' % app)
667 log.warning("Notify for app: %s" % app)
617 check_user_report_notifications.delay(app.decode('utf8'))
668 check_user_report_notifications.delay(app.decode("utf8"))
669
618
670
619 @celery.task(queue="default")
671 @celery.task(queue="default")
620 def alerting_reports():
672 def alerting_reports():
@@ -624,34 +676,33 b' def alerting_reports():'
624 - which applications should have new alerts opened
676 - which applications should have new alerts opened
625 """
677 """
626
678
627 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
679 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"])
628 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
680 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"])
629 for app in apps:
681 for app in apps:
630 log.warning('Notify for app: %s' % app)
682 log.warning("Notify for app: %s" % app)
631 check_alerts.delay(app.decode('utf8'))
683 check_alerts.delay(app.decode("utf8"))
632
684
633
685
634 @celery.task(queue="default", soft_time_limit=3600 * 4,
686 @celery.task(
635 hard_time_limit=3600 * 4, max_retries=144)
687 queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144
688 )
636 def logs_cleanup(resource_id, filter_settings):
689 def logs_cleanup(resource_id, filter_settings):
637 request = get_current_request()
690 request = get_current_request()
638 request.tm.begin()
691 request.tm.begin()
639 es_query = {
692 es_query = {
640 "query": {
693 "query": {
641 "filtered": {
694 "filtered": {"filter": {"and": [{"term": {"resource_id": resource_id}}]}}
642 "filter": {
643 "and": [{"term": {"resource_id": resource_id}}]
644 }
645 }
646 }
695 }
647 }
696 }
648
697
649 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
698 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
650 if filter_settings['namespace']:
699 if filter_settings["namespace"]:
651 query = query.filter(Log.namespace == filter_settings['namespace'][0])
700 query = query.filter(Log.namespace == filter_settings["namespace"][0])
652 es_query['query']['filtered']['filter']['and'].append(
701 es_query["query"]["filtered"]["filter"]["and"].append(
653 {"term": {"namespace": filter_settings['namespace'][0]}}
702 {"term": {"namespace": filter_settings["namespace"][0]}}
654 )
703 )
655 query.delete(synchronize_session=False)
704 query.delete(synchronize_session=False)
656 request.tm.commit()
705 request.tm.commit()
657 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format('rcae_l_*', 'log'), body=es_query)
706 Datastores.es.transport.perform_request(
707 "DELETE", "/{}/{}/_query".format("rcae_l_*", "log"), body=es_query
708 )
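
logs_cleanup deletes the same rows twice over: once from Postgres via the ORM query, once from Elasticsearch via a delete-by-query against the rcae_l_* log indices. With a namespace filter applied, the final request body looks like this (resource id and namespace are illustrative; the filtered/and form dates this to the ES 1.x/2.x query DSL):

es_query = {
    "query": {
        "filtered": {
            "filter": {
                "and": [
                    {"term": {"resource_id": 42}},
                    {"term": {"namespace": "celery"}},
                ]
            }
        }
    }
}
# sent as: DELETE /rcae_l_*/log/_query  with es_query as the JSON body
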
@@ -14,6 +14,7 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17
17 def filter_callable(structure, section=None):
18 def filter_callable(structure, section=None):
18 structure['SOMEVAL'] = '***REMOVED***'
19 structure["SOMEVAL"] = "***REMOVED***"
19 return structure
20 return structure
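
filter_callable is a scrubbing hook: whatever structure comes in, its SOMEVAL key is overwritten before the data goes any further. Usage is simply:

structure = {"SOMEVAL": "secret-token", "other": 1}
filter_callable(structure)
# -> {'SOMEVAL': '***REMOVED***', 'other': 1}
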
@@ -43,7 +43,7 b' _ = str'
43 strip_filter = lambda x: x.strip() if x else None
43 strip_filter = lambda x: x.strip() if x else None
44 uppercase_filter = lambda x: x.upper() if x else None
44 uppercase_filter = lambda x: x.upper() if x else None
45
45
46 FALSE_VALUES = ('false', '', False, None)
46 FALSE_VALUES = ("false", "", False, None)
47
47
48
48
49 class CSRFException(Exception):
49 class CSRFException(Exception):
@@ -51,11 +51,14 b' class CSRFException(Exception):'
51
51
52
52
53 class ReactorForm(SecureForm):
53 class ReactorForm(SecureForm):
54 def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None,
54 def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs):
55 **kwargs):
55 super(ReactorForm, self).__init__(
56 super(ReactorForm, self).__init__(formdata=formdata, obj=obj,
56 formdata=formdata,
57 prefix=prefix,
57 obj=obj,
58 csrf_context=csrf_context, **kwargs)
58 prefix=prefix,
59 csrf_context=csrf_context,
60 **kwargs
61 )
59 self._csrf_context = csrf_context
62 self._csrf_context = csrf_context
60
63
61 def generate_csrf_token(self, csrf_context):
64 def generate_csrf_token(self, csrf_context):
@@ -63,14 +66,14 b' class ReactorForm(SecureForm):'
63
66
64 def validate_csrf_token(self, field):
67 def validate_csrf_token(self, field):
65 request = self._csrf_context or pyramid.threadlocal.get_current_request()
68 request = self._csrf_context or pyramid.threadlocal.get_current_request()
66 is_from_auth_token = 'auth:auth_token' in request.effective_principals
69 is_from_auth_token = "auth:auth_token" in request.effective_principals
67 if is_from_auth_token:
70 if is_from_auth_token:
68 return True
71 return True
69
72
70 if field.data != field.current_token:
73 if field.data != field.current_token:
71 # try to save the day by using token from angular
74 # try to save the day by using token from angular
72 if request.headers.get('X-XSRF-TOKEN') != field.current_token:
75 if request.headers.get("X-XSRF-TOKEN") != field.current_token:
73 raise CSRFException('Invalid CSRF token')
76 raise CSRFException("Invalid CSRF token")
74
77
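
validate_csrf_token above allows three outcomes: requests authenticated by an API token skip CSRF entirely, a matching form field passes normally, and otherwise the token that Angular echoes back in the X-XSRF-TOKEN header is tried. Condensed into one predicate (request is a Pyramid request; names are illustrative, not the app's API):

def csrf_ok(field_data, current_token, request):
    if "auth:auth_token" in request.effective_principals:
        return True  # API-token auth bypasses CSRF checks
    if field_data == current_token:
        return True  # normal form submission
    # Angular clients send the token back in a header instead
    return request.headers.get("X-XSRF-TOKEN") == current_token
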
75 @property
78 @property
76 def errors_dict(self):
79 def errors_dict(self):
@@ -105,45 +108,47 b' class ReactorForm(SecureForm):'
105
108
106 class SignInForm(ReactorForm):
109 class SignInForm(ReactorForm):
107 came_from = wtforms.HiddenField()
110 came_from = wtforms.HiddenField()
108 sign_in_user_name = wtforms.StringField(_('User Name'))
111 sign_in_user_name = wtforms.StringField(_("User Name"))
109 sign_in_user_password = wtforms.PasswordField(_('Password'))
112 sign_in_user_password = wtforms.PasswordField(_("Password"))
110
113
111 ignore_labels = ['submit']
114 ignore_labels = ["submit"]
112 css_classes = {'submit': 'btn btn-primary'}
115 css_classes = {"submit": "btn btn-primary"}
113
116
114 html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'},
117 html_attrs = {
115 'sign_in_user_password': {
118 "sign_in_user_name": {"placeholder": "Your login"},
116 'placeholder': 'Your password'}}
119 "sign_in_user_password": {"placeholder": "Your password"},
120 }
117
121
118
122
119 from wtforms.widgets import html_params, HTMLString
123 from wtforms.widgets import html_params, HTMLString
120
124
121
125
122 def select_multi_checkbox(field, ul_class='set', **kwargs):
126 def select_multi_checkbox(field, ul_class="set", **kwargs):
123 """Render a multi-checkbox widget"""
127 """Render a multi-checkbox widget"""
124 kwargs.setdefault('type', 'checkbox')
128 kwargs.setdefault("type", "checkbox")
125 field_id = kwargs.pop('id', field.id)
129 field_id = kwargs.pop("id", field.id)
126 html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)]
130 html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)]
127 for value, label, checked in field.iter_choices():
131 for value, label, checked in field.iter_choices():
128 choice_id = '%s-%s' % (field_id, value)
132 choice_id = "%s-%s" % (field_id, value)
129 options = dict(kwargs, name=field.name, value=value, id=choice_id)
133 options = dict(kwargs, name=field.name, value=value, id=choice_id)
130 if checked:
134 if checked:
131 options['checked'] = 'checked'
135 options["checked"] = "checked"
132 html.append('<li><input %s /> ' % html_params(**options))
136 html.append("<li><input %s /> " % html_params(**options))
133 html.append('<label for="%s">%s</label></li>' % (choice_id, label))
137 html.append('<label for="%s">%s</label></li>' % (choice_id, label))
134 html.append('</ul>')
138 html.append("</ul>")
135 return HTMLString(''.join(html))
139 return HTMLString("".join(html))
136
140
137
141
138 def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs):
142 def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs):
139 """Render a button widget"""
143 """Render a button widget"""
140 kwargs.setdefault('type', 'button')
144 kwargs.setdefault("type", "button")
141 field_id = kwargs.pop('id', field.id)
145 field_id = kwargs.pop("id", field.id)
142 kwargs.setdefault('value', field.label.text)
146 kwargs.setdefault("value", field.label.text)
143 html = ['<button %s>%s</button>' % (html_params(id=field_id,
147 html = [
144 class_=button_cls),
148 "<button %s>%s</button>"
145 kwargs['value'],)]
149 % (html_params(id=field_id, class_=button_cls), kwargs["value"])
146 return HTMLString(''.join(html))
150 ]
151 return HTMLString("".join(html))
147
152
148
153
149 def clean_whitespace(value):
154 def clean_whitespace(value):
@@ -157,33 +162,32 b' def found_username_validator(form, field):'
157 # sets user to recover in email validator
162 # sets user to recover in email validator
158 form.field_user = user
163 form.field_user = user
159 if not user:
164 if not user:
160 raise wtforms.ValidationError('This username does not exist')
165 raise wtforms.ValidationError("This username does not exist")
161
166
162
167
163 def found_username_email_validator(form, field):
168 def found_username_email_validator(form, field):
164 user = UserService.by_email(field.data)
169 user = UserService.by_email(field.data)
165 if not user:
170 if not user:
166 raise wtforms.ValidationError('Email is incorrect')
171 raise wtforms.ValidationError("Email is incorrect")
167
172
168
173
169 def unique_username_validator(form, field):
174 def unique_username_validator(form, field):
170 user = UserService.by_user_name(field.data)
175 user = UserService.by_user_name(field.data)
171 if user:
176 if user:
172 raise wtforms.ValidationError('This username already exists in system')
177 raise wtforms.ValidationError("This username already exists in system")
173
178
174
179
175 def unique_groupname_validator(form, field):
180 def unique_groupname_validator(form, field):
176 group = GroupService.by_group_name(field.data)
181 group = GroupService.by_group_name(field.data)
177 mod_group = getattr(form, '_modified_group', None)
182 mod_group = getattr(form, "_modified_group", None)
178 if group and (not mod_group or mod_group.id != group.id):
183 if group and (not mod_group or mod_group.id != group.id):
179 raise wtforms.ValidationError(
184 raise wtforms.ValidationError("This group name already exists in system")
180 'This group name already exists in system')
181
185
182
186
183 def unique_email_validator(form, field):
187 def unique_email_validator(form, field):
184 user = UserService.by_email(field.data)
188 user = UserService.by_email(field.data)
185 if user:
189 if user:
186 raise wtforms.ValidationError('This email already exists in system')
190 raise wtforms.ValidationError("This email already exists in system")
187
191
188
192
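
All of these validators follow the wtforms (form, field) callable convention: they are listed in a field's validators= and signal failure by raising wtforms.ValidationError, which attaches the message to the field. One extra example of the shape, not part of AppEnlight:

import wtforms

def no_whitespace_validator(form, field):
    # hypothetical validator, shown only to illustrate the convention
    if field.data and " " in field.data:
        raise wtforms.ValidationError("Whitespace is not allowed here")
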
189 def email_validator(form, field):
193 def email_validator(form, field):
@@ -196,145 +200,168 b' def email_validator(form, field):'
196
200
197 def unique_alert_email_validator(form, field):
201 def unique_alert_email_validator(form, field):
198 q = DBSession.query(AlertChannel)
202 q = DBSession.query(AlertChannel)
199 q = q.filter(AlertChannel.channel_name == 'email')
203 q = q.filter(AlertChannel.channel_name == "email")
200 q = q.filter(AlertChannel.channel_value == field.data)
204 q = q.filter(AlertChannel.channel_value == field.data)
201 email = q.first()
205 email = q.first()
202 if email:
206 if email:
203 raise wtforms.ValidationError(
207 raise wtforms.ValidationError("This email already exists in alert system")
204 'This email already exists in alert system')
205
208
206
209
207 def blocked_email_validator(form, field):
210 def blocked_email_validator(form, field):
208 blocked_emails = [
211 blocked_emails = [
209 'goood-mail.org',
212 "goood-mail.org",
210 'shoeonlineblog.com',
213 "shoeonlineblog.com",
211 'louboutinemart.com',
214 "louboutinemart.com",
212 'guccibagshere.com',
215 "guccibagshere.com",
213 'nikeshoesoutletforsale.com'
216 "nikeshoesoutletforsale.com",
214 ]
217 ]
215 data = field.data or ''
218 data = field.data or ""
216 domain = data.split('@')[-1]
219 domain = data.split("@")[-1]
217 if domain in blocked_emails:
220 if domain in blocked_emails:
218 raise wtforms.ValidationError('Don\'t spam')
221 raise wtforms.ValidationError("Don't spam")
219
222
220
223
221 def old_password_validator(form, field):
224 def old_password_validator(form, field):
222 if not UserService.check_password(field.user, field.data or ''):
225 if not UserService.check_password(field.user, field.data or ""):
223 raise wtforms.ValidationError('You need to enter correct password')
226 raise wtforms.ValidationError("You need to enter correct password")
224
227
225
228
226 class UserRegisterForm(ReactorForm):
229 class UserRegisterForm(ReactorForm):
227 user_name = wtforms.StringField(
230 user_name = wtforms.StringField(
228 _('User Name'),
231 _("User Name"),
229 filters=[strip_filter],
232 filters=[strip_filter],
230 validators=[
233 validators=[
231 wtforms.validators.Length(min=2, max=30),
234 wtforms.validators.Length(min=2, max=30),
232 wtforms.validators.Regexp(
235 wtforms.validators.Regexp(
233 re.compile(r'^[\.\w-]+$', re.UNICODE),
236 re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used"
234 message="Invalid characters used"),
237 ),
235 unique_username_validator,
238 unique_username_validator,
236 wtforms.validators.DataRequired()
239 wtforms.validators.DataRequired(),
237 ])
240 ],
241 )
238
242
239 user_password = wtforms.PasswordField(_('User Password'),
243 user_password = wtforms.PasswordField(
240 filters=[strip_filter],
244 _("User Password"),
241 validators=[
245 filters=[strip_filter],
242 wtforms.validators.Length(min=4),
246 validators=[
243 wtforms.validators.DataRequired()
247 wtforms.validators.Length(min=4),
244 ])
248 wtforms.validators.DataRequired(),
249 ],
250 )
245
251
246 email = wtforms.StringField(_('Email Address'),
252 email = wtforms.StringField(
247 filters=[strip_filter],
253 _("Email Address"),
248 validators=[email_validator,
254 filters=[strip_filter],
249 unique_email_validator,
255 validators=[
250 blocked_email_validator,
256 email_validator,
251 wtforms.validators.DataRequired()])
257 unique_email_validator,
252 first_name = wtforms.HiddenField(_('First Name'))
258 blocked_email_validator,
253 last_name = wtforms.HiddenField(_('Last Name'))
259 wtforms.validators.DataRequired(),
260 ],
261 )
262 first_name = wtforms.HiddenField(_("First Name"))
263 last_name = wtforms.HiddenField(_("Last Name"))
254
264
255 ignore_labels = ['submit']
265 ignore_labels = ["submit"]
256 css_classes = {'submit': 'btn btn-primary'}
266 css_classes = {"submit": "btn btn-primary"}
257
267
258 html_attrs = {'user_name': {'placeholder': 'Your login'},
268 html_attrs = {
259 'user_password': {'placeholder': 'Your password'},
269 "user_name": {"placeholder": "Your login"},
260 'email': {'placeholder': 'Your email'}}
270 "user_password": {"placeholder": "Your password"},
271 "email": {"placeholder": "Your email"},
272 }
261
273
262
274
263 class UserCreateForm(UserRegisterForm):
275 class UserCreateForm(UserRegisterForm):
264 status = wtforms.BooleanField('User status',
276 status = wtforms.BooleanField("User status", false_values=FALSE_VALUES)
265 false_values=FALSE_VALUES)
266
277
267
278
268 class UserUpdateForm(UserCreateForm):
279 class UserUpdateForm(UserCreateForm):
269 user_name = None
280 user_name = None
270 user_password = wtforms.PasswordField(_('User Password'),
281 user_password = wtforms.PasswordField(
271 filters=[strip_filter],
282 _("User Password"),
272 validators=[
283 filters=[strip_filter],
273 wtforms.validators.Length(min=4),
284 validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()],
274 wtforms.validators.Optional()
285 )
275 ])
286 email = wtforms.StringField(
276 email = wtforms.StringField(_('Email Address'),
287 _("Email Address"),
277 filters=[strip_filter],
288 filters=[strip_filter],
278 validators=[email_validator,
289 validators=[email_validator, wtforms.validators.DataRequired()],
279 wtforms.validators.DataRequired()])
290 )
280
291
281
292
282 class LostPasswordForm(ReactorForm):
293 class LostPasswordForm(ReactorForm):
283 email = wtforms.StringField(_('Email Address'),
294 email = wtforms.StringField(
284 filters=[strip_filter],
295 _("Email Address"),
285 validators=[email_validator,
296 filters=[strip_filter],
286 found_username_email_validator,
297 validators=[
287 wtforms.validators.DataRequired()])
298 email_validator,
299 found_username_email_validator,
300 wtforms.validators.DataRequired(),
301 ],
302 )
288
303
289 submit = wtforms.SubmitField(_('Reset password'))
304 submit = wtforms.SubmitField(_("Reset password"))
290 ignore_labels = ['submit']
305 ignore_labels = ["submit"]
291 css_classes = {'submit': 'btn btn-primary'}
306 css_classes = {"submit": "btn btn-primary"}
292
307
293
308
294 class ChangePasswordForm(ReactorForm):
309 class ChangePasswordForm(ReactorForm):
295 old_password = wtforms.PasswordField(
310 old_password = wtforms.PasswordField(
296 'Old Password',
311 "Old Password",
297 filters=[strip_filter],
312 filters=[strip_filter],
298 validators=[old_password_validator,
313 validators=[old_password_validator, wtforms.validators.DataRequired()],
299 wtforms.validators.DataRequired()])
314 )
300
315
301 new_password = wtforms.PasswordField(
316 new_password = wtforms.PasswordField(
302 'New Password',
317 "New Password",
303 filters=[strip_filter],
318 filters=[strip_filter],
304 validators=[wtforms.validators.Length(min=4),
319 validators=[
305 wtforms.validators.DataRequired()])
320 wtforms.validators.Length(min=4),
321 wtforms.validators.DataRequired(),
322 ],
323 )
306 new_password_confirm = wtforms.PasswordField(
324 new_password_confirm = wtforms.PasswordField(
307 'Confirm Password',
325 "Confirm Password",
308 filters=[strip_filter],
326 filters=[strip_filter],
309 validators=[wtforms.validators.EqualTo('new_password'),
327 validators=[
310 wtforms.validators.DataRequired()])
328 wtforms.validators.EqualTo("new_password"),
311 submit = wtforms.SubmitField('Change Password')
329 wtforms.validators.DataRequired(),
312 ignore_labels = ['submit']
330 ],
313 css_classes = {'submit': 'btn btn-primary'}
331 )
332 submit = wtforms.SubmitField("Change Password")
333 ignore_labels = ["submit"]
334 css_classes = {"submit": "btn btn-primary"}
314
335
315
336
316 class CheckPasswordForm(ReactorForm):
337 class CheckPasswordForm(ReactorForm):
317 password = wtforms.PasswordField(
338 password = wtforms.PasswordField(
318 'Password',
339 "Password",
319 filters=[strip_filter],
340 filters=[strip_filter],
320 validators=[old_password_validator,
341 validators=[old_password_validator, wtforms.validators.DataRequired()],
321 wtforms.validators.DataRequired()])
342 )
322
343
323
344
324 class NewPasswordForm(ReactorForm):
345 class NewPasswordForm(ReactorForm):
325 new_password = wtforms.PasswordField(
346 new_password = wtforms.PasswordField(
326 'New Password',
347 "New Password",
327 filters=[strip_filter],
348 filters=[strip_filter],
328 validators=[wtforms.validators.Length(min=4),
349 validators=[
329 wtforms.validators.DataRequired()])
350 wtforms.validators.Length(min=4),
351 wtforms.validators.DataRequired(),
352 ],
353 )
330 new_password_confirm = wtforms.PasswordField(
354 new_password_confirm = wtforms.PasswordField(
331 'Confirm Password',
355 "Confirm Password",
332 filters=[strip_filter],
356 filters=[strip_filter],
333 validators=[wtforms.validators.EqualTo('new_password'),
357 validators=[
334 wtforms.validators.DataRequired()])
358 wtforms.validators.EqualTo("new_password"),
335 submit = wtforms.SubmitField('Set Password')
359 wtforms.validators.DataRequired(),
336 ignore_labels = ['submit']
360 ],
337 css_classes = {'submit': 'btn btn-primary'}
361 )
362 submit = wtforms.SubmitField("Set Password")
363 ignore_labels = ["submit"]
364 css_classes = {"submit": "btn btn-primary"}
338
365
339
366
340 class CORSTextAreaField(wtforms.StringField):
367 class CORSTextAreaField(wtforms.StringField):
@@ -342,261 +369,290 b' class CORSTextAreaField(wtforms.StringField):'
342 This field represents an HTML ``<textarea>`` and can be used to take
369 This field represents an HTML ``<textarea>`` and can be used to take
343 multi-line input.
370 multi-line input.
344 """
371 """
372
345 widget = wtforms.widgets.TextArea()
373 widget = wtforms.widgets.TextArea()
346
374
347 def process_formdata(self, valuelist):
375 def process_formdata(self, valuelist):
348 self.data = []
376 self.data = []
349 if valuelist:
377 if valuelist:
350 data = [x.strip() for x in valuelist[0].split('\n')]
378 data = [x.strip() for x in valuelist[0].split("\n")]
351 for d in data:
379 for d in data:
352 if not d:
380 if not d:
353 continue
381 continue
354 if d.startswith('www.'):
382 if d.startswith("www."):
355 d = d[4:]
383 d = d[4:]
356 if data:
384 if data:
357 self.data.append(d)
385 self.data.append(d)
358 else:
386 else:
359 self.data = []
387 self.data = []
360 self.data = '\n'.join(self.data)
388 self.data = "\n".join(self.data)
361
389
362
390
363 class ApplicationCreateForm(ReactorForm):
391 class ApplicationCreateForm(ReactorForm):
364 resource_name = wtforms.StringField(
392 resource_name = wtforms.StringField(
365 _('Application name'),
393 _("Application name"),
366 filters=[strip_filter],
394 filters=[strip_filter],
367 validators=[wtforms.validators.Length(min=1),
395 validators=[
368 wtforms.validators.DataRequired()])
396 wtforms.validators.Length(min=1),
397 wtforms.validators.DataRequired(),
398 ],
399 )
369
400
370 domains = CORSTextAreaField(
401 domains = CORSTextAreaField(
371 _('Domain names for CORS headers '),
402 _("Domain names for CORS headers "),
372 validators=[wtforms.validators.Length(min=1),
403 validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()],
373 wtforms.validators.Optional()],
404 description="Required for Javascript error "
374 description='Required for Javascript error '
405 "tracking (one line one domain, skip http:// part)",
375 'tracking (one line one domain, skip http:// part)')
406 )
376
407
377 submit = wtforms.SubmitField(_('Create Application'))
408 submit = wtforms.SubmitField(_("Create Application"))
378
409
379 ignore_labels = ['submit']
410 ignore_labels = ["submit"]
380 css_classes = {'submit': 'btn btn-primary'}
411 css_classes = {"submit": "btn btn-primary"}
381 html_attrs = {'resource_name': {'placeholder': 'Application Name'},
412 html_attrs = {
382 'uptime_url': {'placeholder': 'http://somedomain.com'}}
413 "resource_name": {"placeholder": "Application Name"},
414 "uptime_url": {"placeholder": "http://somedomain.com"},
415 }
383
416
384
417
385 class ApplicationUpdateForm(ApplicationCreateForm):
418 class ApplicationUpdateForm(ApplicationCreateForm):
386 default_grouping = wtforms.SelectField(
419 default_grouping = wtforms.SelectField(
387 _('Default grouping for errors'),
420 _("Default grouping for errors"),
388 choices=[('url_type', 'Error Type + location',),
421 choices=[
389 ('url_traceback', 'Traceback + location',),
422 ("url_type", "Error Type + location"),
390 ('traceback_server', 'Traceback + Server',)],
423 ("url_traceback", "Traceback + location"),
391 default='url_traceback')
424 ("traceback_server", "Traceback + Server"),
425 ],
426 default="url_traceback",
427 )
392
428
393 error_report_threshold = wtforms.IntegerField(
429 error_report_threshold = wtforms.IntegerField(
394 _('Alert on error reports'),
430 _("Alert on error reports"),
395 validators=[
431 validators=[
396 wtforms.validators.NumberRange(min=1),
432 wtforms.validators.NumberRange(min=1),
397 wtforms.validators.DataRequired()
433 wtforms.validators.DataRequired(),
398 ],
434 ],
399 description='Application requires to send at least this amount of '
435 description="Application requires to send at least this amount of "
400 'error reports per minute to open alert'
436 "error reports per minute to open alert",
401 )
437 )
402
438
403 slow_report_threshold = wtforms.IntegerField(
439 slow_report_threshold = wtforms.IntegerField(
404 _('Alert on slow reports'),
440 _("Alert on slow reports"),
405 validators=[wtforms.validators.NumberRange(min=1),
441 validators=[
406 wtforms.validators.DataRequired()],
442 wtforms.validators.NumberRange(min=1),
407 description='Application requires to send at least this amount of '
443 wtforms.validators.DataRequired(),
408 'slow reports per minute to open alert')
444 ],
445 description="Application requires to send at least this amount of "
446 "slow reports per minute to open alert",
447 )
409
448
410 allow_permanent_storage = wtforms.BooleanField(
449 allow_permanent_storage = wtforms.BooleanField(
411 _('Permanent logs'),
450 _("Permanent logs"),
412 false_values=FALSE_VALUES,
451 false_values=FALSE_VALUES,
413 description=_(
452 description=_("Allow permanent storage of logs in separate DB partitions"),
414 'Allow permanent storage of logs in separate DB partitions'))
453 )
415
454
416 submit = wtforms.SubmitField(_('Create Application'))
455 submit = wtforms.SubmitField(_("Create Application"))
417
456
418
457
419 class UserSearchSchemaForm(ReactorForm):
458 class UserSearchSchemaForm(ReactorForm):
420 user_name = wtforms.StringField('User Name',
459 user_name = wtforms.StringField("User Name", filters=[strip_filter])
421 filters=[strip_filter], )
422
460
423 submit = wtforms.SubmitField(_('Search User'))
461 submit = wtforms.SubmitField(_("Search User"))
424 ignore_labels = ['submit']
462 ignore_labels = ["submit"]
425 css_classes = {'submit': 'btn btn-primary'}
463 css_classes = {"submit": "btn btn-primary"}
426
464
427 '<li class="user_exists"><span></span></li>'
465 '<li class="user_exists"><span></span></li>'
428
466
429
467
430 class YesNoForm(ReactorForm):
468 class YesNoForm(ReactorForm):
431 no = wtforms.SubmitField('No', default='')
469 no = wtforms.SubmitField("No", default="")
432 yes = wtforms.SubmitField('Yes', default='')
470 yes = wtforms.SubmitField("Yes", default="")
433 ignore_labels = ['submit']
471 ignore_labels = ["submit"]
434 css_classes = {'submit': 'btn btn-primary'}
472 css_classes = {"submit": "btn btn-primary"}
435
473
436
474
437 status_codes = [('', 'All',), ('500', '500',), ('404', '404',)]
475 status_codes = [("", "All"), ("500", "500"), ("404", "404")]
438
476
439 priorities = [('', 'All',)]
477 priorities = [("", "All")]
440 for i in range(1, 11):
478 for i in range(1, 11):
441 priorities.append((str(i), str(i),))
479 priorities.append((str(i), str(i)))
442
480
443 report_status_choices = [('', 'All',),
481 report_status_choices = [
444 ('never_reviewed', 'Never reviewed',),
482 ("", "All"),
445 ('reviewed', 'Reviewed',),
483 ("never_reviewed", "Never reviewed"),
446 ('public', 'Public',),
484 ("reviewed", "Reviewed"),
447 ('fixed', 'Fixed',), ]
485 ("public", "Public"),
486 ("fixed", "Fixed"),
487 ]
448
488
449
489
450 class ReportBrowserForm(ReactorForm):
490 class ReportBrowserForm(ReactorForm):
451 applications = wtforms.SelectMultipleField('Applications',
491 applications = wtforms.SelectMultipleField(
452 widget=select_multi_checkbox)
492 "Applications", widget=select_multi_checkbox
453 http_status = wtforms.SelectField('HTTP Status', choices=status_codes)
493 )
454 priority = wtforms.SelectField('Priority', choices=priorities, default='')
494 http_status = wtforms.SelectField("HTTP Status", choices=status_codes)
455 start_date = wtforms.DateField('Start Date')
495 priority = wtforms.SelectField("Priority", choices=priorities, default="")
456 end_date = wtforms.DateField('End Date')
496 start_date = wtforms.DateField("Start Date")
457 error = wtforms.StringField('Error')
497 end_date = wtforms.DateField("End Date")
458 url_path = wtforms.StringField('URL Path')
498 error = wtforms.StringField("Error")
459 url_domain = wtforms.StringField('URL Domain')
499 url_path = wtforms.StringField("URL Path")
460 report_status = wtforms.SelectField('Report status',
500 url_domain = wtforms.StringField("URL Domain")
461 choices=report_status_choices,
501 report_status = wtforms.SelectField(
462 default='')
502 "Report status", choices=report_status_choices, default=""
463 submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">'
503 )
464 '</span> Filter results',
504 submit = wtforms.SubmitField(
465 widget=button_widget)
505 '<span class="glyphicon glyphicon-search">' "</span> Filter results",
466
506 widget=button_widget,
467 ignore_labels = ['submit']
507 )
468 css_classes = {'submit': 'btn btn-primary'}
508
469
509 ignore_labels = ["submit"]
470
510 css_classes = {"submit": "btn btn-primary"}
471 slow_report_status_choices = [('', 'All',),
511
472 ('never_reviewed', 'Never reviewed',),
512
473 ('reviewed', 'Reviewed',),
513 slow_report_status_choices = [
474 ('public', 'Public',), ]
514 ("", "All"),
515 ("never_reviewed", "Never reviewed"),
516 ("reviewed", "Reviewed"),
517 ("public", "Public"),
518 ]
475
519
476
520
477 class BulkOperationForm(ReactorForm):
521 class BulkOperationForm(ReactorForm):
478 applications = wtforms.SelectField('Applications')
522 applications = wtforms.SelectField("Applications")
479 start_date = wtforms.DateField(
523 start_date = wtforms.DateField(
480 'Start Date',
524 "Start Date",
481 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(
525 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90),
482 days=90))
526 )
483 end_date = wtforms.DateField('End Date')
527 end_date = wtforms.DateField("End Date")
484 confirm = wtforms.BooleanField(
528 confirm = wtforms.BooleanField(
485 'Confirm operation',
529 "Confirm operation", validators=[wtforms.validators.DataRequired()]
486 validators=[wtforms.validators.DataRequired()])
530 )
487
531
488
532
489 class LogBrowserForm(ReactorForm):
533 class LogBrowserForm(ReactorForm):
490 applications = wtforms.SelectMultipleField('Applications',
534 applications = wtforms.SelectMultipleField(
491 widget=select_multi_checkbox)
535 "Applications", widget=select_multi_checkbox
492 start_date = wtforms.DateField('Start Date')
536 )
493 log_level = wtforms.StringField('Log level')
537 start_date = wtforms.DateField("Start Date")
494 message = wtforms.StringField('Message')
538 log_level = wtforms.StringField("Log level")
495 namespace = wtforms.StringField('Namespace')
539 message = wtforms.StringField("Message")
540 namespace = wtforms.StringField("Namespace")
496 submit = wtforms.SubmitField(
541 submit = wtforms.SubmitField(
497 '<span class="glyphicon glyphicon-search"></span> Filter results',
542 '<span class="glyphicon glyphicon-search"></span> Filter results',
498 widget=button_widget)
543 widget=button_widget,
499 ignore_labels = ['submit']
544 )
500 css_classes = {'submit': 'btn btn-primary'}
545 ignore_labels = ["submit"]
546 css_classes = {"submit": "btn btn-primary"}
501
547
502
548
503 class CommentForm(ReactorForm):
549 class CommentForm(ReactorForm):
504 body = wtforms.TextAreaField('Comment', validators=[
550 body = wtforms.TextAreaField(
505 wtforms.validators.Length(min=1),
551 "Comment",
506 wtforms.validators.DataRequired()
552 validators=[
507 ])
553 wtforms.validators.Length(min=1),
508 submit = wtforms.SubmitField('Comment', )
554 wtforms.validators.DataRequired(),
509 ignore_labels = ['submit']
555 ],
510 css_classes = {'submit': 'btn btn-primary'}
556 )
557 submit = wtforms.SubmitField("Comment")
558 ignore_labels = ["submit"]
559 css_classes = {"submit": "btn btn-primary"}
511
560
512
561
513 class EmailChannelCreateForm(ReactorForm):
562 class EmailChannelCreateForm(ReactorForm):
514 email = wtforms.StringField(_('Email Address'),
563 email = wtforms.StringField(
515 filters=[strip_filter],
564 _("Email Address"),
516 validators=[email_validator,
565 filters=[strip_filter],
517 unique_alert_email_validator,
566 validators=[
518 wtforms.validators.DataRequired()])
567 email_validator,
519 submit = wtforms.SubmitField('Add email channel', )
568 unique_alert_email_validator,
520 ignore_labels = ['submit']
569 wtforms.validators.DataRequired(),
521 css_classes = {'submit': 'btn btn-primary'}
570 ],
571 )
572 submit = wtforms.SubmitField("Add email channel")
573 ignore_labels = ["submit"]
574 css_classes = {"submit": "btn btn-primary"}
522
575
523
576
524 def gen_user_profile_form():
577 def gen_user_profile_form():
525 class UserProfileForm(ReactorForm):
578 class UserProfileForm(ReactorForm):
526 email = wtforms.StringField(
579 email = wtforms.StringField(
527 _('Email Address'),
580 _("Email Address"),
528 validators=[email_validator, wtforms.validators.DataRequired()])
581 validators=[email_validator, wtforms.validators.DataRequired()],
529 first_name = wtforms.StringField(_('First Name'))
582 )
530 last_name = wtforms.StringField(_('Last Name'))
583 first_name = wtforms.StringField(_("First Name"))
531 company_name = wtforms.StringField(_('Company Name'))
584 last_name = wtforms.StringField(_("Last Name"))
532 company_address = wtforms.TextAreaField(_('Company Address'))
585 company_name = wtforms.StringField(_("Company Name"))
533 zip_code = wtforms.StringField(_('ZIP code'))
586 company_address = wtforms.TextAreaField(_("Company Address"))
534 city = wtforms.StringField(_('City'))
587 zip_code = wtforms.StringField(_("ZIP code"))
535 notifications = wtforms.BooleanField('Account notifications',
588 city = wtforms.StringField(_("City"))
536 false_values=FALSE_VALUES)
589 notifications = wtforms.BooleanField(
537 submit = wtforms.SubmitField(_('Update Account'))
590 "Account notifications", false_values=FALSE_VALUES
538 ignore_labels = ['submit']
591 )
539 css_classes = {'submit': 'btn btn-primary'}
592 submit = wtforms.SubmitField(_("Update Account"))
593 ignore_labels = ["submit"]
594 css_classes = {"submit": "btn btn-primary"}
540
595
541 return UserProfileForm
596 return UserProfileForm
542
597
543
598
544 class PurgeAppForm(ReactorForm):
599 class PurgeAppForm(ReactorForm):
545 resource_id = wtforms.HiddenField(
600 resource_id = wtforms.HiddenField(
546 'App Id',
601 "App Id", validators=[wtforms.validators.DataRequired()]
547 validators=[wtforms.validators.DataRequired()])
602 )
548 days = wtforms.IntegerField(
603 days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()])
549 'Days',
550 validators=[wtforms.validators.DataRequired()])
551 password = wtforms.PasswordField(
604 password = wtforms.PasswordField(
552 'Admin Password',
605 "Admin Password",
553 validators=[old_password_validator, wtforms.validators.DataRequired()])
606 validators=[old_password_validator, wtforms.validators.DataRequired()],
554 submit = wtforms.SubmitField(_('Purge Data'))
607 )
555 ignore_labels = ['submit']
608 submit = wtforms.SubmitField(_("Purge Data"))
556 css_classes = {'submit': 'btn btn-primary'}
609 ignore_labels = ["submit"]
610 css_classes = {"submit": "btn btn-primary"}
557
611
558
612
559 class IntegrationRepoForm(ReactorForm):
613 class IntegrationRepoForm(ReactorForm):
560 host_name = wtforms.StringField("Service Host", default='')
614 host_name = wtforms.StringField("Service Host", default="")
561 user_name = wtforms.StringField(
615 user_name = wtforms.StringField(
562 "User Name",
616 "User Name",
563 filters=[strip_filter],
617 filters=[strip_filter],
564 validators=[wtforms.validators.DataRequired(),
618 validators=[
565 wtforms.validators.Length(min=1)])
619 wtforms.validators.DataRequired(),
620 wtforms.validators.Length(min=1),
621 ],
622 )
566 repo_name = wtforms.StringField(
623 repo_name = wtforms.StringField(
567 "Repo Name",
624 "Repo Name",
568 filters=[strip_filter],
625 filters=[strip_filter],
569 validators=[wtforms.validators.DataRequired(),
626 validators=[
570 wtforms.validators.Length(min=1)])
627 wtforms.validators.DataRequired(),
628 wtforms.validators.Length(min=1),
629 ],
630 )
571
631
572
632
573 class IntegrationBitbucketForm(IntegrationRepoForm):
633 class IntegrationBitbucketForm(IntegrationRepoForm):
574 host_name = wtforms.StringField("Service Host",
634 host_name = wtforms.StringField("Service Host", default="https://bitbucket.org")
575 default='https://bitbucket.org')
576
635
577 def validate_user_name(self, field):
636 def validate_user_name(self, field):
578 try:
637 try:
579 request = pyramid.threadlocal.get_current_request()
638 request = pyramid.threadlocal.get_current_request()
580 client = BitbucketIntegration.create_client(
639 client = BitbucketIntegration.create_client(
581 request,
640 request, self.user_name.data, self.repo_name.data
582 self.user_name.data,
641 )
583 self.repo_name.data)
584 client.get_assignees()
642 client.get_assignees()
585 except IntegrationException as e:
643 except IntegrationException as e:
586 raise wtforms.validators.ValidationError(str(e))
644 raise wtforms.validators.ValidationError(str(e))
587
645
588
646
589 class IntegrationGithubForm(IntegrationRepoForm):
647 class IntegrationGithubForm(IntegrationRepoForm):
590 host_name = wtforms.StringField("Service Host",
648 host_name = wtforms.StringField("Service Host", default="https://github.com")
591 default='https://github.com')
592
649
593 def validate_user_name(self, field):
650 def validate_user_name(self, field):
594 try:
651 try:
595 request = pyramid.threadlocal.get_current_request()
652 request = pyramid.threadlocal.get_current_request()
596 client = GithubIntegration.create_client(
653 client = GithubIntegration.create_client(
597 request,
654 request, self.user_name.data, self.repo_name.data
598 self.user_name.data,
655 )
599 self.repo_name.data)
600 client.get_assignees()
656 client.get_assignees()
601 except IntegrationException as e:
657 except IntegrationException as e:
602 raise wtforms.validators.ValidationError(str(e))
658 raise wtforms.validators.ValidationError(str(e))
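
The Bitbucket and Github forms validate credentials the same way: build an integration client, make one cheap API call (get_assignees), and convert any IntegrationException into a field-level ValidationError so the failure renders as an ordinary form error. The shared shape, sketched (make_client stands in for the *Integration.create_client calls):

import wtforms

class IntegrationException(Exception):
    # stand-in for AppEnlight's real exception type
    pass

def validate_by_probe(make_client):
    try:
        client = make_client()   # may raise IntegrationException
        client.get_assignees()   # one cheap call proves host/user/repo work
    except IntegrationException as e:
        raise wtforms.validators.ValidationError(str(e))
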
@@ -605,25 +661,28 b' class IntegrationGithubForm(IntegrationRepoForm):'
605
661
606 def filter_rooms(data):
662 def filter_rooms(data):
607 if data is not None:
663 if data is not None:
608 rooms = data.split(',')
664 rooms = data.split(",")
609 return ','.join([r.strip() for r in rooms])
665 return ",".join([r.strip() for r in rooms])
610
666
611
667
612 class IntegrationCampfireForm(ReactorForm):
668 class IntegrationCampfireForm(ReactorForm):
613 account = wtforms.StringField(
669 account = wtforms.StringField(
614 'Account',
670 "Account",
615 filters=[strip_filter],
671 filters=[strip_filter],
616 validators=[wtforms.validators.DataRequired()])
672 validators=[wtforms.validators.DataRequired()],
673 )
617 api_token = wtforms.StringField(
674 api_token = wtforms.StringField(
618 'Api Token',
675 "Api Token",
619 filters=[strip_filter],
676 filters=[strip_filter],
620 validators=[wtforms.validators.DataRequired()])
677 validators=[wtforms.validators.DataRequired()],
621 rooms = wtforms.StringField('Room ID list', filters=[filter_rooms])
678 )
679 rooms = wtforms.StringField("Room ID list", filters=[filter_rooms])
622
680
623 def validate_api_token(self, field):
681 def validate_api_token(self, field):
624 try:
682 try:
625 client = CampfireIntegration.create_client(self.api_token.data,
683 client = CampfireIntegration.create_client(
626 self.account.data)
684 self.api_token.data, self.account.data
685 )
627 client.get_account()
686 client.get_account()
628 except IntegrationException as e:
687 except IntegrationException as e:
629 raise wtforms.validators.ValidationError(str(e))
688 raise wtforms.validators.ValidationError(str(e))
@@ -631,17 +690,18 b' class IntegrationCampfireForm(ReactorForm):'
631 def validate_rooms(self, field):
690 def validate_rooms(self, field):
632 if not field.data:
691 if not field.data:
633 return
692 return
634 client = CampfireIntegration.create_client(self.api_token.data,
693 client = CampfireIntegration.create_client(
635 self.account.data)
694 self.api_token.data, self.account.data
695 )
636
696
637 try:
697 try:
638 room_list = [r['id'] for r in client.get_rooms()]
698 room_list = [r["id"] for r in client.get_rooms()]
639 except IntegrationException as e:
699 except IntegrationException as e:
640 raise wtforms.validators.ValidationError(str(e))
700 raise wtforms.validators.ValidationError(str(e))
641
701
642 rooms = field.data.split(',')
702 rooms = field.data.split(",")
643 if len(rooms) > 3:
703 if len(rooms) > 3:
644 msg = 'You can use up to 3 room ids'
704 msg = "You can use up to 3 room ids"
645 raise wtforms.validators.ValidationError(msg)
705 raise wtforms.validators.ValidationError(msg)
646 if rooms:
706 if rooms:
647 for room_id in rooms:
707 for room_id in rooms:
@@ -649,75 +709,78 b' class IntegrationCampfireForm(ReactorForm):'
649 msg = "Room %s doesn't exist"
709 msg = "Room %s doesn't exist"
650 raise wtforms.validators.ValidationError(msg % room_id)
710 raise wtforms.validators.ValidationError(msg % room_id)
651 if not room_id.strip().isdigit():
711 if not room_id.strip().isdigit():
652 msg = 'You must use only integers for room ids'
712 msg = "You must use only integers for room ids"
653 raise wtforms.validators.ValidationError(msg)
713 raise wtforms.validators.ValidationError(msg)
654
714
655 submit = wtforms.SubmitField(_('Connect to Campfire'))
715 submit = wtforms.SubmitField(_("Connect to Campfire"))
656 ignore_labels = ['submit']
716 ignore_labels = ["submit"]
657 css_classes = {'submit': 'btn btn-primary'}
717 css_classes = {"submit": "btn btn-primary"}
658
718
659
719
660 def filter_rooms(data):
720 def filter_rooms(data):
661 if data is not None:
721 if data is not None:
662 rooms = data.split(',')
722 rooms = data.split(",")
663 return ','.join([r.strip() for r in rooms])
723 return ",".join([r.strip() for r in rooms])
664
724
665
725
666 class IntegrationHipchatForm(ReactorForm):
726 class IntegrationHipchatForm(ReactorForm):
667 api_token = wtforms.StringField(
727 api_token = wtforms.StringField(
668 'Api Token',
728 "Api Token",
669 filters=[strip_filter],
729 filters=[strip_filter],
670 validators=[wtforms.validators.DataRequired()])
730 validators=[wtforms.validators.DataRequired()],
731 )
671 rooms = wtforms.StringField(
732 rooms = wtforms.StringField(
672 'Room ID list',
733 "Room ID list",
673 filters=[filter_rooms],
734 filters=[filter_rooms],
674 validators=[wtforms.validators.DataRequired()])
735 validators=[wtforms.validators.DataRequired()],
736 )
675
737
676 def validate_rooms(self, field):
738 def validate_rooms(self, field):
677 if not field.data:
739 if not field.data:
678 return
740 return
679 client = HipchatIntegration.create_client(self.api_token.data)
741 client = HipchatIntegration.create_client(self.api_token.data)
680 rooms = field.data.split(',')
742 rooms = field.data.split(",")
681 if len(rooms) > 3:
743 if len(rooms) > 3:
682 msg = 'You can use up to 3 room ids'
744 msg = "You can use up to 3 room ids"
683 raise wtforms.validators.ValidationError(msg)
745 raise wtforms.validators.ValidationError(msg)
684 if rooms:
746 if rooms:
685 for room_id in rooms:
747 for room_id in rooms:
686 if not room_id.strip().isdigit():
748 if not room_id.strip().isdigit():
687 msg = 'You must use only integers for room ids'
749 msg = "You must use only integers for room ids"
688 raise wtforms.validators.ValidationError(msg)
750 raise wtforms.validators.ValidationError(msg)
689 try:
751 try:
690 client.send({
752 client.send(
691 "message_format": 'text',
753 {
692 "message": "testing for room existence",
754 "message_format": "text",
693 "from": "AppEnlight",
755 "message": "testing for room existence",
694 "room_id": room_id,
756 "from": "AppEnlight",
695 "color": "green"
757 "room_id": room_id,
696 })
758 "color": "green",
759 }
760 )
697 except IntegrationException as exc:
761 except IntegrationException as exc:
698 msg = 'Room id: %s exception: %s'
762 msg = "Room id: %s exception: %s"
699 raise wtforms.validators.ValidationError(msg % (room_id,
763 raise wtforms.validators.ValidationError(msg % (room_id, exc))
700 exc))
701
764
702
765
703 class IntegrationFlowdockForm(ReactorForm):
766 class IntegrationFlowdockForm(ReactorForm):
704 api_token = wtforms.StringField('API Token',
767 api_token = wtforms.StringField(
705 filters=[strip_filter],
768 "API Token",
706 validators=[
769 filters=[strip_filter],
707 wtforms.validators.DataRequired()
770 validators=[wtforms.validators.DataRequired()],
708 ], )
771 )
709
772
710 def validate_api_token(self, field):
773 def validate_api_token(self, field):
711 try:
774 try:
712 client = FlowdockIntegration.create_client(self.api_token.data)
775 client = FlowdockIntegration.create_client(self.api_token.data)
713 registry = pyramid.threadlocal.get_current_registry()
776 registry = pyramid.threadlocal.get_current_registry()
714 payload = {
777 payload = {
715 "source": registry.settings['mailing.from_name'],
778 "source": registry.settings["mailing.from_name"],
716 "from_address": registry.settings['mailing.from_email'],
779 "from_address": registry.settings["mailing.from_email"],
717 "subject": "Integration test",
780 "subject": "Integration test",
718 "content": "If you can see this it was successful",
781 "content": "If you can see this it was successful",
719 "tags": ["appenlight"],
782 "tags": ["appenlight"],
720 "link": registry.settings['mailing.app_url']
783 "link": registry.settings["mailing.app_url"],
721 }
784 }
722 client.send_to_inbox(payload)
785 client.send_to_inbox(payload)
723 except IntegrationException as e:
786 except IntegrationException as e:
@@ -726,30 +789,35 b' class IntegrationFlowdockForm(ReactorForm):'
726
789
727 class IntegrationSlackForm(ReactorForm):
790 class IntegrationSlackForm(ReactorForm):
728 webhook_url = wtforms.StringField(
791 webhook_url = wtforms.StringField(
729 'Reports webhook',
792 "Reports webhook",
730 filters=[strip_filter],
793 filters=[strip_filter],
731 validators=[wtforms.validators.DataRequired()])
794 validators=[wtforms.validators.DataRequired()],
795 )
732
796
733 def validate_webhook_url(self, field):
797 def validate_webhook_url(self, field):
734 registry = pyramid.threadlocal.get_current_registry()
798 registry = pyramid.threadlocal.get_current_registry()
735 client = SlackIntegration.create_client(field.data)
799 client = SlackIntegration.create_client(field.data)
736 link = "<%s|%s>" % (registry.settings['mailing.app_url'],
800 link = "<%s|%s>" % (
737 registry.settings['mailing.from_name'])
801 registry.settings["mailing.app_url"],
802 registry.settings["mailing.from_name"],
803 )
738 test_data = {
804 test_data = {
739 "username": "AppEnlight",
805 "username": "AppEnlight",
740 "icon_emoji": ":fire:",
806 "icon_emoji": ":fire:",
741 "attachments": [
807 "attachments": [
742 {"fallback": "Testing integration channel: %s" % link,
808 {
743 "pretext": "Testing integration channel: %s" % link,
809 "fallback": "Testing integration channel: %s" % link,
744 "color": "good",
810 "pretext": "Testing integration channel: %s" % link,
745 "fields": [
811 "color": "good",
746 {
812 "fields": [
747 "title": "Status",
813 {
748 "value": "Integration is working fine",
814 "title": "Status",
749 "short": False
815 "value": "Integration is working fine",
750 }
816 "short": False,
751 ]}
817 }
752 ]
818 ],
819 }
820 ],
753 }
821 }
754 try:
822 try:
755 client.make_request(data=test_data)
823 client.make_request(data=test_data)
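
For reference, SlackIntegration.create_client wraps a Slack incoming-webhook URL; the equivalent raw call with requests would be roughly the following (URL is a placeholder and this is not the app's actual client, which builds the richer attachment payload shown above):

import json

import requests

webhook_url = "https://hooks.slack.com/services/T000/B000/XXXX"  # placeholder
payload = {"text": "Testing integration channel"}
resp = requests.post(
    webhook_url,
    data=json.dumps(payload),
    headers={"Content-Type": "application/json"},
    timeout=10,
)
resp.raise_for_status()  # Slack answers 200 "ok" when the message is accepted
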
@@ -759,44 +827,52 b' class IntegrationSlackForm(ReactorForm):'
759
827
760 class IntegrationWebhooksForm(ReactorForm):
828 class IntegrationWebhooksForm(ReactorForm):
761 reports_webhook = wtforms.StringField(
829 reports_webhook = wtforms.StringField(
762 'Reports webhook',
830 "Reports webhook",
763 filters=[strip_filter],
831 filters=[strip_filter],
764 validators=[wtforms.validators.DataRequired()])
832 validators=[wtforms.validators.DataRequired()],
833 )
765 alerts_webhook = wtforms.StringField(
834 alerts_webhook = wtforms.StringField(
766 'Alerts webhook',
835 "Alerts webhook",
767 filters=[strip_filter],
836 filters=[strip_filter],
768 validators=[wtforms.validators.DataRequired()])
837 validators=[wtforms.validators.DataRequired()],
769 submit = wtforms.SubmitField(_('Setup webhooks'))
838 )
770 ignore_labels = ['submit']
839 submit = wtforms.SubmitField(_("Setup webhooks"))
771 css_classes = {'submit': 'btn btn-primary'}
840 ignore_labels = ["submit"]
841 css_classes = {"submit": "btn btn-primary"}
772
842
773
843
774 class IntegrationJiraForm(ReactorForm):
844 class IntegrationJiraForm(ReactorForm):
775 host_name = wtforms.StringField(
845 host_name = wtforms.StringField(
776 'Server URL',
846 "Server URL",
777 filters=[strip_filter],
847 filters=[strip_filter],
778 validators=[wtforms.validators.DataRequired()])
848 validators=[wtforms.validators.DataRequired()],
849 )
779 user_name = wtforms.StringField(
850 user_name = wtforms.StringField(
780 'Username',
851 "Username",
781 filters=[strip_filter],
852 filters=[strip_filter],
782 validators=[wtforms.validators.DataRequired()])
853 validators=[wtforms.validators.DataRequired()],
854 )
783 password = wtforms.PasswordField(
855 password = wtforms.PasswordField(
784 'Password',
856 "Password",
785 filters=[strip_filter],
857 filters=[strip_filter],
786 validators=[wtforms.validators.DataRequired()])
858 validators=[wtforms.validators.DataRequired()],
859 )
787 project = wtforms.StringField(
860 project = wtforms.StringField(
788 'Project key',
861 "Project key",
789 filters=[uppercase_filter, strip_filter],
862 filters=[uppercase_filter, strip_filter],
790 validators=[wtforms.validators.DataRequired()])
863 validators=[wtforms.validators.DataRequired()],
864 )
791
865
792 def validate_project(self, field):
866 def validate_project(self, field):
793 if not field.data:
867 if not field.data:
794 return
868 return
795 try:
869 try:
796 client = JiraClient(self.user_name.data,
870 client = JiraClient(
797 self.password.data,
871 self.user_name.data,
798 self.host_name.data,
872 self.password.data,
799 self.project.data)
873 self.host_name.data,
874 self.project.data,
875 )
800 except Exception as exc:
876 except Exception as exc:
801 raise wtforms.validators.ValidationError(str(exc))
877 raise wtforms.validators.ValidationError(str(exc))
802
878
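
Note: the validate_api_token / validate_webhook_url / validate_project methods above rely on WTForms' inline-validator convention: a form method named validate_<fieldname> runs automatically during form.validate(), after the field's declared validators. A minimal self-contained sketch (plain wtforms.Form rather than the project's ReactorForm):

import wtforms

class DemoForm(wtforms.Form):
    project = wtforms.StringField("Project key")

    def validate_project(self, field):
        # invoked automatically by form.validate()
        if field.data and not field.data.isupper():
            raise wtforms.validators.ValidationError("Project key must be uppercase")

form = DemoForm(data={"project": "abc"})
assert form.validate() is False
assert form.errors == {"project": ["Project key must be uppercase"]}
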
@@ -809,88 +885,97 b' class IntegrationJiraForm(ReactorForm):'
809 def get_deletion_form(resource):
885 def get_deletion_form(resource):
810 class F(ReactorForm):
886 class F(ReactorForm):
811 application_name = wtforms.StringField(
887 application_name = wtforms.StringField(
812 'Application Name',
888 "Application Name",
813 filters=[strip_filter],
889 filters=[strip_filter],
814 validators=[wtforms.validators.AnyOf([resource.resource_name])])
890 validators=[wtforms.validators.AnyOf([resource.resource_name])],
891 )
815 resource_id = wtforms.HiddenField(default=resource.resource_id)
892 resource_id = wtforms.HiddenField(default=resource.resource_id)
816 submit = wtforms.SubmitField(_('Delete my application'))
893 submit = wtforms.SubmitField(_("Delete my application"))
817 ignore_labels = ['submit']
894 ignore_labels = ["submit"]
818 css_classes = {'submit': 'btn btn-danger'}
895 css_classes = {"submit": "btn btn-danger"}
819
896
820 return F
897 return F
821
898
822
899
823 class ChangeApplicationOwnerForm(ReactorForm):
900 class ChangeApplicationOwnerForm(ReactorForm):
824 password = wtforms.PasswordField(
901 password = wtforms.PasswordField(
825 'Password',
902 "Password",
826 filters=[strip_filter],
903 filters=[strip_filter],
827 validators=[old_password_validator,
904 validators=[old_password_validator, wtforms.validators.DataRequired()],
828 wtforms.validators.DataRequired()])
905 )
829
906
830 user_name = wtforms.StringField(
907 user_name = wtforms.StringField(
831 'New owners username',
908 "New owners username",
832 filters=[strip_filter],
909 filters=[strip_filter],
833 validators=[found_username_validator,
910 validators=[found_username_validator, wtforms.validators.DataRequired()],
834 wtforms.validators.DataRequired()])
911 )
835 submit = wtforms.SubmitField(_('Transfer ownership of application'))
912 submit = wtforms.SubmitField(_("Transfer ownership of application"))
836 ignore_labels = ['submit']
913 ignore_labels = ["submit"]
837 css_classes = {'submit': 'btn btn-danger'}
914 css_classes = {"submit": "btn btn-danger"}
838
915
839
916
840 def default_filename():
917 def default_filename():
841 return 'Invoice %s' % datetime.datetime.utcnow().strftime('%Y/%m')
918 return "Invoice %s" % datetime.datetime.utcnow().strftime("%Y/%m")
842
919
843
920
844 class FileUploadForm(ReactorForm):
921 class FileUploadForm(ReactorForm):
845 title = wtforms.StringField('File Title',
922 title = wtforms.StringField(
846 default=default_filename,
923 "File Title",
847 validators=[wtforms.validators.DataRequired()])
924 default=default_filename,
848 file = wtforms.FileField('File')
925 validators=[wtforms.validators.DataRequired()],
926 )
927 file = wtforms.FileField("File")
849
928
850 def validate_file(self, field):
929 def validate_file(self, field):
851 if not hasattr(field.data, 'file'):
930 if not hasattr(field.data, "file"):
852 raise wtforms.ValidationError('File is missing')
931 raise wtforms.ValidationError("File is missing")
853
932
854 submit = wtforms.SubmitField(_('Upload'))
933 submit = wtforms.SubmitField(_("Upload"))
855
934
856
935
857 def get_partition_deletion_form(es_indices, pg_indices):
936 def get_partition_deletion_form(es_indices, pg_indices):
858 class F(ReactorForm):
937 class F(ReactorForm):
859 es_index = wtforms.SelectMultipleField('Elasticsearch',
938 es_index = wtforms.SelectMultipleField(
860 choices=[(ix, '') for ix in
939 "Elasticsearch", choices=[(ix, "") for ix in es_indices]
861 es_indices])
940 )
862 pg_index = wtforms.SelectMultipleField('pg',
941 pg_index = wtforms.SelectMultipleField(
863 choices=[(ix, '') for ix in
942 "pg", choices=[(ix, "") for ix in pg_indices]
864 pg_indices])
943 )
865 confirm = wtforms.TextField('Confirm',
944 confirm = wtforms.TextField(
866 filters=[uppercase_filter, strip_filter],
945 "Confirm",
867 validators=[
946 filters=[uppercase_filter, strip_filter],
868 wtforms.validators.AnyOf(['CONFIRM']),
947 validators=[
869 wtforms.validators.DataRequired()])
948 wtforms.validators.AnyOf(["CONFIRM"]),
870 ignore_labels = ['submit']
949 wtforms.validators.DataRequired(),
871 css_classes = {'submit': 'btn btn-danger'}
950 ],
951 )
952 ignore_labels = ["submit"]
953 css_classes = {"submit": "btn btn-danger"}
872
954
873 return F
955 return F
874
956
875
957
876 class GroupCreateForm(ReactorForm):
958 class GroupCreateForm(ReactorForm):
877 group_name = wtforms.StringField(
959 group_name = wtforms.StringField(
878 _('Group Name'),
960 _("Group Name"),
879 filters=[strip_filter],
961 filters=[strip_filter],
880 validators=[
962 validators=[
881 wtforms.validators.Length(min=2, max=50),
963 wtforms.validators.Length(min=2, max=50),
882 unique_groupname_validator,
964 unique_groupname_validator,
883 wtforms.validators.DataRequired()
965 wtforms.validators.DataRequired(),
884 ])
966 ],
885 description = wtforms.StringField(_('Group description'))
967 )
968 description = wtforms.StringField(_("Group description"))
886
969
887
970
888 time_choices = [(k, v['label'],) for k, v in h.time_deltas.items()]
971 time_choices = [(k, v["label"]) for k, v in h.time_deltas.items()]
889
972
890
973
891 class AuthTokenCreateForm(ReactorForm):
974 class AuthTokenCreateForm(ReactorForm):
892 description = wtforms.StringField(_('Token description'))
975 description = wtforms.StringField(_("Token description"))
893 expires = wtforms.SelectField('Expires',
976 expires = wtforms.SelectField(
894 coerce=lambda x: x,
977 "Expires",
895 choices=time_choices,
978 coerce=lambda x: x,
896 validators=[wtforms.validators.Optional()])
979 choices=time_choices,
980 validators=[wtforms.validators.Optional()],
981 )
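
Note: get_deletion_form and get_partition_deletion_form above share one pattern: the form class is built inside a closure so a validator can capture a per-request value, e.g. a name that must be retyped to confirm a destructive action. A reduced sketch:

import wtforms

def get_confirm_form(expected_name):
    class F(wtforms.Form):
        # AnyOf captures a value that is only known at request time
        application_name = wtforms.StringField(
            "Application Name",
            validators=[wtforms.validators.AnyOf([expected_name])],
        )
    return F

FormClass = get_confirm_form("my-app")
assert FormClass(data={"application_name": "my-app"}).validate()
assert not FormClass(data={"application_name": "other-app"}).validate()
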
@@ -24,20 +24,20 b' from appenlight_client.exceptions import get_current_traceback'
24
24
25
25
26 def generate_random_string(chars=10):
26 def generate_random_string(chars=10):
27 return ''.join(random.sample(string.ascii_letters * 2 + string.digits,
27 return "".join(random.sample(string.ascii_letters * 2 + string.digits, chars))
28 chars))
29
28
30
29
31 def to_integer_safe(input):
30 def to_integer_safe(input):
32 try:
31 try:
33 return int(input)
32 return int(input)
34 except (TypeError, ValueError,):
33 except (TypeError, ValueError):
35 return None
34 return None
36
35
37
36
38 def print_traceback(log):
37 def print_traceback(log):
39 traceback = get_current_traceback(skip=1, show_hidden_frames=True,
38 traceback = get_current_traceback(
40 ignore_system_exceptions=True)
39 skip=1, show_hidden_frames=True, ignore_system_exceptions=True
40 )
41 exception_text = traceback.exception
41 exception_text = traceback.exception
42 log.error(exception_text)
42 log.error(exception_text)
43 log.error(traceback.plaintext)
43 log.error(traceback.plaintext)
@@ -45,6 +45,5 b' def print_traceback(log):'
45
45
46
46
47 def get_callable(import_string):
47 def get_callable(import_string):
48 import_module, indexer_callable = import_string.split(':')
48 import_module, indexer_callable = import_string.split(":")
49 return getattr(importlib.import_module(import_module),
49 return getattr(importlib.import_module(import_module), indexer_callable)
50 indexer_callable)
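
Note: get_callable resolves a "module:attribute" import string to the attribute itself, e.g.:

import importlib

def get_callable(import_string):
    import_module, indexer_callable = import_string.split(":")
    return getattr(importlib.import_module(import_module), indexer_callable)

join = get_callable("os.path:join")
join("a", "b")  # -> 'a/b' on POSIX
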
@@ -27,21 +27,18 b' log = logging.getLogger(__name__)'
27
27
28 def rate_limiting(request, resource, section, to_increment=1):
28 def rate_limiting(request, resource, section, to_increment=1):
29 tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
29 tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
30 key = REDIS_KEYS['rate_limits'][section].format(tsample,
30 key = REDIS_KEYS["rate_limits"][section].format(tsample, resource.resource_id)
31 resource.resource_id)
32 redis_pipeline = request.registry.redis_conn.pipeline()
31 redis_pipeline = request.registry.redis_conn.pipeline()
33 redis_pipeline.incr(key, to_increment)
32 redis_pipeline.incr(key, to_increment)
34 redis_pipeline.expire(key, 3600 * 24)
33 redis_pipeline.expire(key, 3600 * 24)
35 results = redis_pipeline.execute()
34 results = redis_pipeline.execute()
36 current_count = results[0]
35 current_count = results[0]
37 config = ConfigService.by_key_and_section(section, 'global')
36 config = ConfigService.by_key_and_section(section, "global")
38 limit = config.value if config else 1000
37 limit = config.value if config else 1000
39 if current_count > int(limit):
38 if current_count > int(limit):
40 log.info('RATE LIMITING: {}: {}, {}'.format(
39 log.info("RATE LIMITING: {}: {}, {}".format(section, resource, current_count))
41 section, resource, current_count))
40 abort_msg = "Rate limits are in effect for this application"
42 abort_msg = 'Rate limits are in effect for this application'
41 raise HTTPTooManyRequests(abort_msg, headers={"X-AppEnlight": abort_msg})
43 raise HTTPTooManyRequests(abort_msg,
44 headers={'X-AppEnlight': abort_msg})
45
42
46
43
47 def check_cors(request, application, should_return=True):
44 def check_cors(request, application, should_return=True):
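
Note: the limiter above is a fixed-window counter: one Redis key per (section, resource, minute), INCR-ed in a pipeline and given a 24-hour TTL so stale windows expire on their own. An in-memory sketch of the same logic (no Redis; the key template and the HTTP 429 abort are simplified):

import datetime
from collections import defaultdict

counters = defaultdict(int)  # stands in for Redis INCR

def rate_limiting(resource_id, section, limit=1000, to_increment=1):
    # truncate to the minute so all events in that minute share one key
    tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
    key = "rate_limits:{}:{}:{}".format(section, tsample.isoformat(), resource_id)
    counters[key] += to_increment
    if counters[key] > int(limit):
        raise RuntimeError("Rate limits are in effect for this application")

for _ in range(1000):
    rate_limiting(1, "reports")
# the next call within the same minute would raise
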
@@ -50,31 +47,34 b' def check_cors(request, application, should_return=True):'
50 application, otherwise return 403
47 application, otherwise return 403
51 """
48 """
52 origin_found = False
49 origin_found = False
53 origin = request.headers.get('Origin')
50 origin = request.headers.get("Origin")
54 if should_return:
51 if should_return:
55 log.info('CORS for %s' % origin)
52 log.info("CORS for %s" % origin)
56 if not origin:
53 if not origin:
57 return False
54 return False
58 for domain in application.domains.split('\n'):
55 for domain in application.domains.split("\n"):
59 if domain in origin:
56 if domain in origin:
60 origin_found = True
57 origin_found = True
61 if origin_found:
58 if origin_found:
62 request.response.headers.add('Access-Control-Allow-Origin', origin)
59 request.response.headers.add("Access-Control-Allow-Origin", origin)
63 request.response.headers.add('XDomainRequestAllowed', '1')
60 request.response.headers.add("XDomainRequestAllowed", "1")
64 request.response.headers.add('Access-Control-Allow-Methods',
61 request.response.headers.add(
65 'GET, POST, OPTIONS')
62 "Access-Control-Allow-Methods", "GET, POST, OPTIONS"
66 request.response.headers.add('Access-Control-Allow-Headers',
63 )
67 'Accept-Encoding, Accept-Language, '
64 request.response.headers.add(
68 'Content-Type, '
65 "Access-Control-Allow-Headers",
69 'Depth, User-Agent, X-File-Size, '
66 "Accept-Encoding, Accept-Language, "
70 'X-Requested-With, If-Modified-Since, '
67 "Content-Type, "
71 'X-File-Name, '
68 "Depth, User-Agent, X-File-Size, "
72 'Cache-Control, Host, Pragma, Accept, '
69 "X-Requested-With, If-Modified-Since, "
73 'Origin, Connection, '
70 "X-File-Name, "
74 'Referer, Cookie, '
71 "Cache-Control, Host, Pragma, Accept, "
75 'X-appenlight-public-api-key, '
72 "Origin, Connection, "
76 'x-appenlight-public-api-key')
73 "Referer, Cookie, "
77 request.response.headers.add('Access-Control-Max-Age', '86400')
74 "X-appenlight-public-api-key, "
75 "x-appenlight-public-api-key",
76 )
77 request.response.headers.add("Access-Control-Max-Age", "86400")
78 return request.response
78 return request.response
79 else:
79 else:
80 return HTTPForbidden()
80 return HTTPForbidden()
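
Note: the origin check above is a plain substring test (domain in origin) over a newline-separated domain list; in isolation the matching behaves like this:

def origin_allowed(origin, domains):
    # mirrors the loop in check_cors: substring match per configured domain
    return any(domain in origin for domain in domains.split("\n") if domain)

assert origin_allowed("https://app.example.com", "example.com")
# substring matching also accepts any Origin that merely contains the
# configured domain somewhere in the string
assert origin_allowed("https://example.com.other.net", "example.com")
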
@@ -42,23 +42,27 b' def hashgen(namespace, fn, to_str=compat.string_type):'
42 """
42 """
43
43
44 if namespace is None:
44 if namespace is None:
45 namespace = '%s:%s' % (fn.__module__, fn.__name__)
45 namespace = "%s:%s" % (fn.__module__, fn.__name__)
46 else:
46 else:
47 namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
47 namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)
48
48
49 args = inspect.getargspec(fn)
49 args = inspect.getargspec(fn)
50 has_self = args[0] and args[0][0] in ('self', 'cls')
50 has_self = args[0] and args[0][0] in ("self", "cls")
51
51
52 def generate_key(*args, **kw):
52 def generate_key(*args, **kw):
53 if kw:
53 if kw:
54 raise ValueError(
54 raise ValueError(
55 "dogpile.cache's default key creation "
55 "dogpile.cache's default key creation "
56 "function does not accept keyword arguments.")
56 "function does not accept keyword arguments."
57 )
57 if has_self:
58 if has_self:
58 args = args[1:]
59 args = args[1:]
59
60
60 return namespace + "|" + hashlib.sha1(
61 return (
61 " ".join(map(to_str, args)).encode('utf8')).hexdigest()
62 namespace
63 + "|"
64 + hashlib.sha1(" ".join(map(to_str, args)).encode("utf8")).hexdigest()
65 )
62
66
63 return generate_key
67 return generate_key
64
68
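
Note: the resulting dogpile cache key is the namespace joined to a SHA-1 of the stringified positional arguments; reproduced in isolation:

import hashlib

def make_key(namespace, *args):
    # same shape as generate_key above: "<module:func[|ns]>|<sha1 of args>"
    digest = hashlib.sha1(" ".join(map(str, args)).encode("utf8")).hexdigest()
    return namespace + "|" + digest

make_key("appenlight.models:get_report", 42, "2017-01-01")
# -> 'appenlight.models:get_report|<40 hex chars>'
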
@@ -68,116 +72,97 b' class CacheRegions(object):'
68 config_redis = {"arguments": settings}
72 config_redis = {"arguments": settings}
69
73
70 self.redis_min_1 = make_region(
74 self.redis_min_1 = make_region(
71 function_key_generator=hashgen,
75 function_key_generator=hashgen, key_mangler=key_mangler
72 key_mangler=key_mangler).configure(
76 ).configure(
73 "dogpile.cache.redis",
77 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
74 expiration_time=60,
78 )
75 **copy.deepcopy(config_redis))
76 self.redis_min_5 = make_region(
79 self.redis_min_5 = make_region(
77 function_key_generator=hashgen,
80 function_key_generator=hashgen, key_mangler=key_mangler
78 key_mangler=key_mangler).configure(
81 ).configure(
79 "dogpile.cache.redis",
82 "dogpile.cache.redis", expiration_time=300, **copy.deepcopy(config_redis)
80 expiration_time=300,
83 )
81 **copy.deepcopy(config_redis))
82
84
83 self.redis_min_10 = make_region(
85 self.redis_min_10 = make_region(
84 function_key_generator=hashgen,
86 function_key_generator=hashgen, key_mangler=key_mangler
85 key_mangler=key_mangler).configure(
87 ).configure(
86 "dogpile.cache.redis",
88 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
87 expiration_time=60,
89 )
88 **copy.deepcopy(config_redis))
89
90
90 self.redis_min_60 = make_region(
91 self.redis_min_60 = make_region(
91 function_key_generator=hashgen,
92 function_key_generator=hashgen, key_mangler=key_mangler
92 key_mangler=key_mangler).configure(
93 ).configure(
93 "dogpile.cache.redis",
94 "dogpile.cache.redis", expiration_time=3600, **copy.deepcopy(config_redis)
94 expiration_time=3600,
95 )
95 **copy.deepcopy(config_redis))
96
96
97 self.redis_sec_1 = make_region(
97 self.redis_sec_1 = make_region(
98 function_key_generator=hashgen,
98 function_key_generator=hashgen, key_mangler=key_mangler
99 key_mangler=key_mangler).configure(
99 ).configure(
100 "dogpile.cache.redis",
100 "dogpile.cache.redis", expiration_time=1, **copy.deepcopy(config_redis)
101 expiration_time=1,
101 )
102 **copy.deepcopy(config_redis))
103
102
104 self.redis_sec_5 = make_region(
103 self.redis_sec_5 = make_region(
105 function_key_generator=hashgen,
104 function_key_generator=hashgen, key_mangler=key_mangler
106 key_mangler=key_mangler).configure(
105 ).configure(
107 "dogpile.cache.redis",
106 "dogpile.cache.redis", expiration_time=5, **copy.deepcopy(config_redis)
108 expiration_time=5,
107 )
109 **copy.deepcopy(config_redis))
110
108
111 self.redis_sec_30 = make_region(
109 self.redis_sec_30 = make_region(
112 function_key_generator=hashgen,
110 function_key_generator=hashgen, key_mangler=key_mangler
113 key_mangler=key_mangler).configure(
111 ).configure(
114 "dogpile.cache.redis",
112 "dogpile.cache.redis", expiration_time=30, **copy.deepcopy(config_redis)
115 expiration_time=30,
113 )
116 **copy.deepcopy(config_redis))
117
114
118 self.redis_day_1 = make_region(
115 self.redis_day_1 = make_region(
119 function_key_generator=hashgen,
116 function_key_generator=hashgen, key_mangler=key_mangler
120 key_mangler=key_mangler).configure(
117 ).configure(
121 "dogpile.cache.redis",
118 "dogpile.cache.redis", expiration_time=86400, **copy.deepcopy(config_redis)
122 expiration_time=86400,
119 )
123 **copy.deepcopy(config_redis))
124
120
125 self.redis_day_7 = make_region(
121 self.redis_day_7 = make_region(
126 function_key_generator=hashgen,
122 function_key_generator=hashgen, key_mangler=key_mangler
127 key_mangler=key_mangler).configure(
123 ).configure(
128 "dogpile.cache.redis",
124 "dogpile.cache.redis",
129 expiration_time=86400 * 7,
125 expiration_time=86400 * 7,
130 **copy.deepcopy(config_redis))
126 **copy.deepcopy(config_redis)
127 )
131
128
132 self.redis_day_30 = make_region(
129 self.redis_day_30 = make_region(
133 function_key_generator=hashgen,
130 function_key_generator=hashgen, key_mangler=key_mangler
134 key_mangler=key_mangler).configure(
131 ).configure(
135 "dogpile.cache.redis",
132 "dogpile.cache.redis",
136 expiration_time=86400 * 30,
133 expiration_time=86400 * 30,
137 **copy.deepcopy(config_redis))
134 **copy.deepcopy(config_redis)
135 )
138
136
139 self.memory_day_1 = make_region(
137 self.memory_day_1 = make_region(
140 function_key_generator=hashgen,
138 function_key_generator=hashgen, key_mangler=key_mangler
141 key_mangler=key_mangler).configure(
139 ).configure(
142 "dogpile.cache.memory",
140 "dogpile.cache.memory", expiration_time=86400, **copy.deepcopy(config_redis)
143 expiration_time=86400,
141 )
144 **copy.deepcopy(config_redis))
145
142
146 self.memory_sec_1 = make_region(
143 self.memory_sec_1 = make_region(
147 function_key_generator=hashgen,
144 function_key_generator=hashgen, key_mangler=key_mangler
148 key_mangler=key_mangler).configure(
145 ).configure("dogpile.cache.memory", expiration_time=1)
149 "dogpile.cache.memory",
150 expiration_time=1)
151
146
152 self.memory_sec_5 = make_region(
147 self.memory_sec_5 = make_region(
153 function_key_generator=hashgen,
148 function_key_generator=hashgen, key_mangler=key_mangler
154 key_mangler=key_mangler).configure(
149 ).configure("dogpile.cache.memory", expiration_time=5)
155 "dogpile.cache.memory",
156 expiration_time=5)
157
150
158 self.memory_min_1 = make_region(
151 self.memory_min_1 = make_region(
159 function_key_generator=hashgen,
152 function_key_generator=hashgen, key_mangler=key_mangler
160 key_mangler=key_mangler).configure(
153 ).configure("dogpile.cache.memory", expiration_time=60)
161 "dogpile.cache.memory",
162 expiration_time=60)
163
154
164 self.memory_min_5 = make_region(
155 self.memory_min_5 = make_region(
165 function_key_generator=hashgen,
156 function_key_generator=hashgen, key_mangler=key_mangler
166 key_mangler=key_mangler).configure(
157 ).configure("dogpile.cache.memory", expiration_time=300)
167 "dogpile.cache.memory",
168 expiration_time=300)
169
158
170 self.memory_min_10 = make_region(
159 self.memory_min_10 = make_region(
171 function_key_generator=hashgen,
160 function_key_generator=hashgen, key_mangler=key_mangler
172 key_mangler=key_mangler).configure(
161 ).configure("dogpile.cache.memory", expiration_time=600)
173 "dogpile.cache.memory",
174 expiration_time=600)
175
162
176 self.memory_min_60 = make_region(
163 self.memory_min_60 = make_region(
177 function_key_generator=hashgen,
164 function_key_generator=hashgen, key_mangler=key_mangler
178 key_mangler=key_mangler).configure(
165 ).configure("dogpile.cache.memory", expiration_time=3600)
179 "dogpile.cache.memory",
180 expiration_time=3600)
181
166
182
167
183 def get_region(region):
168 def get_region(region):
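
Note: every attribute above is an ordinary dogpile.cache region; they differ only in backend and expiration. Usage is the standard decorator, e.g.:

from dogpile.cache import make_region

region = make_region().configure("dogpile.cache.memory", expiration_time=60)

@region.cache_on_arguments()
def expensive(x):
    return x * 2

expensive(21)  # computed
expensive(21)  # served from cache for up to 60 seconds
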
@@ -5,6 +5,7 b' from pyramid.config import Configurator'
5
5
6 log = logging.getLogger(__name__)
6 log = logging.getLogger(__name__)
7
7
8
8 class InspectProxy(object):
9 class InspectProxy(object):
9 """
10 """
10 Proxy to the `inspect` module that allows us to use the pyramid include
11 Proxy to the `inspect` module that allows us to use the pyramid include
@@ -17,7 +18,7 b' class InspectProxy(object):'
17 """
18 """
18 if inspect.ismethod(cyfunction):
19 if inspect.ismethod(cyfunction):
19 cyfunction = cyfunction.im_func
20 cyfunction = cyfunction.im_func
20 return getattr(cyfunction, 'func_code')
21 return getattr(cyfunction, "func_code")
21
22
22 def getmodule(self, *args, **kwds):
23 def getmodule(self, *args, **kwds):
23 """
24 """
@@ -40,14 +41,14 b' class InspectProxy(object):'
40 """
41 """
41 # Check if it's called to look up the source file that contains the
42 # Check if it's called to look up the source file that contains the
42 # magic pyramid `includeme` callable.
43 # magic pyramid `includeme` callable.
43 if getattr(obj, '__name__') == 'includeme':
44 if getattr(obj, "__name__") == "includeme":
44 try:
45 try:
45 return inspect.getfile(obj)
46 return inspect.getfile(obj)
46 except TypeError as e:
47 except TypeError as e:
47 # Cython functions are not recognized as functions by the
48 # Cython functions are not recognized as functions by the
48 # inspect module. We have to unpack the func_code attribute
49 # inspect module. We have to unpack the func_code attribute
49 # ourselves.
50 # ourselves.
50 if 'cyfunction' in e.message:
51 if "cyfunction" in e.message:
51 obj = self._get_cyfunction_func_code(obj)
52 obj = self._get_cyfunction_func_code(obj)
52 return inspect.getfile(obj)
53 return inspect.getfile(obj)
53 raise
54 raise
@@ -60,33 +61,32 b' class CythonCompatConfigurator(Configurator):'
60 Customized configurator to replace the inspect class attribute with
61 Customized configurator to replace the inspect class attribute with
61 a custom one that is cython compatible.
62 a custom one that is cython compatible.
62 """
63 """
64
63 inspect = InspectProxy()
65 inspect = InspectProxy()
64
66
65
67
66 def register_appenlight_plugin(config, plugin_name, plugin_config):
68 def register_appenlight_plugin(config, plugin_name, plugin_config):
67 def register():
69 def register():
68 log.warning('Registering plugin: {}'.format(plugin_name))
70 log.warning("Registering plugin: {}".format(plugin_name))
69 if plugin_name not in config.registry.appenlight_plugins:
71 if plugin_name not in config.registry.appenlight_plugins:
70 config.registry.appenlight_plugins[plugin_name] = {
72 config.registry.appenlight_plugins[plugin_name] = {
71 'javascript': None,
73 "javascript": None,
72 'static': None,
74 "static": None,
73 'css': None,
75 "css": None,
74 'celery_tasks': None,
76 "celery_tasks": None,
75 'celery_beats': None,
77 "celery_beats": None,
76 'fulltext_indexer': None,
78 "fulltext_indexer": None,
77 'sqlalchemy_migrations': None,
79 "sqlalchemy_migrations": None,
78 'default_values_setter': None,
80 "default_values_setter": None,
79 'header_html': None,
81 "header_html": None,
80 'resource_types': [],
82 "resource_types": [],
81 'url_gen': None
83 "url_gen": None,
82 }
84 }
83 config.registry.appenlight_plugins[plugin_name].update(
85 config.registry.appenlight_plugins[plugin_name].update(plugin_config)
84 plugin_config)
85 # inform AE what kind of resource types we have available
86 # inform AE what kind of resource types we have available
86 # so we can avoid failing when a plugin is removed but data
87 # so we can avoid failing when a plugin is removed but data
87 # is still present in the db
88 # is still present in the db
88 if plugin_config.get('resource_types'):
89 if plugin_config.get("resource_types"):
89 config.registry.resource_types.extend(
90 config.registry.resource_types.extend(plugin_config["resource_types"])
90 plugin_config['resource_types'])
91
91
92 config.action('appenlight_plugin={}'.format(plugin_name), register)
92 config.action("appenlight_plugin={}".format(plugin_name), register)
@@ -23,20 +23,20 b' ENCRYPTION_SECRET = None'
23 def encrypt_fernet(value):
23 def encrypt_fernet(value):
24 # avoid double encryption
24 # avoid double encryption
25 # not sure if this is needed but it won't hurt too much to have this
25 # not sure if this is needed but it won't hurt too much to have this
26 if value.startswith('enc$fernet$'):
26 if value.startswith("enc$fernet$"):
27 return value
27 return value
28 f = Fernet(ENCRYPTION_SECRET)
28 f = Fernet(ENCRYPTION_SECRET)
29 return 'enc$fernet${}'.format(f.encrypt(value.encode('utf8')).decode('utf8'))
29 return "enc$fernet${}".format(f.encrypt(value.encode("utf8")).decode("utf8"))
30
30
31
31
32 def decrypt_fernet(value):
32 def decrypt_fernet(value):
33 parts = value.split('$', 3)
33 parts = value.split("$", 3)
34 if not len(parts) == 3:
34 if not len(parts) == 3:
35 # not encrypted values
35 # not encrypted values
36 return value
36 return value
37 else:
37 else:
38 f = Fernet(ENCRYPTION_SECRET)
38 f = Fernet(ENCRYPTION_SECRET)
39 decrypted_data = f.decrypt(parts[2].encode('utf8')).decode('utf8')
39 decrypted_data = f.decrypt(parts[2].encode("utf8")).decode("utf8")
40 return decrypted_data
40 return decrypted_data
41
41
42
42
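
Note: a round-trip sketch of the two helpers, assuming ENCRYPTION_SECRET holds a Fernet key (cryptography's Fernet.generate_key() produces one):

from cryptography.fernet import Fernet

secret = Fernet.generate_key()  # stands in for ENCRYPTION_SECRET
f = Fernet(secret)

token = "enc$fernet${}".format(f.encrypt("hunter2".encode("utf8")).decode("utf8"))
parts = token.split("$", 3)  # -> ['enc', 'fernet', '<urlsafe-base64 token>']
assert len(parts) == 3
assert f.decrypt(parts[2].encode("utf8")).decode("utf8") == "hunter2"
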
@@ -1,4 +1,5 b''
1 import collections
1 import collections
2
2 # -*- coding: utf-8 -*-
3 # -*- coding: utf-8 -*-
3
4
4 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
5 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
@@ -20,13 +21,14 b' class StupidEnum(object):'
20 @classmethod
21 @classmethod
21 def set_inverse(cls):
22 def set_inverse(cls):
22 cls._inverse_values = dict(
23 cls._inverse_values = dict(
23 (y, x) for x, y in vars(cls).items() if
24 (y, x)
24 not x.startswith('_') and not callable(y)
25 for x, y in vars(cls).items()
26 if not x.startswith("_") and not callable(y)
25 )
27 )
26
28
27 @classmethod
29 @classmethod
28 def key_from_value(cls, value):
30 def key_from_value(cls, value):
29 if not hasattr(cls, '_inverse_values'):
31 if not hasattr(cls, "_inverse_values"):
30 cls.set_inverse()
32 cls.set_inverse()
31 return cls._inverse_values.get(value)
33 return cls._inverse_values.get(value)
32
34
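
Note: a usage sketch, given the StupidEnum class above: subclasses declare plain class attributes, and the first key_from_value call lazily builds the value-to-name map:

class LogLevel(StupidEnum):
    DEBUG = 10
    INFO = 20
    ERROR = 40

assert LogLevel.key_from_value(20) == "INFO"
assert LogLevel.key_from_value(99) is None  # unknown values map to None
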
@@ -25,7 +25,7 b' import functools'
25 import decimal
25 import decimal
26 import imp
26 import imp
27
27
28 __all__ = ['json', 'simplejson', 'stdlibjson']
28 __all__ = ["json", "simplejson", "stdlibjson"]
29
29
30
30
31 def _is_aware(value):
31 def _is_aware(value):
@@ -35,8 +35,7 b' def _is_aware(value):'
35 The logic is described in Python's docs:
35 The logic is described in Python's docs:
36 http://docs.python.org/library/datetime.html#datetime.tzinfo
36 http://docs.python.org/library/datetime.html#datetime.tzinfo
37 """
37 """
38 return (value.tzinfo is not None
38 return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None
39 and value.tzinfo.utcoffset(value) is not None)
40
39
41
40
42 def _obj_dump(obj):
41 def _obj_dump(obj):
@@ -55,8 +54,8 b' def _obj_dump(obj):'
55 r = obj.isoformat()
54 r = obj.isoformat()
56 # if obj.microsecond:
55 # if obj.microsecond:
57 # r = r[:23] + r[26:]
56 # r = r[:23] + r[26:]
58 if r.endswith('+00:00'):
57 if r.endswith("+00:00"):
59 r = r[:-6] + 'Z'
58 r = r[:-6] + "Z"
60 return r
59 return r
61 elif isinstance(obj, datetime.date):
60 elif isinstance(obj, datetime.date):
62 return obj.isoformat()
61 return obj.isoformat()
@@ -71,7 +70,7 b' def _obj_dump(obj):'
71 return r
70 return r
72 elif isinstance(obj, set):
71 elif isinstance(obj, set):
73 return list(obj)
72 return list(obj)
74 elif hasattr(obj, '__json__'):
73 elif hasattr(obj, "__json__"):
75 if callable(obj.__json__):
74 if callable(obj.__json__):
76 return obj.__json__()
75 return obj.__json__()
77 else:
76 else:
@@ -83,8 +82,7 b' def _obj_dump(obj):'
83 # Import simplejson
82 # Import simplejson
84 try:
83 try:
85 # import simplejson initially
84 # import simplejson initially
86 _sj = imp.load_module('_sj', *imp.find_module('simplejson'))
85 _sj = imp.load_module("_sj", *imp.find_module("simplejson"))
87
88
86
89 def extended_encode(obj):
87 def extended_encode(obj):
90 try:
88 try:
@@ -93,22 +91,21 b' try:'
93 pass
91 pass
94 raise TypeError("%r is not JSON serializable" % (obj,))
92 raise TypeError("%r is not JSON serializable" % (obj,))
95
93
96
97 # we handle decimals on our own; it unifies the behavior of json vs
94 # we handle decimals on our own; it unifies the behavior of json vs
98 # simplejson
95 # simplejson
99 sj_version = [int(x) for x in _sj.__version__.split('.')]
96 sj_version = [int(x) for x in _sj.__version__.split(".")]
100 major, minor = sj_version[0], sj_version[1]
97 major, minor = sj_version[0], sj_version[1]
101 if major < 2 or (major == 2 and minor < 1):
98 if major < 2 or (major == 2 and minor < 1):
102 # simplejson < 2.1 doesn't support use_decimal
99 # simplejson < 2.1 doesn't support use_decimal
103 _sj.dumps = functools.partial(
100 _sj.dumps = functools.partial(_sj.dumps, default=extended_encode)
104 _sj.dumps, default=extended_encode)
101 _sj.dump = functools.partial(_sj.dump, default=extended_encode)
105 _sj.dump = functools.partial(
106 _sj.dump, default=extended_encode)
107 else:
102 else:
108 _sj.dumps = functools.partial(
103 _sj.dumps = functools.partial(
109 _sj.dumps, default=extended_encode, use_decimal=False)
104 _sj.dumps, default=extended_encode, use_decimal=False
105 )
110 _sj.dump = functools.partial(
106 _sj.dump = functools.partial(
111 _sj.dump, default=extended_encode, use_decimal=False)
107 _sj.dump, default=extended_encode, use_decimal=False
108 )
112 simplejson = _sj
109 simplejson = _sj
113
110
114 except ImportError:
111 except ImportError:
@@ -117,8 +114,7 b' except ImportError:'
117
114
118 try:
115 try:
119 # simplejson not found try out regular json module
116 # simplejson not found try out regular json module
120 _json = imp.load_module('_json', *imp.find_module('json'))
117 _json = imp.load_module("_json", *imp.find_module("json"))
121
122
118
123 # extended JSON encoder for json
119 # extended JSON encoder for json
124 class ExtendedEncoder(_json.JSONEncoder):
120 class ExtendedEncoder(_json.JSONEncoder):
@@ -129,7 +125,6 b' try:'
129 pass
125 pass
130 raise TypeError("%r is not JSON serializable" % (obj,))
126 raise TypeError("%r is not JSON serializable" % (obj,))
131
127
132
133 # monkey-patch JSON encoder to use extended version
128 # monkey-patch JSON encoder to use extended version
134 _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
129 _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
135 _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)
130 _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)
@@ -145,4 +140,4 b' if simplejson:'
145 elif _json:
140 elif _json:
146 json = _json
141 json = _json
147 else:
142 else:
148 raise ImportError('Could not find any json modules')
143 raise ImportError("Could not find any json modules")
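
Note: the effect of the patched dumps/dump, approximated here with only the stdlib (the real module prefers simplejson when available and also covers Decimal, sets and __json__ via _obj_dump):

import datetime
import json

class DemoEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, (datetime.datetime, datetime.date)):
            r = obj.isoformat()
            # aware UTC datetimes get the compact "Z" suffix, as in _obj_dump
            return r[:-6] + "Z" if r.endswith("+00:00") else r
        return super(DemoEncoder, self).default(obj)

json.dumps({"at": datetime.datetime(2017, 1, 1, 12, 30)}, cls=DemoEncoder)
# -> '{"at": "2017-01-01T12:30:00"}'
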
@@ -26,94 +26,135 b' _ = lambda x: x'
26
26
27 time_deltas = OrderedDict()
27 time_deltas = OrderedDict()
28
28
29 time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1),
29 time_deltas["1m"] = {
30 'label': '1 minute', 'minutes': 1}
30 "delta": datetime.timedelta(minutes=1),
31
31 "label": "1 minute",
32 time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5),
32 "minutes": 1,
33 'label': '5 minutes', 'minutes': 5}
33 }
34 time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30),
34
35 'label': '30 minutes', 'minutes': 30}
35 time_deltas["5m"] = {
36 time_deltas['1h'] = {'delta': datetime.timedelta(hours=1),
36 "delta": datetime.timedelta(minutes=5),
37 'label': '60 minutes', 'minutes': 60}
37 "label": "5 minutes",
38 time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours',
38 "minutes": 5,
39 'minutes': 60 * 4}
39 }
40 time_deltas['12h'] = {'delta': datetime.timedelta(hours=12),
40 time_deltas["30m"] = {
41 'label': '12 hours', 'minutes': 60 * 12}
41 "delta": datetime.timedelta(minutes=30),
42 time_deltas['24h'] = {'delta': datetime.timedelta(hours=24),
42 "label": "30 minutes",
43 'label': '24 hours', 'minutes': 60 * 24}
43 "minutes": 30,
44 time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days',
44 }
45 'minutes': 60 * 24 * 3}
45 time_deltas["1h"] = {
46 time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days',
46 "delta": datetime.timedelta(hours=1),
47 'minutes': 60 * 24 * 7}
47 "label": "60 minutes",
48 time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days',
48 "minutes": 60,
49 'minutes': 60 * 24 * 14}
49 }
50 time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days',
50 time_deltas["4h"] = {
51 'minutes': 60 * 24 * 31}
51 "delta": datetime.timedelta(hours=4),
52 time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3),
52 "label": "4 hours",
53 'label': '3 months',
53 "minutes": 60 * 4,
54 'minutes': 60 * 24 * 31 * 3}
54 }
55 time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6),
55 time_deltas["12h"] = {
56 'label': '6 months',
56 "delta": datetime.timedelta(hours=12),
57 'minutes': 60 * 24 * 31 * 6}
57 "label": "12 hours",
58 time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12),
58 "minutes": 60 * 12,
59 'label': '12 months',
59 }
60 'minutes': 60 * 24 * 31 * 12}
60 time_deltas["24h"] = {
61 "delta": datetime.timedelta(hours=24),
62 "label": "24 hours",
63 "minutes": 60 * 24,
64 }
65 time_deltas["3d"] = {
66 "delta": datetime.timedelta(days=3),
67 "label": "3 days",
68 "minutes": 60 * 24 * 3,
69 }
70 time_deltas["1w"] = {
71 "delta": datetime.timedelta(days=7),
72 "label": "7 days",
73 "minutes": 60 * 24 * 7,
74 }
75 time_deltas["2w"] = {
76 "delta": datetime.timedelta(days=14),
77 "label": "14 days",
78 "minutes": 60 * 24 * 14,
79 }
80 time_deltas["1M"] = {
81 "delta": datetime.timedelta(days=31),
82 "label": "31 days",
83 "minutes": 60 * 24 * 31,
84 }
85 time_deltas["3M"] = {
86 "delta": datetime.timedelta(days=31 * 3),
87 "label": "3 months",
88 "minutes": 60 * 24 * 31 * 3,
89 }
90 time_deltas["6M"] = {
91 "delta": datetime.timedelta(days=31 * 6),
92 "label": "6 months",
93 "minutes": 60 * 24 * 31 * 6,
94 }
95 time_deltas["12M"] = {
96 "delta": datetime.timedelta(days=31 * 12),
97 "label": "12 months",
98 "minutes": 60 * 24 * 31 * 12,
99 }
61
100
62 # used in json representation
101 # used in json representation
63 time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']})
102 time_options = dict(
64 for k, v in time_deltas.items()])
103 [
65 FlashMsg = namedtuple('FlashMsg', ['msg', 'level'])
104 (k, {"label": v["label"], "minutes": v["minutes"]})
105 for k, v in time_deltas.items()
106 ]
107 )
108 FlashMsg = namedtuple("FlashMsg", ["msg", "level"])
66
109
67
110
68 def get_flash(request):
111 def get_flash(request):
69 messages = []
112 messages = []
70 messages.extend(
113 messages.extend(
71 [FlashMsg(msg, 'error')
114 [FlashMsg(msg, "error") for msg in request.session.peek_flash("error")]
72 for msg in request.session.peek_flash('error')])
115 )
73 messages.extend([FlashMsg(msg, 'warning')
74 for msg in request.session.peek_flash('warning')])
75 messages.extend(
116 messages.extend(
76 [FlashMsg(msg, 'notice') for msg in request.session.peek_flash()])
117 [FlashMsg(msg, "warning") for msg in request.session.peek_flash("warning")]
118 )
119 messages.extend([FlashMsg(msg, "notice") for msg in request.session.peek_flash()])
77 return messages
120 return messages
78
121
79
122
80 def clear_flash(request):
123 def clear_flash(request):
81 request.session.pop_flash('error')
124 request.session.pop_flash("error")
82 request.session.pop_flash('warning')
125 request.session.pop_flash("warning")
83 request.session.pop_flash()
126 request.session.pop_flash()
84
127
85
128
86 def get_type_formatted_flash(request):
129 def get_type_formatted_flash(request):
87 return [{'msg': message.msg, 'type': message.level}
130 return [
88 for message in get_flash(request)]
131 {"msg": message.msg, "type": message.level} for message in get_flash(request)
132 ]
89
133
90
134
91 def gen_pagination_headers(request, paginator):
135 def gen_pagination_headers(request, paginator):
92 headers = {
136 headers = {
93 'x-total-count': str(paginator.item_count),
137 "x-total-count": str(paginator.item_count),
94 'x-current-page': str(paginator.page),
138 "x-current-page": str(paginator.page),
95 'x-items-per-page': str(paginator.items_per_page)
139 "x-items-per-page": str(paginator.items_per_page),
96 }
140 }
97 params_dict = request.GET.dict_of_lists()
141 params_dict = request.GET.dict_of_lists()
98 last_page_params = copy.deepcopy(params_dict)
142 last_page_params = copy.deepcopy(params_dict)
99 last_page_params['page'] = paginator.last_page or 1
143 last_page_params["page"] = paginator.last_page or 1
100 first_page_params = copy.deepcopy(params_dict)
144 first_page_params = copy.deepcopy(params_dict)
101 first_page_params.pop('page', None)
145 first_page_params.pop("page", None)
102 next_page_params = copy.deepcopy(params_dict)
146 next_page_params = copy.deepcopy(params_dict)
103 next_page_params['page'] = paginator.next_page or paginator.last_page or 1
147 next_page_params["page"] = paginator.next_page or paginator.last_page or 1
104 prev_page_params = copy.deepcopy(params_dict)
148 prev_page_params = copy.deepcopy(params_dict)
105 prev_page_params['page'] = paginator.previous_page or 1
149 prev_page_params["page"] = paginator.previous_page or 1
106 lp_url = request.current_route_url(_query=last_page_params)
150 lp_url = request.current_route_url(_query=last_page_params)
107 fp_url = request.current_route_url(_query=first_page_params)
151 fp_url = request.current_route_url(_query=first_page_params)
108 links = [
152 links = ['rel="last", <{}>'.format(lp_url), 'rel="first", <{}>'.format(fp_url)]
109 'rel="last", <{}>'.format(lp_url),
110 'rel="first", <{}>'.format(fp_url),
111 ]
112 if first_page_params != prev_page_params:
153 if first_page_params != prev_page_params:
113 prev_url = request.current_route_url(_query=prev_page_params)
154 prev_url = request.current_route_url(_query=prev_page_params)
114 links.append('rel="prev", <{}>'.format(prev_url))
155 links.append('rel="prev", <{}>'.format(prev_url))
115 if last_page_params != next_page_params:
156 if last_page_params != next_page_params:
116 next_url = request.current_route_url(_query=next_page_params)
157 next_url = request.current_route_url(_query=next_page_params)
117 links.append('rel="next", <{}>'.format(next_url))
158 links.append('rel="next", <{}>'.format(next_url))
118 headers['link'] = '; '.join(links)
159 headers["link"] = "; ".join(links)
119 return headers
160 return headers
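
Note: for illustration, the headers this produces for page 2 of 5 with 50 items per page (the route URL is hypothetical; "first" carries no page param because it was popped, and "prev"/"next" only appear when they differ from "first"/"last"):

{
    "x-total-count": "250",
    "x-current-page": "2",
    "x-items-per-page": "50",
    "link": 'rel="last", </reports?page=5>; rel="first", </reports>; '
            'rel="prev", </reports?page=1>; rel="next", </reports?page=3>',
}
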
@@ -18,17 +18,21 b' import re'
18 from appenlight.lib.ext_json import json
18 from appenlight.lib.ext_json import json
19 from jinja2 import Markup, escape, evalcontextfilter
19 from jinja2 import Markup, escape, evalcontextfilter
20
20
21 _paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
21 _paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")
22
22
23
23
24 @evalcontextfilter
24 @evalcontextfilter
25 def nl2br(eval_ctx, value):
25 def nl2br(eval_ctx, value):
26 if eval_ctx.autoescape:
26 if eval_ctx.autoescape:
27 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br>\n'))
27 result = "\n\n".join(
28 for p in _paragraph_re.split(escape(value)))
28 "<p>%s</p>" % p.replace("\n", Markup("<br>\n"))
29 for p in _paragraph_re.split(escape(value))
30 )
29 else:
31 else:
30 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n')
32 result = "\n\n".join(
31 for p in _paragraph_re.split(escape(value)))
33 "<p>%s</p>" % p.replace("\n", "<br>\n")
34 for p in _paragraph_re.split(escape(value))
35 )
32 if eval_ctx.autoescape:
36 if eval_ctx.autoescape:
33 result = Markup(result)
37 result = Markup(result)
34 return result
38 return result
@@ -36,11 +40,14 b' def nl2br(eval_ctx, value):'
36
40
37 @evalcontextfilter
41 @evalcontextfilter
38 def toJSONUnsafe(eval_ctx, value):
42 def toJSONUnsafe(eval_ctx, value):
39 encoded = json.dumps(value).replace('&', '\\u0026') \
43 encoded = (
40 .replace('<', '\\u003c') \
44 json.dumps(value)
41 .replace('>', '\\u003e') \
45 .replace("&", "\\u0026")
42 .replace('>', '\\u003e') \
46 .replace("<", "\\u003c")
43 .replace('"', '\\u0022') \
47 .replace(">", "\\u003e")
44 .replace("'", '\\u0027') \
48 .replace(">", "\\u003e")
45 .replace(r'\n', '/\\\n')
49 .replace('"', "\\u0022")
50 .replace("'", "\\u0027")
51 .replace(r"\n", "/\\\n")
52 )
46 return Markup("'%s'" % encoded)
53 return Markup("'%s'" % encoded)
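
Note: stripped of Jinja's autoescape handling, nl2br splits on blank lines into <p> blocks and turns the remaining single newlines into <br>:

import re

_paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")

def nl2br_plain(value):
    return "\n\n".join(
        "<p>%s</p>" % p.replace("\n", "<br>\n")
        for p in _paragraph_re.split(value)
    )

nl2br_plain("line one\nline two\n\nnext paragraph")
# -> '<p>line one<br>\nline two</p>\n\n<p>next paragraph</p>'
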
@@ -17,11 +17,30 b''
17 import json
17 import json
18 import logging
18 import logging
19
19
20 ignored_keys = ['args', 'asctime', 'created', 'exc_info', 'exc_text',
20 ignored_keys = [
21 'filename', 'funcName', 'levelname', 'levelno', 'lineno',
21 "args",
22 'message', 'module', 'msecs', 'msg', 'name', 'pathname',
22 "asctime",
23 'process', 'processName', 'relativeCreated', 'stack_info',
23 "created",
24 'thread', 'threadName']
24 "exc_info",
25 "exc_text",
26 "filename",
27 "funcName",
28 "levelname",
29 "levelno",
30 "lineno",
31 "message",
32 "module",
33 "msecs",
34 "msg",
35 "name",
36 "pathname",
37 "process",
38 "processName",
39 "relativeCreated",
40 "stack_info",
41 "thread",
42 "threadName",
43 ]
25
44
26
45
27 class JSONFormatter(logging.Formatter):
46 class JSONFormatter(logging.Formatter):
@@ -41,7 +60,7 b' class JSONFormatter(logging.Formatter):'
41 record.message = record.getMessage()
60 record.message = record.getMessage()
42 log_dict = vars(record)
61 log_dict = vars(record)
43 keys = [k for k in log_dict.keys() if k not in ignored_keys]
62 keys = [k for k in log_dict.keys() if k not in ignored_keys]
44 payload = {'message': record.message}
63 payload = {"message": record.message}
45 payload.update({k: log_dict[k] for k in keys})
64 payload.update({k: log_dict[k] for k in keys})
46 record.message = json.dumps(payload, default=lambda x: str(x))
65 record.message = json.dumps(payload, default=lambda x: str(x))
47
66
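
Note: a reduced, self-contained sketch of the formatter's effect: anything passed through logging's extra= that is not a standard LogRecord attribute lands in the JSON payload next to the message:

import json
import logging

class MiniJSONFormatter(logging.Formatter):
    def format(self, record):
        baseline = vars(logging.makeLogRecord({}))  # standard attribute names
        payload = {"message": record.getMessage()}
        payload.update(
            {k: v for k, v in vars(record).items() if k not in baseline}
        )
        return json.dumps(payload, default=str)

handler = logging.StreamHandler()
handler.setFormatter(MiniJSONFormatter())
log = logging.getLogger("demo")
log.addHandler(handler)
log.error("limit hit", extra={"resource_id": 42})
# -> {"message": "limit hit", "resource_id": 42}
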
@@ -14,52 +14,56 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 BASE = 'appenlight:data:{}'
17 BASE = "appenlight:data:{}"
18
18
19 REDIS_KEYS = {
19 REDIS_KEYS = {
20 'tasks': {
20 "tasks": {
21 'add_reports_lock': BASE.format('add_reports_lock:{}'),
21 "add_reports_lock": BASE.format("add_reports_lock:{}"),
22 'add_logs_lock': BASE.format('add_logs_lock:{}'),
22 "add_logs_lock": BASE.format("add_logs_lock:{}"),
23 },
23 },
24 'counters': {
24 "counters": {
25 'events_per_minute_per_user': BASE.format(
25 "events_per_minute_per_user": BASE.format("events_per_minute_per_user:{}:{}"),
26 'events_per_minute_per_user:{}:{}'),
26 "reports_per_minute": BASE.format("reports_per_minute:{}"),
27 'reports_per_minute': BASE.format('reports_per_minute:{}'),
27 "reports_per_hour_per_app": BASE.format("reports_per_hour_per_app:{}:{}"),
28 'reports_per_hour_per_app': BASE.format(
28 "reports_per_type": BASE.format("reports_per_type:{}"),
29 'reports_per_hour_per_app:{}:{}'),
29 "logs_per_minute": BASE.format("logs_per_minute:{}"),
30 'reports_per_type': BASE.format('reports_per_type:{}'),
30 "logs_per_hour_per_app": BASE.format("logs_per_hour_per_app:{}:{}"),
31 'logs_per_minute': BASE.format('logs_per_minute:{}'),
31 "metrics_per_minute": BASE.format("metrics_per_minute:{}"),
32 'logs_per_hour_per_app': BASE.format(
32 "metrics_per_hour_per_app": BASE.format("metrics_per_hour_per_app:{}:{}"),
33 'logs_per_hour_per_app:{}:{}'),
33 "report_group_occurences": BASE.format("report_group_occurences:{}"),
34 'metrics_per_minute': BASE.format('metrics_per_minute:{}'),
34 "report_group_occurences_alerting": BASE.format(
35 'metrics_per_hour_per_app': BASE.format(
35 "report_group_occurences_alerting:{}"
36 'metrics_per_hour_per_app:{}:{}'),
36 ),
37 'report_group_occurences': BASE.format('report_group_occurences:{}'),
37 "report_group_occurences_10th": BASE.format("report_group_occurences_10th:{}"),
38 'report_group_occurences_alerting': BASE.format(
38 "report_group_occurences_100th": BASE.format(
39 'report_group_occurences_alerting:{}'),
39 "report_group_occurences_100th:{}"
40 'report_group_occurences_10th': BASE.format(
40 ),
41 'report_group_occurences_10th:{}'),
42 'report_group_occurences_100th': BASE.format(
43 'report_group_occurences_100th:{}'),
44 },
41 },
45 'rate_limits': {
42 "rate_limits": {
46 'per_application_reports_rate_limit': BASE.format(
43 "per_application_reports_rate_limit": BASE.format(
47 'per_application_reports_limit:{}:{}'),
44 "per_application_reports_limit:{}:{}"
48 'per_application_logs_rate_limit': BASE.format(
45 ),
49 'per_application_logs_rate_limit:{}:{}'),
46 "per_application_logs_rate_limit": BASE.format(
50 'per_application_metrics_rate_limit': BASE.format(
47 "per_application_logs_rate_limit:{}:{}"
51 'per_application_metrics_rate_limit:{}:{}'),
48 ),
49 "per_application_metrics_rate_limit": BASE.format(
50 "per_application_metrics_rate_limit:{}:{}"
51 ),
52 },
52 },
53 'apps_that_got_new_data_per_hour': BASE.format('apps_that_got_new_data_per_hour:{}'),
53 "apps_that_got_new_data_per_hour": BASE.format(
54 'apps_that_had_reports': BASE.format('apps_that_had_reports'),
54 "apps_that_got_new_data_per_hour:{}"
55 'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'),
55 ),
56 'apps_that_had_reports_alerting': BASE.format(
56 "apps_that_had_reports": BASE.format("apps_that_had_reports"),
57 'apps_that_had_reports_alerting'),
57 "apps_that_had_error_reports": BASE.format("apps_that_had_error_reports"),
58 'apps_that_had_error_reports_alerting': BASE.format(
58 "apps_that_had_reports_alerting": BASE.format("apps_that_had_reports_alerting"),
59 'apps_that_had_error_reports_alerting'),
59 "apps_that_had_error_reports_alerting": BASE.format(
60 'reports_to_notify_per_type_per_app': BASE.format(
60 "apps_that_had_error_reports_alerting"
61 'reports_to_notify_per_type_per_app:{}:{}'),
61 ),
62 'reports_to_notify_per_type_per_app_alerting': BASE.format(
62 "reports_to_notify_per_type_per_app": BASE.format(
63 'reports_to_notify_per_type_per_app_alerting:{}:{}'),
63 "reports_to_notify_per_type_per_app:{}:{}"
64 'seen_tag_list': BASE.format('seen_tag_list')
64 ),
65 "reports_to_notify_per_type_per_app_alerting": BASE.format(
66 "reports_to_notify_per_type_per_app_alerting:{}:{}"
67 ),
68 "seen_tag_list": BASE.format("seen_tag_list"),
65 }
69 }
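
Note: each entry is a template with the BASE prefix pre-filled; the remaining placeholders are filled at call sites. Braces inside the format() argument are inserted literally, so the result is itself a template:

BASE = "appenlight:data:{}"
tpl = BASE.format("reports_per_hour_per_app:{}:{}")
# -> 'appenlight:data:reports_per_hour_per_app:{}:{}'
tpl.format("2017-01-01T10:00", 42)
# -> 'appenlight:data:reports_per_hour_per_app:2017-01-01T10:00:42'
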
@@ -54,11 +54,11 b' def unsafe_json_body(request):'
54 try:
54 try:
55 return request.json_body
55 return request.json_body
56 except ValueError:
56 except ValueError:
57 raise JSONException('Incorrect JSON')
57 raise JSONException("Incorrect JSON")
58
58
59
59
60 def get_user(request):
60 def get_user(request):
61 if not request.path_info.startswith('/static'):
61 if not request.path_info.startswith("/static"):
62 user_id = unauthenticated_userid(request)
62 user_id = unauthenticated_userid(request)
63 try:
63 try:
64 user_id = int(user_id)
64 user_id = int(user_id)
@@ -68,8 +68,10 b' def get_user(request):'
68 if user_id:
68 if user_id:
69 user = UserService.by_id(user_id)
69 user = UserService.by_id(user_id)
70 if user:
70 if user:
71 request.environ['appenlight.username'] = '%d:%s' % (
71 request.environ["appenlight.username"] = "%d:%s" % (
72 user_id, user.user_name)
72 user_id,
73 user.user_name,
74 )
73 return user
75 return user
74 else:
76 else:
75 return None
77 return None
@@ -85,7 +87,7 b' def add_flash_to_headers(request, clear=True):'
85 flash queue
87 flash queue
86 """
88 """
87 flash_msgs = helpers.get_type_formatted_flash(request)
89 flash_msgs = helpers.get_type_formatted_flash(request)
88 request.response.headers['x-flash-messages'] = json.dumps(flash_msgs)
90 request.response.headers["x-flash-messages"] = json.dumps(flash_msgs)
89 helpers.clear_flash(request)
91 helpers.clear_flash(request)
90
92
91
93
@@ -94,42 +96,36 b' def get_authomatic(request):'
94 # authomatic social auth
96 # authomatic social auth
95 authomatic_conf = {
97 authomatic_conf = {
96 # callback http://yourapp.com/social_auth/twitter
98 # callback http://yourapp.com/social_auth/twitter
97 'twitter': {
99 "twitter": {
98 'class_': oauth1.Twitter,
100 "class_": oauth1.Twitter,
99 'consumer_key': settings.get('authomatic.pr.twitter.key', ''),
101 "consumer_key": settings.get("authomatic.pr.twitter.key", ""),
100 'consumer_secret': settings.get('authomatic.pr.twitter.secret',
102 "consumer_secret": settings.get("authomatic.pr.twitter.secret", ""),
101 ''),
102 },
103 },
103 # callback http://yourapp.com/social_auth/facebook
104 # callback http://yourapp.com/social_auth/facebook
104 'facebook': {
105 "facebook": {
105 'class_': oauth2.Facebook,
106 "class_": oauth2.Facebook,
106 'consumer_key': settings.get('authomatic.pr.facebook.app_id', ''),
107 "consumer_key": settings.get("authomatic.pr.facebook.app_id", ""),
107 'consumer_secret': settings.get('authomatic.pr.facebook.secret',
108 "consumer_secret": settings.get("authomatic.pr.facebook.secret", ""),
108 ''),
109 "scope": ["email"],
109 'scope': ['email'],
110 },
110 },
111 # callback http://yourapp.com/social_auth/google
111 # callback http://yourapp.com/social_auth/google
112 'google': {
112 "google": {
113 'class_': oauth2.Google,
113 "class_": oauth2.Google,
114 'consumer_key': settings.get('authomatic.pr.google.key', ''),
114 "consumer_key": settings.get("authomatic.pr.google.key", ""),
115 'consumer_secret': settings.get(
115 "consumer_secret": settings.get("authomatic.pr.google.secret", ""),
116 'authomatic.pr.google.secret', ''),
116 "scope": ["profile", "email"],
117 'scope': ['profile', 'email'],
118 },
117 },
119 'github': {
118 "github": {
120 'class_': oauth2.GitHub,
119 "class_": oauth2.GitHub,
121 'consumer_key': settings.get('authomatic.pr.github.key', ''),
120 "consumer_key": settings.get("authomatic.pr.github.key", ""),
122 'consumer_secret': settings.get(
121 "consumer_secret": settings.get("authomatic.pr.github.secret", ""),
123 'authomatic.pr.github.secret', ''),
122 "scope": ["repo", "public_repo", "user:email"],
124 'scope': ['repo', 'public_repo', 'user:email'],
123 "access_headers": {"User-Agent": "AppEnlight"},
125 'access_headers': {'User-Agent': 'AppEnlight'},
124 },
125 "bitbucket": {
126 "class_": oauth1.Bitbucket,
127 "consumer_key": settings.get("authomatic.pr.bitbucket.key", ""),
128 "consumer_secret": settings.get("authomatic.pr.bitbucket.secret", ""),
126 },
129 },
127 'bitbucket': {
128 'class_': oauth1.Bitbucket,
129 'consumer_key': settings.get('authomatic.pr.bitbucket.key', ''),
130 'consumer_secret': settings.get(
131 'authomatic.pr.bitbucket.secret', '')
132 }
133 }
130 }
134 return Authomatic(
131 return Authomatic(config=authomatic_conf, secret=settings["authomatic.secret"])
135 config=authomatic_conf, secret=settings['authomatic.secret'])
@@ -52,13 +52,13 b' class RuleBase(object):'
52 :param field_name:
52 :param field_name:
53 :return:
53 :return:
54 """
54 """
55 parts = field_name.split(':') if field_name else []
55 parts = field_name.split(":") if field_name else []
56 found = struct
56 found = struct
57 while parts:
57 while parts:
58 current_key = parts.pop(0)
58 current_key = parts.pop(0)
59 found = found.get(current_key)
59 found = found.get(current_key)
60 if not found and parts:
60 if not found and parts:
61 raise KeyNotFoundException('Key not found in structure')
61 raise KeyNotFoundException("Key not found in structure")
62 return found
62 return found
63
63
64 @classmethod
64 @classmethod
@@ -72,13 +72,13 b' class RuleBase(object):'
72 :param field_name:
72 :param field_name:
73 :return:
73 :return:
74 """
74 """
75 parts = field_name.split(':')
75 parts = field_name.split(":")
76 found = struct
76 found = struct
77 while parts:
77 while parts:
78 current_key = parts.pop(0)
78 current_key = parts.pop(0)
79 found = getattr(found, current_key, None)
79 found = getattr(found, current_key, None)
80 if not found and parts:
80 if not found and parts:
81 raise KeyNotFoundException('Key not found in structure')
81 raise KeyNotFoundException("Key not found in structure")
82 return found
82 return found
83
83
84 def normalized_type(self, field, value):
84 def normalized_type(self, field, value):
@@ -89,28 +89,32 b' class RuleBase(object):'
89 """
89 """
90 f_type = self.type_matrix.get(field)
90 f_type = self.type_matrix.get(field)
91 if f_type:
91 if f_type:
92 cast_to = f_type['type']
92 cast_to = f_type["type"]
93 else:
93 else:
94 raise UnknownTypeException('Unknown type')
94 raise UnknownTypeException("Unknown type")
95
95
96 if value is None:
96 if value is None:
97 return None
97 return None
98
98
99 try:
99 try:
100 if cast_to == 'int':
100 if cast_to == "int":
101 return int(value)
101 return int(value)
102 elif cast_to == 'float':
102 elif cast_to == "float":
103 return float(value)
103 return float(value)
104 elif cast_to == 'unicode':
104 elif cast_to == "unicode":
105 return str(value)
105 return str(value)
106 except ValueError as exc:
106 except ValueError as exc:
107 raise InvalidValueException(exc)
107 raise InvalidValueException(exc)
108
108
109
109
110 class Rule(RuleBase):
110 class Rule(RuleBase):
111 def __init__(self, config, type_matrix,
111 def __init__(
112 struct_getter=RuleBase.default_dict_struct_getter,
112 self,
113 config_manipulator=None):
113 config,
114 type_matrix,
115 struct_getter=RuleBase.default_dict_struct_getter,
116 config_manipulator=None,
117 ):
114 """
118 """
115
119
116 :param config: dict - contains rule configuration
120 :param config: dict - contains rule configuration
@@ -159,8 +163,9 b' class Rule(RuleBase):'
159 config_manipulator(self)
163 config_manipulator(self)
160
164
161 def subrule_check(self, rule_config, struct):
165 def subrule_check(self, rule_config, struct):
162 rule = Rule(rule_config, self.type_matrix,
166 rule = Rule(
163 config_manipulator=self.config_manipulator)
167 rule_config, self.type_matrix, config_manipulator=self.config_manipulator
168 )
164 return rule.match(struct)
169 return rule.match(struct)
165
170
166 def match(self, struct):
171 def match(self, struct):
@@ -169,32 +174,41 b' class Rule(RuleBase):'
169 First tries report value, then tests tags if not found, then finally
174 First tries report value, then tests tags if not found, then finally
170 report group
175 report group
171 """
176 """
172 field_name = self.config.get('field')
177 field_name = self.config.get("field")
173 test_value = self.config.get('value')
178 test_value = self.config.get("value")
174
179
175 if not field_name:
180 if not field_name:
176 return False
181 return False
177
182
178 if field_name == '__AND__':
183 if field_name == "__AND__":
179 rule = AND(self.config['rules'], self.type_matrix,
184 rule = AND(
180 config_manipulator=self.config_manipulator)
185 self.config["rules"],
186 self.type_matrix,
187 config_manipulator=self.config_manipulator,
188 )
181 return rule.match(struct)
189 return rule.match(struct)
182 elif field_name == '__OR__':
190 elif field_name == "__OR__":
183 rule = OR(self.config['rules'], self.type_matrix,
191 rule = OR(
184 config_manipulator=self.config_manipulator)
192 self.config["rules"],
193 self.type_matrix,
194 config_manipulator=self.config_manipulator,
195 )
185 return rule.match(struct)
196 return rule.match(struct)
186 elif field_name == '__NOT__':
197 elif field_name == "__NOT__":
187 rule = NOT(self.config['rules'], self.type_matrix,
198 rule = NOT(
188 config_manipulator=self.config_manipulator)
199 self.config["rules"],
200 self.type_matrix,
201 config_manipulator=self.config_manipulator,
202 )
189 return rule.match(struct)
203 return rule.match(struct)
190
204
191 if test_value is None:
205 if test_value is None:
192 return False
206 return False
193
207
194 try:
208 try:
195 struct_value = self.normalized_type(field_name,
209 struct_value = self.normalized_type(
196 self.struct_getter(struct,
210 field_name, self.struct_getter(struct, field_name)
197 field_name))
211 )
198 except (UnknownTypeException, InvalidValueException) as exc:
212 except (UnknownTypeException, InvalidValueException) as exc:
199 log.error(str(exc))
213 log.error(str(exc))
200 return False
214 return False
@@ -205,24 +219,23 b' class Rule(RuleBase):'
205 log.error(str(exc))
219 log.error(str(exc))
206 return False
220 return False
207
221
208 if self.config['op'] not in ('startswith', 'endswith', 'contains'):
222 if self.config["op"] not in ("startswith", "endswith", "contains"):
209 try:
223 try:
210 return getattr(operator,
224 return getattr(operator, self.config["op"])(struct_value, test_value)
211 self.config['op'])(struct_value, test_value)
212 except TypeError:
225 except TypeError:
213 return False
226 return False
214 elif self.config['op'] == 'startswith':
227 elif self.config["op"] == "startswith":
215 return struct_value.startswith(test_value)
228 return struct_value.startswith(test_value)
216 elif self.config['op'] == 'endswith':
229 elif self.config["op"] == "endswith":
217 return struct_value.endswith(test_value)
230 return struct_value.endswith(test_value)
218 elif self.config['op'] == 'contains':
231 elif self.config["op"] == "contains":
219 return test_value in struct_value
232 return test_value in struct_value
220 raise BadConfigException('Invalid configuration, '
233 raise BadConfigException(
221 'unknown operator: {}'.format(self.config))
234 "Invalid configuration, " "unknown operator: {}".format(self.config)
235 )
222
236
223 def __repr__(self):
237 def __repr__(self):
224 return '<Rule {} {}>'.format(self.config.get('field'),
238 return "<Rule {} {}>".format(self.config.get("field"), self.config.get("value"))
225 self.config.get('value'))
226
239
227
240
228 class AND(Rule):
241 class AND(Rule):
@@ -231,8 +244,7 b' class AND(Rule):'
231 self.rules = rules
244 self.rules = rules
232
245
233 def match(self, struct):
246 def match(self, struct):
234 return all([self.subrule_check(r_conf, struct) for r_conf
247 return all([self.subrule_check(r_conf, struct) for r_conf in self.rules])
235 in self.rules])
236
248
237
249
238 class NOT(Rule):
250 class NOT(Rule):
@@ -241,8 +253,7 b' class NOT(Rule):'
241 self.rules = rules
253 self.rules = rules
242
254
243 def match(self, struct):
255 def match(self, struct):
244 return all([not self.subrule_check(r_conf, struct) for r_conf
256 return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules])
245 in self.rules])
246
257
247
258
248 class OR(Rule):
259 class OR(Rule):
@@ -251,14 +262,12 b' class OR(Rule):'
251 self.rules = rules
262 self.rules = rules
252
263
253 def match(self, struct):
264 def match(self, struct):
254 return any([self.subrule_check(r_conf, struct) for r_conf
265 return any([self.subrule_check(r_conf, struct) for r_conf in self.rules])
255 in self.rules])
256
266
257
267
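
Putting the leaf operators and the compound classes together, a hedged usage sketch (same assumed import path as above; the default struct getter is assumed to resolve plain dict keys, as its name suggests, since only its attribute-based tail is visible in this diff):

from appenlight.lib.rule import Rule  # import path assumed

type_matrix = {
    "http_status": {"type": "int"},
    "url": {"type": "unicode"},
}
config = {
    "field": "__AND__",
    "rules": [
        {"field": "http_status", "op": "ge", "value": "500"},
        {"field": "url", "op": "contains", "value": "/api/"},
    ],
}
rule = Rule(config, type_matrix)
# The test value is normalized the same way as the struct value (in lines
# elided from this hunk), so "500" compares as an int here.
rule.match({"http_status": 502, "url": "/api/reports"})  # -> True
rule.match({"http_status": 200, "url": "/api/reports"})  # -> False
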
258 class RuleService(object):
268 class RuleService(object):
259 @staticmethod
269 @staticmethod
260 def rule_from_config(config, field_mappings, labels_dict,
270 def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None):
261 manipulator_func=None):
262 """
271 """
263 Returns a modified rule with a manipulator function applied
272 Returns a modified rule with a manipulator function applied
264 By default the manipulator function replaces the field id using labels_dict
273 By default the manipulator function replaces the field id using labels_dict
@@ -270,28 +279,33 b' class RuleService(object):'
270 """
279 """
271 rev_map = {}
280 rev_map = {}
272 for k, v in labels_dict.items():
281 for k, v in labels_dict.items():
273 rev_map[(v['agg'], v['key'],)] = k
282 rev_map[(v["agg"], v["key"])] = k
274
283
275 if manipulator_func is None:
284 if manipulator_func is None:
285
276 def label_rewriter_func(rule):
286 def label_rewriter_func(rule):
277 field = rule.config.get('field')
287 field = rule.config.get("field")
278 if not field or rule.config['field'] in ['__OR__',
288 if not field or rule.config["field"] in [
279 '__AND__', '__NOT__']:
289 "__OR__",
290 "__AND__",
291 "__NOT__",
292 ]:
280 return
293 return
281
294
282 to_map = field_mappings.get(rule.config['field'])
295 to_map = field_mappings.get(rule.config["field"])
283
296
284 # we need to replace series field with _AE_NOT_FOUND_ to not match
297 # we need to replace series field with _AE_NOT_FOUND_ to not match
285 # accidentally some other field which happens to have the series that
298 # accidentally some other field which happens to have the series that
286 # was used when the alert was created
299 # was used when the alert was created
287 if to_map:
300 if to_map:
288 to_replace = rev_map.get((to_map['agg'], to_map['key'],),
301 to_replace = rev_map.get(
289 '_AE_NOT_FOUND_')
302 (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_"
303 )
290 else:
304 else:
291 to_replace = '_AE_NOT_FOUND_'
305 to_replace = "_AE_NOT_FOUND_"
292
306
293 rule.config['field'] = to_replace
307 rule.config["field"] = to_replace
294 rule.type_matrix[to_replace] = {"type": 'float'}
308 rule.type_matrix[to_replace] = {"type": "float"}
295
309
296 manipulator_func = label_rewriter_func
310 manipulator_func = label_rewriter_func
297
311
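
The reverse map plus the _AE_NOT_FOUND_ sentinel are the heart of the default manipulator. A standalone, runnable restatement (label ids and series names are made up):

labels_dict = {
    "label_1": {"agg": "avg", "key": "response_time"},
    "label_2": {"agg": "sum", "key": "request_count"},
}
rev_map = {(v["agg"], v["key"]): k for k, v in labels_dict.items()}

# A rule field that maps to ("avg", "response_time") is rewritten to
# "label_1"; anything unmatched becomes the sentinel, so a stale alert
# definition can never accidentally match an unrelated series.
rev_map.get(("avg", "response_time"), "_AE_NOT_FOUND_")  # -> "label_1"
rev_map.get(("max", "memory"), "_AE_NOT_FOUND_")         # -> "_AE_NOT_FOUND_"
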
@@ -14,8 +14,9 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from ziggurat_foundations.models.services.external_identity import \
17 from ziggurat_foundations.models.services.external_identity import (
18 ExternalIdentityService
18 ExternalIdentityService,
19 )
19 from appenlight.models.external_identity import ExternalIdentity
20 from appenlight.models.external_identity import ExternalIdentity
20
21
21
22
@@ -24,37 +25,38 b' def handle_social_data(request, user, social_data):'
24 update_identity = False
25 update_identity = False
25
26
26 extng_id = ExternalIdentityService.by_external_id_and_provider(
27 extng_id = ExternalIdentityService.by_external_id_and_provider(
27 social_data['user']['id'],
28 social_data["user"]["id"], social_data["credentials"].provider_name
28 social_data['credentials'].provider_name
29 )
29 )
30
30
31 # fix legacy accounts with wrong google ID
31 # fix legacy accounts with wrong google ID
32 if not extng_id and social_data['credentials'].provider_name == 'google':
32 if not extng_id and social_data["credentials"].provider_name == "google":
33 extng_id = ExternalIdentityService.by_external_id_and_provider(
33 extng_id = ExternalIdentityService.by_external_id_and_provider(
34 social_data['user']['email'],
34 social_data["user"]["email"], social_data["credentials"].provider_name
35 social_data['credentials'].provider_name
36 )
35 )
37
36
38 if extng_id:
37 if extng_id:
39 extng_id.delete()
38 extng_id.delete()
40 update_identity = True
39 update_identity = True
41
40
42 if not social_data['user']['id']:
41 if not social_data["user"]["id"]:
43 request.session.flash(
42 request.session.flash(
44 'No external user id found? Perhaps permissions for '
43 "No external user id found? Perhaps permissions for "
45 'authentication are set incorrectly', 'error')
44 "authentication are set incorrectly",
45 "error",
46 )
46 return False
47 return False
47
48
48 if not extng_id or update_identity:
49 if not extng_id or update_identity:
49 if not update_identity:
50 if not update_identity:
50 request.session.flash('Your external identity is now '
51 request.session.flash(
51 'connected with your account')
52 "Your external identity is now " "connected with your account"
53 )
52 ex_identity = ExternalIdentity()
54 ex_identity = ExternalIdentity()
53 ex_identity.external_id = social_data['user']['id']
55 ex_identity.external_id = social_data["user"]["id"]
54 ex_identity.external_user_name = social_data['user']['user_name']
56 ex_identity.external_user_name = social_data["user"]["user_name"]
55 ex_identity.provider_name = social_data['credentials'].provider_name
57 ex_identity.provider_name = social_data["credentials"].provider_name
56 ex_identity.access_token = social_data['credentials'].token
58 ex_identity.access_token = social_data["credentials"].token
57 ex_identity.token_secret = social_data['credentials'].token_secret
59 ex_identity.token_secret = social_data["credentials"].token_secret
58 ex_identity.alt_token = social_data['credentials'].refresh_token
60 ex_identity.alt_token = social_data["credentials"].refresh_token
59 user.external_identities.append(ex_identity)
61 user.external_identities.append(ex_identity)
60 request.session.pop('zigg.social_auth', None)
62 request.session.pop("zigg.social_auth", None)
@@ -28,9 +28,7 b' from collections import namedtuple'
28 from datetime import timedelta, datetime, date
28 from datetime import timedelta, datetime, date
29 from dogpile.cache.api import NO_VALUE
29 from dogpile.cache.api import NO_VALUE
30 from appenlight.models import Datastores
30 from appenlight.models import Datastores
31 from appenlight.validators import (LogSearchSchema,
31 from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params
32 TagListSchema,
33 accepted_search_params)
34 from itsdangerous import TimestampSigner
32 from itsdangerous import TimestampSigner
35 from ziggurat_foundations.permissions import ALL_PERMISSIONS
33 from ziggurat_foundations.permissions import ALL_PERMISSIONS
36 from ziggurat_foundations.models.services.user import UserService
34 from ziggurat_foundations.models.services.user import UserService
@@ -40,21 +38,20 b' from dateutil.rrule import rrule, MONTHLY, DAILY'
40 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
41
39
42
40
43 Stat = namedtuple('Stat', 'start_interval value')
41 Stat = namedtuple("Stat", "start_interval value")
44
42
45
43
46 def default_extractor(item):
44 def default_extractor(item):
47 """
45 """
48 :param item - item to extract date from
46 :param item - item to extract date from
49 """
47 """
50 if hasattr(item, 'start_interval'):
48 if hasattr(item, "start_interval"):
51 return item.start_interval
49 return item.start_interval
52 return item['start_interval']
50 return item["start_interval"]
53
51
54
52
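
The extractor accepts both attribute-style items and plain dicts; a self-contained illustration (the definitions are repeated only so the snippet runs on its own):

from collections import namedtuple
from datetime import datetime

Stat = namedtuple("Stat", "start_interval value")

def default_extractor(item):
    if hasattr(item, "start_interval"):
        return item.start_interval
    return item["start_interval"]

ts = datetime(2018, 1, 1)
assert default_extractor(Stat(ts, 42)) == ts
assert default_extractor({"start_interval": ts, "value": 42}) == ts
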
55 # fast gap generator
53 # fast gap generator
56 def gap_gen_default(start, step, itemiterator, end_time=None,
54 def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None):
57 iv_extractor=None):
58 """ generates a list of time/value items based on step and itemiterator
55 """ generates a list of time/value items based on step and itemiterator
59 if entries are missing from the iterator, time/None pairs will be returned
56 if entries are missing from the iterator, time/None pairs will be returned
60 instead
57 instead
@@ -100,27 +97,31 b' class DateTimeEncoder(json.JSONEncoder):'
100 return json.JSONEncoder.default(self, obj)
97 return json.JSONEncoder.default(self, obj)
101
98
102
99
103 def channelstream_request(secret, endpoint, payload, throw_exceptions=False,
100 def channelstream_request(
104 servers=None):
101 secret, endpoint, payload, throw_exceptions=False, servers=None
102 ):
105 responses = []
103 responses = []
106 if not servers:
104 if not servers:
107 servers = []
105 servers = []
108
106
109 signer = TimestampSigner(secret)
107 signer = TimestampSigner(secret)
110 sig_for_server = signer.sign(endpoint)
108 sig_for_server = signer.sign(endpoint)
111 for secret, server in [(s['secret'], s['server']) for s in servers]:
109 for secret, server in [(s["secret"], s["server"]) for s in servers]:
112 response = {}
110 response = {}
113 secret_headers = {'x-channelstream-secret': sig_for_server,
111 secret_headers = {
114 'x-channelstream-endpoint': endpoint,
112 "x-channelstream-secret": sig_for_server,
115 'Content-Type': 'application/json'}
113 "x-channelstream-endpoint": endpoint,
116 url = '%s%s' % (server, endpoint)
114 "Content-Type": "application/json",
115 }
116 url = "%s%s" % (server, endpoint)
117 try:
117 try:
118 response = requests.post(url,
118 response = requests.post(
119 data=json.dumps(payload,
119 url,
120 cls=DateTimeEncoder),
120 data=json.dumps(payload, cls=DateTimeEncoder),
121 headers=secret_headers,
121 headers=secret_headers,
122 verify=False,
122 verify=False,
123 timeout=2).json()
123 timeout=2,
124 ).json()
124 except requests.exceptions.RequestException as e:
125 except requests.exceptions.RequestException as e:
125 if throw_exceptions:
126 if throw_exceptions:
126 raise
127 raise
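
A hedged usage sketch of the call above; the server URL and secrets are made up, and a channelstream server must actually be listening for the POST to succeed:

servers = [{"secret": "server-secret", "server": "http://127.0.0.1:8000"}]
responses = channelstream_request(
    secret="shared-secret",
    endpoint="/message",
    payload=[{"channel": "alerts", "message": {"text": "hi"}}],
    throw_exceptions=False,  # request errors are swallowed unless this is True
    servers=servers,
)
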
@@ -130,13 +131,15 b' def channelstream_request(secret, endpoint, payload, throw_exceptions=False,'
130
131
131 def add_cors_headers(response):
132 def add_cors_headers(response):
132 # allow CORS
133 # allow CORS
133 response.headers.add('Access-Control-Allow-Origin', '*')
134 response.headers.add("Access-Control-Allow-Origin", "*")
134 response.headers.add('XDomainRequestAllowed', '1')
135 response.headers.add("XDomainRequestAllowed", "1")
135 response.headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
136 response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
136 # response.headers.add('Access-Control-Allow-Credentials', 'true')
137 # response.headers.add('Access-Control-Allow-Credentials', 'true')
137 response.headers.add('Access-Control-Allow-Headers',
138 response.headers.add(
138 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie')
139 "Access-Control-Allow-Headers",
139 response.headers.add('Access-Control-Max-Age', '86400')
140 "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie",
141 )
142 response.headers.add("Access-Control-Max-Age", "86400")
140
143
141
144
142 from sqlalchemy.sql import compiler
145 from sqlalchemy.sql import compiler
@@ -145,6 +148,7 b' from psycopg2.extensions import adapt as sqlescape'
145
148
146 # or use the appropriate escape function from your db driver
149 # or use the appropriate escape function from your db driver
147
150
151
148 def compile_query(query):
152 def compile_query(query):
149 dialect = query.session.bind.dialect
153 dialect = query.session.bind.dialect
150 statement = query.statement
154 statement = query.statement
@@ -166,22 +170,23 b' def convert_es_type(input_data):'
166 return str(input_data)
170 return str(input_data)
167
171
168
172
169 ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch'])
173 ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])
170
174
171
175
172 def parse_proto(input_data):
176 def parse_proto(input_data):
173 try:
177 try:
174 parts = [int(x) for x in input_data.split('.')]
178 parts = [int(x) for x in input_data.split(".")]
175 while len(parts) < 3:
179 while len(parts) < 3:
176 parts.append(0)
180 parts.append(0)
177 return ProtoVersion(*parts)
181 return ProtoVersion(*parts)
178 except Exception as e:
182 except Exception as e:
179 log.info('Unknown protocol version: %s' % e)
183 log.info("Unknown protocol version: %s" % e)
180 return ProtoVersion(99, 99, 99)
184 return ProtoVersion(99, 99, 99)
181
185
182
186
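
A standalone restatement of parse_proto() that runs as-is (logging dropped for brevity):

from collections import namedtuple

ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])

def parse_proto(input_data):
    try:
        parts = [int(x) for x in input_data.split(".")]
        while len(parts) < 3:
            parts.append(0)
        return ProtoVersion(*parts)
    except Exception:
        return ProtoVersion(99, 99, 99)

assert parse_proto("0.5") == ProtoVersion(0, 5, 0)         # padded to 3 parts
assert parse_proto("garbage") == ProtoVersion(99, 99, 99)  # sentinel fallback
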
183 def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
187 def es_index_name_limiter(
184 ixtypes=None):
188 start_date=None, end_date=None, months_in_past=6, ixtypes=None
189 ):
185 """
190 """
186 This function limits the search to 6 months by default so we don't have to
191 This function limits the search to 6 months by default so we don't have to
187 query 300 elasticsearch indices for 20 years of historical data for example
187 query, for example, 300 elasticsearch indices for 20 years of historical data
192 query, for example, 300 elasticsearch indices for 20 years of historical data
189
194
190 # should be cached later
195 # should be cached later
191 def get_possible_names():
196 def get_possible_names():
192 return list(Datastores.es.indices.get_alias('*'))
197 return list(Datastores.es.indices.get_alias("*"))
193
198
194 possible_names = get_possible_names()
199 possible_names = get_possible_names()
195 es_index_types = []
200 es_index_types = []
196 if not ixtypes:
201 if not ixtypes:
197 ixtypes = ['reports', 'metrics', 'logs']
202 ixtypes = ["reports", "metrics", "logs"]
198 for t in ixtypes:
203 for t in ixtypes:
199 if t == 'reports':
204 if t == "reports":
200 es_index_types.append('rcae_r_%s')
205 es_index_types.append("rcae_r_%s")
201 elif t == 'logs':
206 elif t == "logs":
202 es_index_types.append('rcae_l_%s')
207 es_index_types.append("rcae_l_%s")
203 elif t == 'metrics':
208 elif t == "metrics":
204 es_index_types.append('rcae_m_%s')
209 es_index_types.append("rcae_m_%s")
205 elif t == 'uptime':
210 elif t == "uptime":
206 es_index_types.append('rcae_u_%s')
211 es_index_types.append("rcae_u_%s")
207 elif t == 'slow_calls':
212 elif t == "slow_calls":
208 es_index_types.append('rcae_sc_%s')
213 es_index_types.append("rcae_sc_%s")
209
214
210 if start_date:
215 if start_date:
211 start_date = copy.copy(start_date)
216 start_date = copy.copy(start_date)
@@ -217,26 +222,34 b' def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,'
217 if not end_date:
222 if not end_date:
218 end_date = start_date + relativedelta(months=months_in_past)
223 end_date = start_date + relativedelta(months=months_in_past)
219
224
220 index_dates = list(rrule(MONTHLY,
225 index_dates = list(
221 dtstart=start_date.date().replace(day=1),
226 rrule(
222 until=end_date.date(),
227 MONTHLY,
223 count=36))
228 dtstart=start_date.date().replace(day=1),
229 until=end_date.date(),
230 count=36,
231 )
232 )
224 index_names = []
233 index_names = []
225 for ix_type in es_index_types:
234 for ix_type in es_index_types:
226 to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates
235 to_extend = [
227 if ix_type % d.strftime('%Y_%m') in possible_names]
236 ix_type % d.strftime("%Y_%m")
237 for d in index_dates
238 if ix_type % d.strftime("%Y_%m") in possible_names
239 ]
228 index_names.extend(to_extend)
240 index_names.extend(to_extend)
229 for day in list(rrule(DAILY, dtstart=start_date.date(),
241 for day in list(
230 until=end_date.date(), count=366)):
242 rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366)
231 ix_name = ix_type % day.strftime('%Y_%m_%d')
243 ):
244 ix_name = ix_type % day.strftime("%Y_%m_%d")
232 if ix_name in possible_names:
245 if ix_name in possible_names:
233 index_names.append(ix_name)
246 index_names.append(ix_name)
234 return index_names
247 return index_names
235
248
236
249
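
To make the naming scheme above concrete, a small runnable illustration; which of these names survive depends on the aliases that actually exist in Elasticsearch:

from datetime import date

templates = {"reports": "rcae_r_%s", "logs": "rcae_l_%s", "metrics": "rcae_m_%s"}
d = date(2018, 3, 1)
templates["reports"] % d.strftime("%Y_%m")     # -> "rcae_r_2018_03" (monthly)
templates["reports"] % d.strftime("%Y_%m_%d")  # -> "rcae_r_2018_03_01" (daily)
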
237 def build_filter_settings_from_query_dict(
250 def build_filter_settings_from_query_dict(
238 request, params=None, override_app_ids=None,
251 request, params=None, override_app_ids=None, resource_permissions=None
239 resource_permissions=None):
252 ):
240 """
253 """
241 Builds list of normalized search terms for ES from query params
254 Builds list of normalized search terms for ES from query params
242 ensuring application list is restricted to only applications user
255 ensuring application list is restricted to only applications user
@@ -249,11 +262,12 b' def build_filter_settings_from_query_dict('
249 params = copy.deepcopy(params)
262 params = copy.deepcopy(params)
250 applications = []
263 applications = []
251 if not resource_permissions:
264 if not resource_permissions:
252 resource_permissions = ['view']
265 resource_permissions = ["view"]
253
266
254 if request.user:
267 if request.user:
255 applications = UserService.resources_with_perms(
268 applications = UserService.resources_with_perms(
256 request.user, resource_permissions, resource_types=['application'])
269 request.user, resource_permissions, resource_types=["application"]
270 )
257
271
258 # CRITICAL - this ensures our resultset is limited to only the ones
272 # CRITICAL - this ensures our resultset is limited to only the ones
259 # user has view permissions
273 # user has view permissions
@@ -273,11 +287,11 b' def build_filter_settings_from_query_dict('
273 for k, v in list(filter_settings.items()):
287 for k, v in list(filter_settings.items()):
274 if k in accepted_search_params:
288 if k in accepted_search_params:
275 continue
289 continue
276 tag_list.append({"name": k, "value": v, "op": 'eq'})
290 tag_list.append({"name": k, "value": v, "op": "eq"})
277 # remove the key from filter_settings
291 # remove the key from filter_settings
278 filter_settings.pop(k, None)
292 filter_settings.pop(k, None)
279 tags = tag_schema.deserialize(tag_list)
293 tags = tag_schema.deserialize(tag_list)
280 filter_settings['tags'] = tags
294 filter_settings["tags"] = tags
281 return filter_settings
295 return filter_settings
282
296
283
297
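
A standalone restatement of the tag-extraction step above; the accepted_search_params values here are made up, the real set comes from appenlight.validators:

accepted_search_params = {"start_date", "end_date", "page"}  # assumed subset
filter_settings = {"start_date": "2018-01-01", "priority": "5"}

tag_list = []
for k, v in list(filter_settings.items()):
    if k in accepted_search_params:
        continue
    tag_list.append({"name": k, "value": v, "op": "eq"})
    filter_settings.pop(k, None)

tag_list  # -> [{"name": "priority", "value": "5", "op": "eq"}]
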
@@ -299,26 +313,36 b' def permission_tuple_to_dict(data):'
299 "resource_type": None,
313 "resource_type": None,
300 "resource_id": None,
314 "resource_id": None,
301 "group_name": None,
315 "group_name": None,
302 "group_id": None
316 "group_id": None,
303 }
317 }
304 if data.user:
318 if data.user:
305 out["user_name"] = data.user.user_name
319 out["user_name"] = data.user.user_name
306 if data.perm_name == ALL_PERMISSIONS:
320 if data.perm_name == ALL_PERMISSIONS:
307 out['perm_name'] = '__all_permissions__'
321 out["perm_name"] = "__all_permissions__"
308 if data.resource:
322 if data.resource:
309 out['resource_name'] = data.resource.resource_name
323 out["resource_name"] = data.resource.resource_name
310 out['resource_type'] = data.resource.resource_type
324 out["resource_type"] = data.resource.resource_type
311 out['resource_id'] = data.resource.resource_id
325 out["resource_id"] = data.resource.resource_id
312 if data.group:
326 if data.group:
313 out['group_name'] = data.group.group_name
327 out["group_name"] = data.group.group_name
314 out['group_id'] = data.group.id
328 out["group_id"] = data.group.id
315 return out
329 return out
316
330
317
331
318 def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
332 def get_cached_buckets(
319 gap_gen=None, db_session=None, step_interval=None,
333 request,
320 iv_extractor=None,
334 stats_since,
321 rerange=False, *args, **kwargs):
335 end_time,
336 fn,
337 cache_key,
338 gap_gen=None,
339 db_session=None,
340 step_interval=None,
341 iv_extractor=None,
342 rerange=False,
343 *args,
344 **kwargs
345 ):
322 """ Takes "fn" that should return some data and tries to load the data
346 """ Takes "fn" that should return some data and tries to load the data
323 dividing it into daily buckets - if the stats_since and end time give a
347 dividing it into daily buckets - if the stats_since and end time give a
324 delta bigger than 24 hours, then only "today's" data is computed on the fly
348 delta bigger than 24 hours, then only "today's" data is computed on the fly
@@ -360,25 +384,28 b' def get_cached_buckets(request, stats_since, end_time, fn, cache_key,'
360 # do not use custom interval if total time range with new iv would exceed
384 # do not use custom interval if total time range with new iv would exceed
361 # end time
385 # end time
362 if not step_interval or stats_since + step_interval >= end_time:
386 if not step_interval or stats_since + step_interval >= end_time:
363 if delta < h.time_deltas.get('12h')['delta']:
387 if delta < h.time_deltas.get("12h")["delta"]:
364 step_interval = timedelta(seconds=60)
388 step_interval = timedelta(seconds=60)
365 elif delta < h.time_deltas.get('3d')['delta']:
389 elif delta < h.time_deltas.get("3d")["delta"]:
366 step_interval = timedelta(seconds=60 * 5)
390 step_interval = timedelta(seconds=60 * 5)
367 elif delta > h.time_deltas.get('2w')['delta']:
391 elif delta > h.time_deltas.get("2w")["delta"]:
368 step_interval = timedelta(days=1)
392 step_interval = timedelta(days=1)
369 else:
393 else:
370 step_interval = timedelta(minutes=60)
394 step_interval = timedelta(minutes=60)
371
395
372 if step_interval >= timedelta(minutes=60):
396 if step_interval >= timedelta(minutes=60):
373 log.info('cached_buckets:{}: adjusting start time '
397 log.info(
374 'for hourly or daily intervals'.format(cache_key))
398 "cached_buckets:{}: adjusting start time "
399 "for hourly or daily intervals".format(cache_key)
400 )
375 stats_since = stats_since.replace(hour=0, minute=0)
401 stats_since = stats_since.replace(hour=0, minute=0)
376
402
377 ranges = [i.start_interval for i in list(gap_gen(stats_since,
403 ranges = [
378 step_interval, [],
404 i.start_interval
379 end_time=end_time))]
405 for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time))
406 ]
380 buckets = {}
407 buckets = {}
381 storage_key = 'buckets:' + cache_key + '{}|{}'
408 storage_key = "buckets:" + cache_key + "{}|{}"
382 # this means we basically cache per hour in 3-14 day intervals but i think
409 # this means we basically cache per hour in 3-14 day intervals but i think
383 # it's fine at this point - will be faster than db access anyway
410 # it's fine at this point - will be faster than db access anyway
384
411
@@ -391,45 +418,67 b' def get_cached_buckets(request, stats_since, end_time, fn, cache_key,'
391 k = storage_key.format(step_interval.total_seconds(), r)
418 k = storage_key.format(step_interval.total_seconds(), r)
392 value = request.registry.cache_regions.redis_day_30.get(k)
419 value = request.registry.cache_regions.redis_day_30.get(k)
393 # last buckets are never loaded from cache
420 # last buckets are never loaded from cache
394 is_last_result = (
421 is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges
395 r >= end_time - timedelta(hours=6) or r in last_ranges)
396 if value is not NO_VALUE and not is_last_result:
422 if value is not NO_VALUE and not is_last_result:
397 log.info("cached_buckets:{}: "
423 log.info(
398 "loading range {} from cache".format(cache_key, r))
424 "cached_buckets:{}: "
425 "loading range {} from cache".format(cache_key, r)
426 )
399 buckets[r] = value
427 buckets[r] = value
400 else:
428 else:
401 log.info("cached_buckets:{}: "
429 log.info(
402 "loading range {} from storage".format(cache_key, r))
430 "cached_buckets:{}: "
431 "loading range {} from storage".format(cache_key, r)
432 )
403 range_size = step_interval
433 range_size = step_interval
404 if (step_interval == timedelta(minutes=60) and
434 if (
405 not is_last_result and rerange):
435 step_interval == timedelta(minutes=60)
436 and not is_last_result
437 and rerange
438 ):
406 range_size = timedelta(days=1)
439 range_size = timedelta(days=1)
407 r = r.replace(hour=0, minute=0)
440 r = r.replace(hour=0, minute=0)
408 log.info("cached_buckets:{}: "
441 log.info(
409 "loading collapsed "
442 "cached_buckets:{}: "
410 "range {} {}".format(cache_key, r,
443 "loading collapsed "
411 r + range_size))
444 "range {} {}".format(cache_key, r, r + range_size)
445 )
412 bucket_data = fn(
446 bucket_data = fn(
413 request, r, r + range_size, step_interval,
447 request,
414 gap_gen, bucket_count=len(ranges), *args, **kwargs)
448 r,
449 r + range_size,
450 step_interval,
451 gap_gen,
452 bucket_count=len(ranges),
453 *args,
454 **kwargs
455 )
415 for b in bucket_data:
456 for b in bucket_data:
416 b_iv = iv_extractor(b)
457 b_iv = iv_extractor(b)
417 buckets[b_iv] = b
458 buckets[b_iv] = b
418 k2 = storage_key.format(
459 k2 = storage_key.format(step_interval.total_seconds(), b_iv)
419 step_interval.total_seconds(), b_iv)
420 request.registry.cache_regions.redis_day_30.set(k2, b)
460 request.registry.cache_regions.redis_day_30.set(k2, b)
421 log.info("cached_buckets:{}: saving cache".format(cache_key))
461 log.info("cached_buckets:{}: saving cache".format(cache_key))
422 else:
462 else:
423 # bucket count is 1 for short time ranges <= 24h from now
463 # bucket count is 1 for short time ranges <= 24h from now
424 bucket_data = fn(request, stats_since, end_time, step_interval,
464 bucket_data = fn(
425 gap_gen, bucket_count=1, *args, **kwargs)
465 request,
466 stats_since,
467 end_time,
468 step_interval,
469 gap_gen,
470 bucket_count=1,
471 *args,
472 **kwargs
473 )
426 for b in bucket_data:
474 for b in bucket_data:
427 buckets[iv_extractor(b)] = b
475 buckets[iv_extractor(b)] = b
428 return buckets
476 return buckets
429
477
430
478
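
The automatic step-interval selection above, restated as a runnable function; the h.time_deltas values are assumed here to be plain timedeltas for the 12h/3d/2w keys:

from datetime import timedelta

def pick_step_interval(delta):
    if delta < timedelta(hours=12):
        return timedelta(seconds=60)      # 1-minute buckets
    elif delta < timedelta(days=3):
        return timedelta(seconds=60 * 5)  # 5-minute buckets
    elif delta > timedelta(weeks=2):
        return timedelta(days=1)          # daily buckets
    return timedelta(minutes=60)          # hourly buckets

assert pick_step_interval(timedelta(hours=6)) == timedelta(seconds=60)
assert pick_step_interval(timedelta(days=7)) == timedelta(minutes=60)
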
431 def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
479 def get_cached_split_data(
432 db_session=None, *args, **kwargs):
480 request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs
481 ):
433 """ Takes "fn" that should return some data and tries to load the data
482 """ Takes "fn" that should return some data and tries to load the data
434 dividing it into 2 buckets - cached "since_from" bucket and "today"
483 dividing it into 2 buckets - cached "since_from" bucket and "today"
435 bucket - then the data can be reduced into a single value
484 bucket - then the data can be reduced into a single value
@@ -442,43 +491,51 b' def get_cached_split_data(request, stats_since, end_time, fn, cache_key,'
442 delta = end_time - stats_since
491 delta = end_time - stats_since
443
492
444 if delta >= timedelta(minutes=60):
493 if delta >= timedelta(minutes=60):
445 log.info('cached_split_data:{}: adjusting start time '
494 log.info(
446 'for hourly or daily intervals'.format(cache_key))
495 "cached_split_data:{}: adjusting start time "
496 "for hourly or daily intervals".format(cache_key)
497 )
447 stats_since = stats_since.replace(hour=0, minute=0)
498 stats_since = stats_since.replace(hour=0, minute=0)
448
499
449 storage_key = 'buckets_split_data:' + cache_key + ':{}|{}'
500 storage_key = "buckets_split_data:" + cache_key + ":{}|{}"
450 old_end_time = end_time.replace(hour=0, minute=0)
501 old_end_time = end_time.replace(hour=0, minute=0)
451
502
452 final_storage_key = storage_key.format(delta.total_seconds(),
503 final_storage_key = storage_key.format(delta.total_seconds(), old_end_time)
453 old_end_time)
454 older_data = None
504 older_data = None
455
505
456 cdata = request.registry.cache_regions.redis_day_7.get(
506 cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key)
457 final_storage_key)
458
507
459 if cdata:
508 if cdata:
460 log.info("cached_split_data:{}: found old "
509 log.info("cached_split_data:{}: found old " "bucket data".format(cache_key))
461 "bucket data".format(cache_key))
462 older_data = cdata
510 older_data = cdata
463
511
464 if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and
512 if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata:
465 not cdata):
513 log.info(
466 log.info("cached_split_data:{}: didn't find the "
514 "cached_split_data:{}: didn't find the "
467 "start bucket in cache so load older data".format(cache_key))
515 "start bucket in cache so load older data".format(cache_key)
516 )
468 recent_stats_since = old_end_time
517 recent_stats_since = old_end_time
469 older_data = fn(request, stats_since, recent_stats_since,
518 older_data = fn(
470 db_session=db_session, *args, **kwargs)
519 request,
471 request.registry.cache_regions.redis_day_7.set(final_storage_key,
520 stats_since,
472 older_data)
521 recent_stats_since,
473 elif stats_since < end_time - h.time_deltas.get('24h')['delta']:
522 db_session=db_session,
523 *args,
524 **kwargs
525 )
526 request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data)
527 elif stats_since < end_time - h.time_deltas.get("24h")["delta"]:
474 recent_stats_since = old_end_time
528 recent_stats_since = old_end_time
475 else:
529 else:
476 recent_stats_since = stats_since
530 recent_stats_since = stats_since
477
531
478 log.info("cached_split_data:{}: loading fresh "
532 log.info(
479 "data buckets from last 24h ".format(cache_key))
533 "cached_split_data:{}: loading fresh "
480 todays_data = fn(request, recent_stats_since, end_time,
534 "data buckets from last 24h ".format(cache_key)
481 db_session=db_session, *args, **kwargs)
535 )
536 todays_data = fn(
537 request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs
538 )
482 return older_data, todays_data
539 return older_data, todays_data
483
540
484
541
@@ -488,4 +545,4 b' def in_batches(seq, size):'
488 :param seq (iterable)
545 :param seq (iterable)
489 :param size integer
546 :param size integer
490 """
547 """
491 return (seq[pos:pos + size] for pos in range(0, len(seq), size))
548 return (seq[pos : pos + size] for pos in range(0, len(seq), size))
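
A runnable example; note the sequence must support slicing and len(), so this works for lists, tuples and strings but not for arbitrary iterators:

def in_batches(seq, size):
    return (seq[pos : pos + size] for pos in range(0, len(seq), size))

list(in_batches([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]
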
@@ -24,119 +24,138 b' log = logging.getLogger(__name__)'
24
24
25 def parse_airbrake_xml(request):
25 def parse_airbrake_xml(request):
26 root = request.context.airbrake_xml_etree
26 root = request.context.airbrake_xml_etree
27 error = root.find('error')
27 error = root.find("error")
28 notifier = root.find('notifier')
28 notifier = root.find("notifier")
29 server_env = root.find('server-environment')
29 server_env = root.find("server-environment")
30 request_data = root.find('request')
30 request_data = root.find("request")
31 user = root.find('current-user')
31 user = root.find("current-user")
32 if request_data is not None:
32 if request_data is not None:
33 cgi_data = request_data.find('cgi-data')
33 cgi_data = request_data.find("cgi-data")
34 if cgi_data is None:
34 if cgi_data is None:
35 cgi_data = []
35 cgi_data = []
36
36
37 error_dict = {
37 error_dict = {
38 'class_name': error.findtext('class') or '',
38 "class_name": error.findtext("class") or "",
39 'error': error.findtext('message') or '',
39 "error": error.findtext("message") or "",
40 "occurences": 1,
40 "occurences": 1,
41 "http_status": 500,
41 "http_status": 500,
42 "priority": 5,
42 "priority": 5,
43 "server": 'unknown',
43 "server": "unknown",
44 'url': 'unknown', 'request': {}
44 "url": "unknown",
45 "request": {},
45 }
46 }
46 if user is not None:
47 if user is not None:
47 error_dict['username'] = user.findtext('username') or \
48 error_dict["username"] = user.findtext("username") or user.findtext("id")
48 user.findtext('id')
49 if notifier is not None:
49 if notifier is not None:
50 error_dict['client'] = notifier.findtext('name')
50 error_dict["client"] = notifier.findtext("name")
51
51
52 if server_env is not None:
52 if server_env is not None:
53 error_dict["server"] = server_env.findtext('hostname', 'unknown')
53 error_dict["server"] = server_env.findtext("hostname", "unknown")
54
54
55 whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME',
55 whitelist_environ = [
56 'CONTENT_TYPE', 'HTTP_REFERER']
56 "REMOTE_USER",
57 "REMOTE_ADDR",
58 "SERVER_NAME",
59 "CONTENT_TYPE",
60 "HTTP_REFERER",
61 ]
57
62
58 if request_data is not None:
63 if request_data is not None:
59 error_dict['url'] = request_data.findtext('url', 'unknown')
64 error_dict["url"] = request_data.findtext("url", "unknown")
60 component = request_data.findtext('component')
65 component = request_data.findtext("component")
61 action = request_data.findtext('action')
66 action = request_data.findtext("action")
62 if component and action:
67 if component and action:
63 error_dict['view_name'] = '%s:%s' % (component, action)
68 error_dict["view_name"] = "%s:%s" % (component, action)
64 for node in cgi_data:
69 for node in cgi_data:
65 key = node.get('key')
70 key = node.get("key")
66 if key.startswith('HTTP') or key in whitelist_environ:
71 if key.startswith("HTTP") or key in whitelist_environ:
67 error_dict['request'][key] = node.text
72 error_dict["request"][key] = node.text
68 elif 'query_parameters' in key:
73 elif "query_parameters" in key:
69 error_dict['request']['GET'] = {}
74 error_dict["request"]["GET"] = {}
70 for x in node:
75 for x in node:
71 error_dict['request']['GET'][x.get('key')] = x.text
76 error_dict["request"]["GET"][x.get("key")] = x.text
72 elif 'request_parameters' in key:
77 elif "request_parameters" in key:
73 error_dict['request']['POST'] = {}
78 error_dict["request"]["POST"] = {}
74 for x in node:
79 for x in node:
75 error_dict['request']['POST'][x.get('key')] = x.text
80 error_dict["request"]["POST"][x.get("key")] = x.text
76 elif key.endswith('cookie'):
81 elif key.endswith("cookie"):
77 error_dict['request']['COOKIE'] = {}
82 error_dict["request"]["COOKIE"] = {}
78 for x in node:
83 for x in node:
79 error_dict['request']['COOKIE'][x.get('key')] = x.text
84 error_dict["request"]["COOKIE"][x.get("key")] = x.text
80 elif key.endswith('request_id'):
85 elif key.endswith("request_id"):
81 error_dict['request_id'] = node.text
86 error_dict["request_id"] = node.text
82 elif key.endswith('session'):
87 elif key.endswith("session"):
83 error_dict['request']['SESSION'] = {}
88 error_dict["request"]["SESSION"] = {}
84 for x in node:
89 for x in node:
85 error_dict['request']['SESSION'][x.get('key')] = x.text
90 error_dict["request"]["SESSION"][x.get("key")] = x.text
86 else:
91 else:
87 if key in ['rack.session.options']:
92 if key in ["rack.session.options"]:
88 # skip secret configs
93 # skip secret configs
89 continue
94 continue
90 try:
95 try:
91 if len(node):
96 if len(node):
92 error_dict['request'][key] = dict(
97 error_dict["request"][key] = dict(
93 [(x.get('key'), x.text,) for x in node])
98 [(x.get("key"), x.text) for x in node]
99 )
94 else:
100 else:
95 error_dict['request'][key] = node.text
101 error_dict["request"][key] = node.text
96 except Exception as e:
102 except Exception as e:
97 log.warning('Airbrake integration exception: %s' % e)
103 log.warning("Airbrake integration exception: %s" % e)
98
104
99 error_dict['request'].pop('HTTP_COOKIE', '')
105 error_dict["request"].pop("HTTP_COOKIE", "")
100
106
101 error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '')
107 error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "")
102 error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '')
108 error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "")
103 if 'request_id' not in error_dict:
109 if "request_id" not in error_dict:
104 error_dict['request_id'] = str(uuid.uuid4())
110 error_dict["request_id"] = str(uuid.uuid4())
105 if request.context.possibly_public:
111 if request.context.possibly_public:
106 # set ip for reports that come from airbrake js client
112 # set ip for reports that come from airbrake js client
107 error_dict["timestamp"] = datetime.utcnow()
113 error_dict["timestamp"] = datetime.utcnow()
108 if request.environ.get("HTTP_X_FORWARDED_FOR"):
114 if request.environ.get("HTTP_X_FORWARDED_FOR"):
109 ip = request.environ.get("HTTP_X_FORWARDED_FOR", '')
115 ip = request.environ.get("HTTP_X_FORWARDED_FOR", "")
110 first_ip = ip.split(',')[0]
116 first_ip = ip.split(",")[0]
111 remote_addr = first_ip.strip()
117 remote_addr = first_ip.strip()
112 else:
118 else:
113 remote_addr = (request.environ.get("HTTP_X_REAL_IP") or
119 remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get(
114 request.environ.get('REMOTE_ADDR'))
120 "REMOTE_ADDR"
121 )
115 error_dict["ip"] = remote_addr
122 error_dict["ip"] = remote_addr
116
123
117 blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
124 blacklist = [
118 'session', 'test']
125 "password",
126 "passwd",
127 "pwd",
128 "auth_tkt",
129 "secret",
130 "csrf",
131 "session",
132 "test",
133 ]
119
134
120 lines = []
135 lines = []
121 for l in error.find('backtrace'):
136 for l in error.find("backtrace"):
122 lines.append({'file': l.get("file", ""),
137 lines.append(
123 'line': l.get("number", ""),
138 {
124 'fn': l.get("method", ""),
139 "file": l.get("file", ""),
125 'module': l.get("module", ""),
140 "line": l.get("number", ""),
126 'cline': l.get("method", ""),
141 "fn": l.get("method", ""),
127 'vars': {}})
142 "module": l.get("module", ""),
128 error_dict['traceback'] = list(reversed(lines))
143 "cline": l.get("method", ""),
144 "vars": {},
145 }
146 )
147 error_dict["traceback"] = list(reversed(lines))
129 # filtering is not provided by airbrake
148 # filtering is not provided by airbrake
130 keys_to_check = (
149 keys_to_check = (
131 error_dict['request'].get('COOKIE'),
150 error_dict["request"].get("COOKIE"),
132 error_dict['request'].get('COOKIES'),
151 error_dict["request"].get("COOKIES"),
133 error_dict['request'].get('POST'),
152 error_dict["request"].get("POST"),
134 error_dict['request'].get('SESSION'),
153 error_dict["request"].get("SESSION"),
135 )
154 )
136 for source in [_f for _f in keys_to_check if _f]:
155 for source in [_f for _f in keys_to_check if _f]:
137 for k in source.keys():
156 for k in source.keys():
138 for bad_key in blacklist:
157 for bad_key in blacklist:
139 if bad_key in k.lower():
158 if bad_key in k.lower():
140 source[k] = '***'
159 source[k] = "***"
141
160
142 return error_dict
161 return error_dict
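
The scrubbing step in isolation, runnable as-is: any key that merely contains a blacklisted fragment is masked, case-insensitively:

blacklist = ["password", "passwd", "pwd", "auth_tkt", "secret",
             "csrf", "session", "test"]
source = {"user_PASSWORD": "hunter2", "color": "blue"}
for k in source.keys():
    for bad_key in blacklist:
        if bad_key in k.lower():
            source[k] = "***"
source  # -> {"user_PASSWORD": "***", "color": "blue"}
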
@@ -22,12 +22,12 b' log = logging.getLogger(__name__)'
22
22
23
23
24 def to_relativedelta(time_delta):
24 def to_relativedelta(time_delta):
25 return relativedelta(seconds=int(time_delta.total_seconds()),
25 return relativedelta(
26 microseconds=time_delta.microseconds)
26 seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds
27 )
27
28
28
29
29 def convert_date(date_str, return_utcnow_if_wrong=True,
30 def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):
30 normalize_future=False):
31 utcnow = datetime.utcnow()
31 utcnow = datetime.utcnow()
32 if isinstance(date_str, datetime):
32 if isinstance(date_str, datetime):
33 # get rid of tzinfo
33 # get rid of tzinfo
@@ -36,21 +36,21 b' def convert_date(date_str, return_utcnow_if_wrong=True,'
36 return utcnow
36 return utcnow
37 try:
37 try:
38 try:
38 try:
39 if 'Z' in date_str:
39 if "Z" in date_str:
40 date_str = date_str[:date_str.index('Z')]
40 date_str = date_str[: date_str.index("Z")]
41 if '.' in date_str:
41 if "." in date_str:
42 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%f')
42 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f")
43 else:
43 else:
44 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S')
44 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
45 except Exception:
45 except Exception:
46 # bw compat with old client
46 # bw compat with old client
47 date = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S,%f')
47 date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
48 except Exception:
48 except Exception:
49 if return_utcnow_if_wrong:
49 if return_utcnow_if_wrong:
50 date = utcnow
50 date = utcnow
51 else:
51 else:
52 date = None
52 date = None
53 if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
53 if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
54 log.warning('time %s is more than 3 min in the future, normalizing' % date)
54 log.warning("time %s is more than 3 min in the future, normalizing" % date)
55 return utcnow
55 return utcnow
56 return date
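
A usage sketch of the accepted formats, assuming the helper is importable (module path assumed):

from appenlight.lib.utils.date_utils import convert_date  # path assumed

convert_date("2018-05-04T10:00:00.500Z")  # "Z" stripped, microseconds kept
convert_date("2018-05-04 10:00:00,500")   # old-client comma format
convert_date("not a date")                # falls back to utcnow()
convert_date("not a date", return_utcnow_if_wrong=False)  # -> None
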
56 return date
@@ -19,45 +19,68 b' from datetime import timedelta'
19 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
19 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
20
20
21 EXCLUDED_LOG_VARS = [
21 EXCLUDED_LOG_VARS = [
22 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
22 "args",
23 'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs',
23 "asctime",
24 'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated',
24 "created",
25 'thread', 'threadName']
25 "exc_info",
26 "exc_text",
27 "filename",
28 "funcName",
29 "levelname",
30 "levelno",
31 "lineno",
32 "message",
33 "module",
34 "msecs",
35 "msg",
36 "name",
37 "pathname",
38 "process",
39 "processName",
40 "relativeCreated",
41 "thread",
42 "threadName",
43 ]
26
44
27 EXCLUDE_SENTRY_KEYS = [
45 EXCLUDE_SENTRY_KEYS = [
28 'csp',
46 "csp",
29 'culprit',
47 "culprit",
30 'event_id',
48 "event_id",
31 'exception',
49 "exception",
32 'extra',
50 "extra",
33 'level',
51 "level",
34 'logentry',
52 "logentry",
35 'logger',
53 "logger",
36 'message',
54 "message",
37 'modules',
55 "modules",
38 'platform',
56 "platform",
39 'query',
57 "query",
40 'release',
58 "release",
41 'request',
59 "request",
42 'sentry.interfaces.Csp', 'sentry.interfaces.Exception',
60 "sentry.interfaces.Csp",
43 'sentry.interfaces.Http', 'sentry.interfaces.Message',
61 "sentry.interfaces.Exception",
44 'sentry.interfaces.Query',
62 "sentry.interfaces.Http",
45 'sentry.interfaces.Stacktrace',
63 "sentry.interfaces.Message",
46 'sentry.interfaces.Template', 'sentry.interfaces.User',
64 "sentry.interfaces.Query",
47 'sentry.interfaces.csp.Csp',
65 "sentry.interfaces.Stacktrace",
48 'sentry.interfaces.exception.Exception',
66 "sentry.interfaces.Template",
49 'sentry.interfaces.http.Http',
67 "sentry.interfaces.User",
50 'sentry.interfaces.message.Message',
68 "sentry.interfaces.csp.Csp",
51 'sentry.interfaces.query.Query',
69 "sentry.interfaces.exception.Exception",
52 'sentry.interfaces.stacktrace.Stacktrace',
70 "sentry.interfaces.http.Http",
53 'sentry.interfaces.template.Template',
71 "sentry.interfaces.message.Message",
54 'sentry.interfaces.user.User', 'server_name',
72 "sentry.interfaces.query.Query",
55 'stacktrace',
73 "sentry.interfaces.stacktrace.Stacktrace",
56 'tags',
74 "sentry.interfaces.template.Template",
57 'template',
75 "sentry.interfaces.user.User",
58 'time_spent',
76 "server_name",
59 'timestamp',
77 "stacktrace",
60 'user']
78 "tags",
79 "template",
80 "time_spent",
81 "timestamp",
82 "user",
83 ]
61
84
62
85
63 def get_keys(list_of_keys, json_body):
86 def get_keys(list_of_keys, json_body):
@@ -67,30 +90,32 b' def get_keys(list_of_keys, json_body):'
67
90
68
91
69 def get_logentry(json_body):
92 def get_logentry(json_body):
70 key_names = ['logentry',
93 key_names = [
71 'sentry.interfaces.message.Message',
94 "logentry",
72 'sentry.interfaces.Message'
95 "sentry.interfaces.message.Message",
73 ]
96 "sentry.interfaces.Message",
97 ]
74 logentry = get_keys(key_names, json_body)
98 logentry = get_keys(key_names, json_body)
75 return logentry
99 return logentry
76
100
77
101
78 def get_exception(json_body):
102 def get_exception(json_body):
79 parsed_exception = {}
103 parsed_exception = {}
80 key_names = ['exception',
104 key_names = [
81 'sentry.interfaces.exception.Exception',
105 "exception",
82 'sentry.interfaces.Exception'
106 "sentry.interfaces.exception.Exception",
83 ]
107 "sentry.interfaces.Exception",
108 ]
84 exception = get_keys(key_names, json_body) or {}
109 exception = get_keys(key_names, json_body) or {}
85 if exception:
110 if exception:
86 if isinstance(exception, dict):
111 if isinstance(exception, dict):
87 exception = exception['values'][0]
112 exception = exception["values"][0]
88 else:
113 else:
89 exception = exception[0]
114 exception = exception[0]
90
115
91 parsed_exception['type'] = exception.get('type')
116 parsed_exception["type"] = exception.get("type")
92 parsed_exception['value'] = exception.get('value')
117 parsed_exception["value"] = exception.get("value")
93 parsed_exception['module'] = exception.get('module')
118 parsed_exception["module"] = exception.get("module")
94 parsed_stacktrace = get_stacktrace(exception) or {}
119 parsed_stacktrace = get_stacktrace(exception) or {}
95 parsed_exception = exception or {}
120 parsed_exception = exception or {}
96 return parsed_exception, parsed_stacktrace
121 return parsed_exception, parsed_stacktrace
@@ -98,41 +123,45 b' def get_exception(json_body):'
98
123
99 def get_stacktrace(json_body):
124 def get_stacktrace(json_body):
100 parsed_stacktrace = []
125 parsed_stacktrace = []
101 key_names = ['stacktrace',
126 key_names = [
102 'sentry.interfaces.stacktrace.Stacktrace',
127 "stacktrace",
103 'sentry.interfaces.Stacktrace'
128 "sentry.interfaces.stacktrace.Stacktrace",
104 ]
129 "sentry.interfaces.Stacktrace",
130 ]
105 stacktrace = get_keys(key_names, json_body)
131 stacktrace = get_keys(key_names, json_body)
106 if stacktrace:
132 if stacktrace:
107 for frame in stacktrace['frames']:
133 for frame in stacktrace["frames"]:
108 parsed_stacktrace.append(
134 parsed_stacktrace.append(
109 {"cline": frame.get('context_line', ''),
135 {
110 "file": frame.get('filename', ''),
136 "cline": frame.get("context_line", ""),
111 "module": frame.get('module', ''),
137 "file": frame.get("filename", ""),
112 "fn": frame.get('function', ''),
138 "module": frame.get("module", ""),
113 "line": frame.get('lineno', ''),
139 "fn": frame.get("function", ""),
114 "vars": list(frame.get('vars', {}).items())
140 "line": frame.get("lineno", ""),
115 }
141 "vars": list(frame.get("vars", {}).items()),
142 }
116 )
143 )
117 return parsed_stacktrace
144 return parsed_stacktrace
118
145
119
146
120 def get_template(json_body):
147 def get_template(json_body):
121 parsed_template = []
148 parsed_template = []
122 key_names = ['template',
149 key_names = [
123 'sentry.interfaces.template.Template',
150 "template",
124 'sentry.interfaces.Template'
151 "sentry.interfaces.template.Template",
125 ]
152 "sentry.interfaces.Template",
153 ]
126 template = get_keys(key_names, json_body)
154 template = get_keys(key_names, json_body)
127 if template:
155 if template:
128 for frame in template['frames']:
156 for frame in template["frames"]:
129 parsed_template.append(
157 parsed_template.append(
130 {"cline": frame.get('context_line', ''),
158 {
131 "file": frame.get('filename', ''),
159 "cline": frame.get("context_line", ""),
132 "fn": '',
160 "file": frame.get("filename", ""),
133 "line": frame.get('lineno', ''),
161 "fn": "",
134 "vars": []
162 "line": frame.get("lineno", ""),
135 }
163 "vars": [],
164 }
136 )
165 )
137
166
138 return parsed_template
167 return parsed_template
@@ -140,16 +169,13 b' def get_template(json_body):'
140
169
141 def get_request(json_body):
170 def get_request(json_body):
142 parsed_http = {}
171 parsed_http = {}
143 key_names = ['request',
172 key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"]
144 'sentry.interfaces.http.Http',
145 'sentry.interfaces.Http'
146 ]
147 http = get_keys(key_names, json_body) or {}
173 http = get_keys(key_names, json_body) or {}
148 for k, v in http.items():
174 for k, v in http.items():
149 if k == 'headers':
175 if k == "headers":
150 parsed_http['headers'] = {}
176 parsed_http["headers"] = {}
151 for sk, sv in http['headers'].items():
177 for sk, sv in http["headers"].items():
152 parsed_http['headers'][sk.title()] = sv
178 parsed_http["headers"][sk.title()] = sv
153 else:
179 else:
154 parsed_http[k.lower()] = v
180 parsed_http[k.lower()] = v
155 return parsed_http
181 return parsed_http
@@ -157,53 +183,47 b' def get_request(json_body):'
157
183
158 def get_user(json_body):
184 def get_user(json_body):
159 parsed_user = {}
185 parsed_user = {}
160 key_names = ['user',
186 key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"]
161 'sentry.interfaces.user.User',
162 'sentry.interfaces.User'
163 ]
164 user = get_keys(key_names, json_body)
187 user = get_keys(key_names, json_body)
165 if user:
188 if user:
166 parsed_user['id'] = user.get('id')
189 parsed_user["id"] = user.get("id")
167 parsed_user['username'] = user.get('username')
190 parsed_user["username"] = user.get("username")
168 parsed_user['email'] = user.get('email')
191 parsed_user["email"] = user.get("email")
169 parsed_user['ip_address'] = user.get('ip_address')
192 parsed_user["ip_address"] = user.get("ip_address")
170
193
171 return parsed_user
194 return parsed_user
172
195
173
196
174 def get_query(json_body):
197 def get_query(json_body):
175 query = None
198 query = None
176 key_name = ['query',
199 key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"]
177 'sentry.interfaces.query.Query',
178 'sentry.interfaces.Query'
179 ]
180 query = get_keys(key_name, json_body)
200 query = get_keys(key_name, json_body)
181 return query
201 return query
182
202
183
203
184 def parse_sentry_event(json_body):
204 def parse_sentry_event(json_body):
185 request_id = json_body.get('event_id')
205 request_id = json_body.get("event_id")
186
206
187 # required
207 # required
188 message = json_body.get('message')
208 message = json_body.get("message")
189 log_timestamp = json_body.get('timestamp')
209 log_timestamp = json_body.get("timestamp")
190 level = json_body.get('level')
210 level = json_body.get("level")
191 if isinstance(level, int):
211 if isinstance(level, int):
192 level = LogLevelPython.key_from_value(level)
212 level = LogLevelPython.key_from_value(level)
193
213
194 namespace = json_body.get('logger')
214 namespace = json_body.get("logger")
195 language = json_body.get('platform')
215 language = json_body.get("platform")
196
216
197 # optional
217 # optional
198 server_name = json_body.get('server_name')
218 server_name = json_body.get("server_name")
199 culprit = json_body.get('culprit')
219 culprit = json_body.get("culprit")
200 release = json_body.get('release')
220 release = json_body.get("release")
201
221
202 tags = json_body.get('tags', {})
222 tags = json_body.get("tags", {})
203 if hasattr(tags, 'items'):
223 if hasattr(tags, "items"):
204 tags = list(tags.items())
224 tags = list(tags.items())
205 extra = json_body.get('extra', {})
225 extra = json_body.get("extra", {})
206 if hasattr(extra, 'items'):
226 if hasattr(extra, "items"):
207 extra = list(extra.items())
227 extra = list(extra.items())
208
228
209 parsed_req = get_request(json_body)
229 parsed_req = get_request(json_body)
@@ -212,12 +232,13 b' def parse_sentry_event(json_body):'
212 query = get_query(json_body)
232 query = get_query(json_body)
213
233
214 # other unidentified keys found
234 # other unidentified keys found
215 other_keys = [(k, json_body[k]) for k in json_body.keys()
235 other_keys = [
216 if k not in EXCLUDE_SENTRY_KEYS]
236 (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS
237 ]
217
238
218 logentry = get_logentry(json_body)
239 logentry = get_logentry(json_body)
219 if logentry:
240 if logentry:
220 message = logentry['message']
241 message = logentry["message"]
221
242
222 exception, stacktrace = get_exception(json_body)
243 exception, stacktrace = get_exception(json_body)
223
244
@@ -227,70 +248,70 b' def parse_sentry_event(json_body):'
227 event_type = ParsedSentryEventType.LOG
248 event_type = ParsedSentryEventType.LOG
228
249
229 event_dict = {
250 event_dict = {
230 'log_level': level,
251 "log_level": level,
231 'message': message,
252 "message": message,
232 'namespace': namespace,
253 "namespace": namespace,
233 'request_id': request_id,
254 "request_id": request_id,
234 'server': server_name,
255 "server": server_name,
235 'date': log_timestamp,
256 "date": log_timestamp,
236 'tags': tags
257 "tags": tags,
237 }
258 }
238 event_dict['tags'].extend(
259 event_dict["tags"].extend(
239 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS])
260 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS]
261 )
240
262
241 # other keys can be various object types
263 # other keys can be various object types
242 event_dict['tags'].extend([(k, v) for k, v in other_keys
264 event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)])
243 if isinstance(v, str)])
244 if culprit:
265 if culprit:
245 event_dict['tags'].append(('sentry_culprit', culprit))
266 event_dict["tags"].append(("sentry_culprit", culprit))
246 if language:
267 if language:
247 event_dict['tags'].append(('sentry_language', language))
268 event_dict["tags"].append(("sentry_language", language))
248 if release:
269 if release:
249 event_dict['tags'].append(('sentry_release', release))
270 event_dict["tags"].append(("sentry_release", release))
250
271
251 if exception or stacktrace or alt_stacktrace or template:
272 if exception or stacktrace or alt_stacktrace or template:
252 event_type = ParsedSentryEventType.ERROR_REPORT
273 event_type = ParsedSentryEventType.ERROR_REPORT
253 event_dict = {
274 event_dict = {
254 'client': 'sentry',
275 "client": "sentry",
255 'error': message,
276 "error": message,
256 'namespace': namespace,
277 "namespace": namespace,
257 'request_id': request_id,
278 "request_id": request_id,
258 'server': server_name,
279 "server": server_name,
259 'start_time': log_timestamp,
280 "start_time": log_timestamp,
260 'end_time': None,
281 "end_time": None,
261 'tags': tags,
282 "tags": tags,
262 'extra': extra,
283 "extra": extra,
263 'language': language,
284 "language": language,
264 'view_name': json_body.get('culprit'),
285 "view_name": json_body.get("culprit"),
265 'http_status': None,
286 "http_status": None,
266 'username': None,
287 "username": None,
267 'url': parsed_req.get('url'),
288 "url": parsed_req.get("url"),
268 'ip': None,
289 "ip": None,
269 'user_agent': None,
290 "user_agent": None,
270 'request': None,
291 "request": None,
271 'slow_calls': None,
292 "slow_calls": None,
272 'request_stats': None,
293 "request_stats": None,
273 'traceback': None
294 "traceback": None,
274 }
295 }
275
296
276 event_dict['extra'].extend(other_keys)
297 event_dict["extra"].extend(other_keys)
277 if release:
298 if release:
278 event_dict['tags'].append(('sentry_release', release))
299 event_dict["tags"].append(("sentry_release", release))
279 event_dict['request'] = parsed_req
300 event_dict["request"] = parsed_req
280 if 'headers' in parsed_req:
301 if "headers" in parsed_req:
281 event_dict['user_agent'] = parsed_req['headers'].get('User-Agent')
302 event_dict["user_agent"] = parsed_req["headers"].get("User-Agent")
282 if 'env' in parsed_req:
303 if "env" in parsed_req:
283 event_dict['ip'] = parsed_req['env'].get('REMOTE_ADDR')
304 event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR")
284 ts_ms = int(json_body.get('time_spent') or 0)
305 ts_ms = int(json_body.get("time_spent") or 0)
285 if ts_ms > 0:
306 if ts_ms > 0:
286 event_dict['end_time'] = event_dict['start_time'] + \
307 event_dict["end_time"] = event_dict["start_time"] + timedelta(
287 timedelta(milliseconds=ts_ms)
308 milliseconds=ts_ms
309 )
288 if stacktrace or alt_stacktrace or template:
310 if stacktrace or alt_stacktrace or template:
289 event_dict['traceback'] = stacktrace or alt_stacktrace or template
311 event_dict["traceback"] = stacktrace or alt_stacktrace or template
290 for k in list(event_dict.keys()):
312 for k in list(event_dict.keys()):
291 if event_dict[k] is None:
313 if event_dict[k] is None:
292 del event_dict[k]
314 del event_dict[k]
293 if user:
315 if user:
294 event_dict['username'] = user['username'] or user['id'] \
316 event_dict["username"] = user["username"] or user["id"] or user["email"]
295 or user['email']
296 return event_dict, event_type
317 return event_dict, event_type
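
For reference, a minimal sketch of the duration arithmetic this hunk reformats: `end_time` is derived from `start_time` plus Sentry's `time_spent` value in milliseconds. The `ParsedSentryEventType` members are assumed from the names used above; the diff references them but does not show the enum definition.

```python
from datetime import datetime, timedelta
from enum import Enum


class ParsedSentryEventType(Enum):
    # Assumed two-member enum; the hunk uses both names but
    # does not include the definition.
    ERROR_REPORT = 1
    LOG = 2


def end_time_from_time_spent(start_time, time_spent_ms):
    """Mirrors the hunk above: a positive time_spent (milliseconds)
    yields start_time + timedelta; otherwise end_time stays unset."""
    ts_ms = int(time_spent_ms or 0)
    if ts_ms > 0:
        return start_time + timedelta(milliseconds=ts_ms)
    return None


start = datetime(2014, 10, 13, 23, 47, 38)
print(end_time_from_time_spent(start, 1500))  # 2014-10-13 23:47:39.500000
print(end_time_from_time_spent(start, None))  # None
```
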
@@ -13,5 +13,3 b''
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
17
@@ -41,7 +41,7 b' target_metadata = MetaData(naming_convention=NAMING_CONVENTION)'
41 # my_important_option = config.get_main_option("my_important_option")
41 # my_important_option = config.get_main_option("my_important_option")
42 # ... etc.
42 # ... etc.
43
43
44 VERSION_TABLE_NAME = 'alembic_appenlight_version'
44 VERSION_TABLE_NAME = "alembic_appenlight_version"
45
45
46
46
47 def run_migrations_offline():
47 def run_migrations_offline():
@@ -57,9 +57,12 b' def run_migrations_offline():'
57
57
58 """
58 """
59 url = config.get_main_option("sqlalchemy.url")
59 url = config.get_main_option("sqlalchemy.url")
60 context.configure(url=url, target_metadata=target_metadata,
60 context.configure(
61 transaction_per_migration=True,
61 url=url,
62 version_table=VERSION_TABLE_NAME)
62 target_metadata=target_metadata,
63 transaction_per_migration=True,
64 version_table=VERSION_TABLE_NAME,
65 )
63
66
64 with context.begin_transaction():
67 with context.begin_transaction():
65 context.run_migrations()
68 context.run_migrations()
@@ -74,15 +77,16 b' def run_migrations_online():'
74 """
77 """
75 engine = engine_from_config(
78 engine = engine_from_config(
76 config.get_section(config.config_ini_section),
79 config.get_section(config.config_ini_section),
77 prefix='sqlalchemy.',
80 prefix="sqlalchemy.",
78 poolclass=pool.NullPool)
81 poolclass=pool.NullPool,
82 )
79
83
80 connection = engine.connect()
84 connection = engine.connect()
81 context.configure(
85 context.configure(
82 connection=connection,
86 connection=connection,
83 target_metadata=target_metadata,
87 target_metadata=target_metadata,
84 transaction_per_migration=True,
88 transaction_per_migration=True,
85 version_table=VERSION_TABLE_NAME
89 version_table=VERSION_TABLE_NAME,
86 )
90 )
87
91
88 try:
92 try:
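
Taken together, these hunks are the standard Alembic env.py split between offline and online migration. A condensed sketch of that pattern, under stated assumptions: the `is_offline_mode()` dispatch at the bottom is usual Alembic boilerplate not shown in this diff, and the original's `try`/`finally` around the connection is replaced here by a `with` block.

```python
from alembic import context
from sqlalchemy import MetaData, engine_from_config, pool

VERSION_TABLE_NAME = "alembic_appenlight_version"
target_metadata = MetaData()


def run_migrations_offline():
    # Emit SQL scripts from the configured URL; no live DB needed.
    context.configure(
        url=context.config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        transaction_per_migration=True,
        version_table=VERSION_TABLE_NAME,
    )
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    # Build an engine from the ini section and migrate over a real
    # connection, one transaction per migration.
    engine = engine_from_config(
        context.config.get_section(context.config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            transaction_per_migration=True,
            version_table=VERSION_TABLE_NAME,
        )
        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
```
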
@@ -23,7 +23,7 b' Create Date: 2014-10-13 23:47:38.295159'
23 """
23 """
24
24
25 # revision identifiers, used by Alembic.
25 # revision identifiers, used by Alembic.
26 revision = '55b6e612672f'
26 revision = "55b6e612672f"
27 down_revision = None
27 down_revision = None
28
28
29 from alembic import op
29 from alembic import op
@@ -31,348 +31,514 b' import sqlalchemy as sa'
31
31
32
32
33 def upgrade():
33 def upgrade():
34 op.add_column('users', sa.Column('first_name', sa.Unicode(25)))
34 op.add_column("users", sa.Column("first_name", sa.Unicode(25)))
35 op.add_column('users', sa.Column('last_name', sa.Unicode(50)))
35 op.add_column("users", sa.Column("last_name", sa.Unicode(50)))
36 op.add_column('users', sa.Column('company_name', sa.Unicode(255)))
36 op.add_column("users", sa.Column("company_name", sa.Unicode(255)))
37 op.add_column('users', sa.Column('company_address', sa.Unicode(255)))
37 op.add_column("users", sa.Column("company_address", sa.Unicode(255)))
38 op.add_column('users', sa.Column('phone1', sa.Unicode(25)))
38 op.add_column("users", sa.Column("phone1", sa.Unicode(25)))
39 op.add_column('users', sa.Column('phone2', sa.Unicode(25)))
39 op.add_column("users", sa.Column("phone2", sa.Unicode(25)))
40 op.add_column('users', sa.Column('zip_code', sa.Unicode(25)))
40 op.add_column("users", sa.Column("zip_code", sa.Unicode(25)))
41 op.add_column('users', sa.Column('default_report_sort', sa.Unicode(20), nullable=False, server_default="newest"))
41 op.add_column(
42 op.add_column('users', sa.Column('city', sa.Unicode(128)))
42 "users",
43 op.add_column('users', sa.Column('notes', sa.UnicodeText, server_default=''))
43 sa.Column(
44 op.add_column('users', sa.Column('notifications', sa.Boolean(), nullable=False, server_default='true'))
44 "default_report_sort",
45 op.add_column('users', sa.Column('registration_ip', sa.Unicode(40), nullable=False, server_default=''))
45 sa.Unicode(20),
46 nullable=False,
47 server_default="newest",
48 ),
49 )
50 op.add_column("users", sa.Column("city", sa.Unicode(128)))
51 op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default=""))
52 op.add_column(
53 "users",
54 sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"),
55 )
56 op.add_column(
57 "users",
58 sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""),
59 )
46
60
47 op.create_table(
61 op.create_table(
48 'integrations',
62 "integrations",
49 sa.Column('id', sa.Integer(), primary_key=True),
63 sa.Column("id", sa.Integer(), primary_key=True),
50 sa.Column('resource_id', sa.Integer(),
64 sa.Column(
51 sa.ForeignKey('resources.resource_id', onupdate='cascade',
65 "resource_id",
52 ondelete='cascade')),
66 sa.Integer(),
53 sa.Column('integration_name', sa.Unicode(64)),
67 sa.ForeignKey(
54 sa.Column('config', sa.dialects.postgresql.JSON, nullable=False),
68 "resources.resource_id", onupdate="cascade", ondelete="cascade"
55 sa.Column('modified_date', sa.DateTime(), nullable=False, server_default=sa.func.now()),
69 ),
56 sa.Column('external_id', sa.Unicode(255)),
70 ),
57 sa.Column('external_id2', sa.Unicode(255))
71 sa.Column("integration_name", sa.Unicode(64)),
72 sa.Column("config", sa.dialects.postgresql.JSON, nullable=False),
73 sa.Column(
74 "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now()
75 ),
76 sa.Column("external_id", sa.Unicode(255)),
77 sa.Column("external_id2", sa.Unicode(255)),
58 )
78 )
59
79
60 op.create_table(
80 op.create_table(
61 'alert_channels',
81 "alert_channels",
62 sa.Column('owner_id', sa.Integer(),
82 sa.Column(
63 sa.ForeignKey('users.id', onupdate='cascade',
83 "owner_id",
64 ondelete='cascade'), nullable=False),
84 sa.Integer(),
65 sa.Column('channel_name', sa.Unicode(25), nullable=False),
85 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
66 sa.Column('channel_value', sa.Unicode(80), nullable=False),
86 nullable=False,
67 sa.Column('channel_json_conf', sa.dialects.postgresql.JSON, nullable=False),
87 ),
68 sa.Column('channel_validated', sa.Boolean, nullable=False, server_default='False'),
88 sa.Column("channel_name", sa.Unicode(25), nullable=False),
69 sa.Column('send_alerts', sa.Boolean, nullable=False, server_default='True'),
89 sa.Column("channel_value", sa.Unicode(80), nullable=False),
70 sa.Column('notify_only_first', sa.Boolean, nullable=False, server_default='False'),
90 sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False),
71 sa.Column('daily_digest', sa.Boolean, nullable=False, server_default='True'),
91 sa.Column(
72 sa.Column('pkey', sa.Integer(), primary_key=True),
92 "channel_validated", sa.Boolean, nullable=False, server_default="False"
73 sa.Column('integration_id', sa.Integer,
93 ),
74 sa.ForeignKey('integrations.id', onupdate='cascade',
94 sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"),
75 ondelete='cascade')),
95 sa.Column(
76 )
96 "notify_only_first", sa.Boolean, nullable=False, server_default="False"
77 op.create_unique_constraint('uq_alert_channels', 'alert_channels',
97 ),
78 ["owner_id", "channel_name", "channel_value"])
98 sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"),
99 sa.Column("pkey", sa.Integer(), primary_key=True),
100 sa.Column(
101 "integration_id",
102 sa.Integer,
103 sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"),
104 ),
105 )
106 op.create_unique_constraint(
107 "uq_alert_channels",
108 "alert_channels",
109 ["owner_id", "channel_name", "channel_value"],
110 )
79
111
80 op.create_table(
112 op.create_table(
81 'alert_channels_actions',
113 "alert_channels_actions",
82 sa.Column('owner_id', sa.Integer(), nullable=False),
114 sa.Column("owner_id", sa.Integer(), nullable=False),
83 sa.Column('resource_id', sa.Integer(),
115 sa.Column(
84 sa.ForeignKey('resources.resource_id', onupdate='cascade',
116 "resource_id",
85 ondelete='cascade')),
117 sa.Integer(),
86 sa.Column('pkey', sa.Integer(), primary_key=True),
118 sa.ForeignKey(
87 sa.Column('action', sa.Unicode(10), nullable=False, server_default='always'),
119 "resources.resource_id", onupdate="cascade", ondelete="cascade"
88 sa.Column('rule', sa.dialects.postgresql.JSON),
120 ),
89 sa.Column('type', sa.Unicode(10), index=True),
121 ),
90 sa.Column('other_id', sa.Unicode(40), index=True),
122 sa.Column("pkey", sa.Integer(), primary_key=True),
91 sa.Column('config', sa.dialects.postgresql.JSON),
123 sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"),
92 sa.Column('name', sa.Unicode(255), server_default='')
124 sa.Column("rule", sa.dialects.postgresql.JSON),
125 sa.Column("type", sa.Unicode(10), index=True),
126 sa.Column("other_id", sa.Unicode(40), index=True),
127 sa.Column("config", sa.dialects.postgresql.JSON),
128 sa.Column("name", sa.Unicode(255), server_default=""),
93 )
129 )
94
130
95
96 op.create_table(
131 op.create_table(
97 'application_postprocess_conf',
132 "application_postprocess_conf",
98 sa.Column('pkey', sa.Integer(), primary_key=True),
133 sa.Column("pkey", sa.Integer(), primary_key=True),
99 sa.Column('do', sa.Unicode(25), nullable=False),
134 sa.Column("do", sa.Unicode(25), nullable=False),
100 sa.Column('new_value', sa.UnicodeText(), nullable=False, server_default=''),
135 sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""),
101 sa.Column('resource_id', sa.Integer(),
136 sa.Column(
102 sa.ForeignKey('resources.resource_id',
137 "resource_id",
103 onupdate='cascade',
138 sa.Integer(),
104 ondelete='cascade'), nullable=False),
139 sa.ForeignKey(
105 sa.Column('rule', sa.dialects.postgresql.JSON),
140 "resources.resource_id", onupdate="cascade", ondelete="cascade"
141 ),
142 nullable=False,
143 ),
144 sa.Column("rule", sa.dialects.postgresql.JSON),
106 )
145 )
107
146
108 op.create_table(
147 op.create_table(
109 'applications',
148 "applications",
110 sa.Column('resource_id', sa.Integer(),
149 sa.Column(
111 sa.ForeignKey('resources.resource_id', onupdate='cascade',
150 "resource_id",
112 ondelete='cascade'), nullable=False,
151 sa.Integer(),
113 primary_key=True, autoincrement=False),
152 sa.ForeignKey(
114 sa.Column('domains', sa.UnicodeText, nullable=False),
153 "resources.resource_id", onupdate="cascade", ondelete="cascade"
115 sa.Column('api_key', sa.Unicode(32), nullable=False, index=True),
154 ),
116 sa.Column('default_grouping', sa.Unicode(20), nullable=False, server_default='url_type'),
155 nullable=False,
117 sa.Column('public_key', sa.Unicode(32), nullable=False, index=True),
156 primary_key=True,
118 sa.Column('error_report_threshold', sa.Integer(), server_default='10', nullable=False),
157 autoincrement=False,
119 sa.Column('slow_report_threshold', sa.Integer(), server_default='10', nullable=False),
158 ),
120 sa.Column('apdex_threshold', sa.Float(), server_default='0.7', nullable=False),
159 sa.Column("domains", sa.UnicodeText, nullable=False),
121 sa.Column('allow_permanent_storage', sa.Boolean(), server_default="false", nullable=False),
160 sa.Column("api_key", sa.Unicode(32), nullable=False, index=True),
122 )
161 sa.Column(
123 op.create_unique_constraint(None, 'applications',
162 "default_grouping",
124 ["public_key"])
163 sa.Unicode(20),
125 op.create_unique_constraint(None, 'applications',
164 nullable=False,
126 ["api_key"])
165 server_default="url_type",
166 ),
167 sa.Column("public_key", sa.Unicode(32), nullable=False, index=True),
168 sa.Column(
169 "error_report_threshold", sa.Integer(), server_default="10", nullable=False
170 ),
171 sa.Column(
172 "slow_report_threshold", sa.Integer(), server_default="10", nullable=False
173 ),
174 sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False),
175 sa.Column(
176 "allow_permanent_storage",
177 sa.Boolean(),
178 server_default="false",
179 nullable=False,
180 ),
181 )
182 op.create_unique_constraint(None, "applications", ["public_key"])
183 op.create_unique_constraint(None, "applications", ["api_key"])
127
184
128 op.create_table(
185 op.create_table(
129 'metrics',
186 "metrics",
130 sa.Column('pkey', sa.types.BigInteger, nullable=False, primary_key=True),
187 sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True),
131 sa.Column('resource_id', sa.Integer(),
188 sa.Column(
132 sa.ForeignKey('resources.resource_id',
189 "resource_id",
133 onupdate='cascade',
190 sa.Integer(),
134 ondelete='cascade')),
191 sa.ForeignKey(
135 sa.Column('timestamp', sa.DateTime),
192 "resources.resource_id", onupdate="cascade", ondelete="cascade"
136 sa.Column('namespace', sa.Unicode(255)),
193 ),
137 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}")
194 ),
195 sa.Column("timestamp", sa.DateTime),
196 sa.Column("namespace", sa.Unicode(255)),
197 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
138 )
198 )
139
199
140 op.create_table(
200 op.create_table(
141 'events',
201 "events",
142 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
202 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
143 sa.Column('start_date', sa.DateTime, nullable=False, index=True),
203 sa.Column("start_date", sa.DateTime, nullable=False, index=True),
144 sa.Column('end_date', sa.DateTime),
204 sa.Column("end_date", sa.DateTime),
145 sa.Column('status', sa.Integer(), nullable=False, index=True),
205 sa.Column("status", sa.Integer(), nullable=False, index=True),
146 sa.Column('event_type', sa.Integer(), nullable=False, index=True),
206 sa.Column("event_type", sa.Integer(), nullable=False, index=True),
147 sa.Column('origin_user_id', sa.Integer()),
207 sa.Column("origin_user_id", sa.Integer()),
148 sa.Column('target_user_id', sa.Integer()),
208 sa.Column("target_user_id", sa.Integer()),
149 sa.Column('resource_id', sa.Integer(), index=True),
209 sa.Column("resource_id", sa.Integer(), index=True),
150 sa.Column('text', sa.UnicodeText, server_default=''),
210 sa.Column("text", sa.UnicodeText, server_default=""),
151 sa.Column('values', sa.dialects.postgresql.JSON),
211 sa.Column("values", sa.dialects.postgresql.JSON),
152 sa.Column('target_id', sa.Integer()),
212 sa.Column("target_id", sa.Integer()),
153 sa.Column('target_uuid', sa.Unicode(40), index=True)
213 sa.Column("target_uuid", sa.Unicode(40), index=True),
154 )
214 )
155
215
156 op.create_table(
216 op.create_table(
157 'logs',
217 "logs",
158 sa.Column('log_id', sa.types.BigInteger, nullable=False, primary_key=True),
218 sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True),
159 sa.Column('resource_id', sa.Integer(),
219 sa.Column(
160 sa.ForeignKey('resources.resource_id',
220 "resource_id",
161 onupdate='cascade',
221 sa.Integer(),
162 ondelete='cascade')),
222 sa.ForeignKey(
163 sa.Column('log_level', sa.SmallInteger(), nullable=False),
223 "resources.resource_id", onupdate="cascade", ondelete="cascade"
164 sa.Column('primary_key', sa.Unicode(128), nullable=True),
224 ),
165 sa.Column('message', sa.UnicodeText, nullable=False, server_default=''),
225 ),
166 sa.Column('timestamp', sa.DateTime),
226 sa.Column("log_level", sa.SmallInteger(), nullable=False),
167 sa.Column('namespace', sa.Unicode(255)),
227 sa.Column("primary_key", sa.Unicode(128), nullable=True),
168 sa.Column('request_id', sa.Unicode(40)),
228 sa.Column("message", sa.UnicodeText, nullable=False, server_default=""),
169 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}"),
229 sa.Column("timestamp", sa.DateTime),
170 sa.Column('permanent', sa.Boolean(), server_default="false",
230 sa.Column("namespace", sa.Unicode(255)),
171 nullable=False)
231 sa.Column("request_id", sa.Unicode(40)),
232 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
233 sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False),
172 )
234 )
173
235
174 op.create_table(
236 op.create_table(
175 'reports_groups',
237 "reports_groups",
176 sa.Column('id', sa.types.BigInteger, primary_key=True),
238 sa.Column("id", sa.types.BigInteger, primary_key=True),
177 sa.Column('resource_id', sa.Integer,
239 sa.Column(
178 sa.ForeignKey('resources.resource_id', onupdate='cascade',
240 "resource_id",
179 ondelete='cascade'), nullable=False),
241 sa.Integer,
180 sa.Column('priority', sa.Integer, nullable=False, server_default="5"),
242 sa.ForeignKey(
181 sa.Column('first_timestamp', sa.DateTime(), nullable=False, server_default=sa.func.now()),
243 "resources.resource_id", onupdate="cascade", ondelete="cascade"
182 sa.Column('last_timestamp', sa.DateTime()),
244 ),
183 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
245 nullable=False,
184 sa.Column('grouping_hash', sa.Unicode(40), nullable=False, server_default=""),
246 ),
185 sa.Column('triggered_postprocesses_ids', sa.dialects.postgresql.JSON, nullable=False, server_default="[]"),
247 sa.Column("priority", sa.Integer, nullable=False, server_default="5"),
186 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
248 sa.Column(
187 sa.Column('total_reports', sa.Integer, nullable=False, server_default="0"),
249 "first_timestamp",
188 sa.Column('last_report', sa.Integer, nullable=False, server_default="0"),
250 sa.DateTime(),
189 sa.Column('occurences', sa.Integer, nullable=False, server_default="1"),
251 nullable=False,
190 sa.Column('average_duration', sa.Float(), nullable=False, server_default="0"),
252 server_default=sa.func.now(),
191 sa.Column('summed_duration', sa.Float(), nullable=False, server_default="0"),
253 ),
192 sa.Column('notified', sa.Boolean, nullable=False, server_default="False"),
254 sa.Column("last_timestamp", sa.DateTime()),
193 sa.Column('fixed', sa.Boolean, nullable=False, server_default="False"),
255 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
194 sa.Column('public', sa.Boolean, nullable=False, server_default="False"),
256 sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""),
195 sa.Column('read', sa.Boolean, nullable=False, server_default="False"),
257 sa.Column(
258 "triggered_postprocesses_ids",
259 sa.dialects.postgresql.JSON,
260 nullable=False,
261 server_default="[]",
262 ),
263 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
264 sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"),
265 sa.Column("last_report", sa.Integer, nullable=False, server_default="0"),
266 sa.Column("occurences", sa.Integer, nullable=False, server_default="1"),
267 sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"),
268 sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"),
269 sa.Column("notified", sa.Boolean, nullable=False, server_default="False"),
270 sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"),
271 sa.Column("public", sa.Boolean, nullable=False, server_default="False"),
272 sa.Column("read", sa.Boolean, nullable=False, server_default="False"),
196 )
273 )
197
274
198 op.create_table(
275 op.create_table(
199 'reports',
276 "reports",
200 sa.Column('id', sa.types.BigInteger, primary_key=True),
277 sa.Column("id", sa.types.BigInteger, primary_key=True),
201 sa.Column('group_id', sa.types.BigInteger,
278 sa.Column(
202 sa.ForeignKey('reports_groups.id', onupdate='cascade',
279 "group_id",
203 ondelete='cascade'), nullable=False, index=True),
280 sa.types.BigInteger,
204 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
281 sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"),
205 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
282 nullable=False,
206 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
283 index=True,
207 sa.Column('extra', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
284 ),
208 sa.Column('request', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
285 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
209 sa.Column('tags', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
286 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
210 sa.Column('ip', sa.Unicode(39), nullable=False, server_default=""),
287 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
211 sa.Column('username', sa.Unicode(255), nullable=False, server_default=""),
288 sa.Column(
212 sa.Column('user_agent', sa.Unicode(512), nullable=False, server_default=""),
289 "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
213 sa.Column('url', sa.UnicodeText, nullable=False, server_default=""),
290 ),
214 sa.Column('request_id', sa.Unicode(40), nullable=False, server_default=""),
291 sa.Column(
215 sa.Column('request_stats', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
292 "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
216 sa.Column('traceback', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
293 ),
217 sa.Column('traceback_hash', sa.Unicode(40), nullable=False, server_default=""),
294 sa.Column(
218 sa.Column('start_time', sa.DateTime(), nullable=False, server_default=sa.func.now()),
295 "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
219 sa.Column('end_time', sa.DateTime()),
296 ),
220 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
297 sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""),
221 sa.Column('duration', sa.Float(), nullable=False, server_default="0"),
298 sa.Column("username", sa.Unicode(255), nullable=False, server_default=""),
222 sa.Column('http_status', sa.Integer, index=True),
299 sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""),
223 sa.Column('url_domain', sa.Unicode(128)),
300 sa.Column("url", sa.UnicodeText, nullable=False, server_default=""),
224 sa.Column('url_path', sa.UnicodeText),
301 sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""),
225 sa.Column('language', sa.Integer, server_default="0"),
302 sa.Column(
226 )
303 "request_stats",
227 op.create_index(None, 'reports',
304 sa.dialects.postgresql.JSON,
228 [sa.text("(tags ->> 'server_name')")])
305 nullable=False,
229 op.create_index(None, 'reports',
306 server_default="{}",
230 [sa.text("(tags ->> 'view_name')")])
307 ),
308 sa.Column(
309 "traceback",
310 sa.dialects.postgresql.JSON,
311 nullable=False,
312 server_default="{}",
313 ),
314 sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""),
315 sa.Column(
316 "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now()
317 ),
318 sa.Column("end_time", sa.DateTime()),
319 sa.Column(
320 "report_group_time",
321 sa.DateTime,
322 index=True,
323 nullable=False,
324 server_default=sa.func.now(),
325 ),
326 sa.Column("duration", sa.Float(), nullable=False, server_default="0"),
327 sa.Column("http_status", sa.Integer, index=True),
328 sa.Column("url_domain", sa.Unicode(128)),
329 sa.Column("url_path", sa.UnicodeText),
330 sa.Column("language", sa.Integer, server_default="0"),
331 )
332 op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")])
333 op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")])
231
334
232 op.create_table(
335 op.create_table(
233 'reports_assignments',
336 "reports_assignments",
234 sa.Column('group_id', sa.types.BigInteger, nullable=False, primary_key=True),
337 sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True),
235 sa.Column('owner_id', sa.Integer,
338 sa.Column(
236 sa.ForeignKey('users.id', onupdate='cascade',ondelete='cascade'),
339 "owner_id",
237 nullable=False, primary_key=True),
340 sa.Integer,
238 sa.Column('report_time', sa.DateTime, nullable=False)
341 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
239 )
342 nullable=False,
343 primary_key=True,
344 ),
345 sa.Column("report_time", sa.DateTime, nullable=False),
346 )
240
347
241 op.create_table(
348 op.create_table(
242 'reports_comments',
349 "reports_comments",
243 sa.Column('comment_id', sa.Integer, primary_key=True),
350 sa.Column("comment_id", sa.Integer, primary_key=True),
244 sa.Column('body', sa.UnicodeText, nullable=False, server_default=''),
351 sa.Column("body", sa.UnicodeText, nullable=False, server_default=""),
245 sa.Column('owner_id', sa.Integer,
352 sa.Column(
246 sa.ForeignKey('users.id', onupdate='cascade',
353 "owner_id",
247 ondelete='set null'), nullable=True),
354 sa.Integer,
248 sa.Column('created_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
355 sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"),
249 sa.Column('report_time', sa.DateTime, nullable=False),
356 nullable=True,
250 sa.Column('group_id', sa.types.BigInteger, nullable=False)
357 ),
358 sa.Column(
359 "created_timestamp",
360 sa.DateTime,
361 nullable=False,
362 server_default=sa.func.now(),
363 ),
364 sa.Column("report_time", sa.DateTime, nullable=False),
365 sa.Column("group_id", sa.types.BigInteger, nullable=False),
251 )
366 )
252
367
253 op.create_table(
368 op.create_table(
254 'reports_stats',
369 "reports_stats",
255 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
370 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
256 sa.Column('start_interval', sa.DateTime, nullable=False, index=True),
371 sa.Column("start_interval", sa.DateTime, nullable=False, index=True),
257 sa.Column('group_id', sa.types.BigInteger, index=True),
372 sa.Column("group_id", sa.types.BigInteger, index=True),
258 sa.Column('occurences', sa.Integer, nullable=False, server_default='0', index=True),
373 sa.Column(
259 sa.Column('owner_user_id', sa.Integer),
374 "occurences", sa.Integer, nullable=False, server_default="0", index=True
260 sa.Column('type', sa.Integer, index=True, nullable=False),
375 ),
261 sa.Column('duration', sa.Float(), server_default='0'),
376 sa.Column("owner_user_id", sa.Integer),
262 sa.Column('server_name', sa.Unicode(128),
377 sa.Column("type", sa.Integer, index=True, nullable=False),
263 server_default=''),
378 sa.Column("duration", sa.Float(), server_default="0"),
264 sa.Column('view_name', sa.Unicode(128),
379 sa.Column("server_name", sa.Unicode(128), server_default=""),
265 server_default=''),
380 sa.Column("view_name", sa.Unicode(128), server_default=""),
266 sa.Column('id', sa.BigInteger(), nullable=False, primary_key=True),
381 sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True),
267 )
382 )
268 op.create_index('ix_reports_stats_start_interval_group_id', 'reports_stats',
383 op.create_index(
269 ["start_interval", "group_id"])
384 "ix_reports_stats_start_interval_group_id",
385 "reports_stats",
386 ["start_interval", "group_id"],
387 )
270
388
271 op.create_table(
389 op.create_table(
272 'slow_calls',
390 "slow_calls",
273 sa.Column('id', sa.types.BigInteger, primary_key=True),
391 sa.Column("id", sa.types.BigInteger, primary_key=True),
274 sa.Column('report_id', sa.types.BigInteger, sa.ForeignKey('reports.id', onupdate='cascade', ondelete='cascade'),
392 sa.Column(
275 nullable=False, index=True),
393 "report_id",
276 sa.Column('duration', sa.Float(), nullable=False, server_default="0", index=True),
394 sa.types.BigInteger,
277 sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now(), index=True),
395 sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"),
278 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
396 nullable=False,
279 sa.Column('type', sa.Unicode(16), nullable=False, index=True),
397 index=True,
280 sa.Column('statement', sa.UnicodeText, nullable=False, server_default=''),
398 ),
281 sa.Column('parameters', sa.dialects.postgresql.JSON, nullable=False),
399 sa.Column(
282 sa.Column('location', sa.UnicodeText, server_default=''),
400 "duration", sa.Float(), nullable=False, server_default="0", index=True
283 sa.Column('subtype', sa.Unicode(16), nullable=False, index=True),
401 ),
284 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
402 sa.Column(
285 sa.Column('statement_hash', sa.Unicode(60), index=True)
403 "timestamp",
404 sa.DateTime,
405 nullable=False,
406 server_default=sa.func.now(),
407 index=True,
408 ),
409 sa.Column(
410 "report_group_time",
411 sa.DateTime,
412 index=True,
413 nullable=False,
414 server_default=sa.func.now(),
415 ),
416 sa.Column("type", sa.Unicode(16), nullable=False, index=True),
417 sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""),
418 sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False),
419 sa.Column("location", sa.UnicodeText, server_default=""),
420 sa.Column("subtype", sa.Unicode(16), nullable=False, index=True),
421 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
422 sa.Column("statement_hash", sa.Unicode(60), index=True),
286 )
423 )
287
424
288 op.create_table(
425 op.create_table(
289 'tags',
426 "tags",
290 sa.Column('id', sa.types.BigInteger, primary_key=True),
427 sa.Column("id", sa.types.BigInteger, primary_key=True),
291 sa.Column('resource_id', sa.Integer,
428 sa.Column(
292 sa.ForeignKey('resources.resource_id', onupdate='cascade',
429 "resource_id",
293 ondelete='cascade')),
430 sa.Integer,
294 sa.Column('first_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
431 sa.ForeignKey(
295 sa.Column('last_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
432 "resources.resource_id", onupdate="cascade", ondelete="cascade"
296 sa.Column('name', sa.Unicode(32), nullable=False),
433 ),
297 sa.Column('value', sa.dialects.postgresql.JSON, nullable=False),
434 ),
298 sa.Column('times_seen', sa.Integer, nullable=False, server_default='1')
435 sa.Column(
436 "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
437 ),
438 sa.Column(
439 "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
440 ),
441 sa.Column("name", sa.Unicode(32), nullable=False),
442 sa.Column("value", sa.dialects.postgresql.JSON, nullable=False),
443 sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"),
299 )
444 )
300
445
301 op.create_table(
446 op.create_table(
302 'auth_tokens',
447 "auth_tokens",
303 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
448 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
304 sa.Column('token', sa.Unicode),
449 sa.Column("token", sa.Unicode),
305 sa.Column('creation_date', sa.DateTime, nullable=False, server_default=sa.func.now()),
450 sa.Column(
306 sa.Column('expires', sa.DateTime),
451 "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now()
307 sa.Column('owner_id', sa.Integer,
452 ),
308 sa.ForeignKey('users.id', onupdate='cascade',
453 sa.Column("expires", sa.DateTime),
309 ondelete='cascade')),
454 sa.Column(
310 sa.Column('description', sa.Unicode),
455 "owner_id",
456 sa.Integer,
457 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
458 ),
459 sa.Column("description", sa.Unicode),
311 )
460 )
312
461
313 op.create_table(
462 op.create_table(
314 'channels_actions',
463 "channels_actions",
315 sa.Column('channel_pkey', sa.Integer,
464 sa.Column(
316 sa.ForeignKey('alert_channels.pkey',
465 "channel_pkey",
317 ondelete='CASCADE', onupdate='CASCADE')),
466 sa.Integer,
318 sa.Column('action_pkey', sa.Integer,
467 sa.ForeignKey(
319 sa.ForeignKey('alert_channels_actions.pkey',
468 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
320 ondelete='CASCADE', onupdate='CASCADE'))
469 ),
470 ),
471 sa.Column(
472 "action_pkey",
473 sa.Integer,
474 sa.ForeignKey(
475 "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE"
476 ),
477 ),
321 )
478 )
322
479
323 op.create_table(
480 op.create_table(
324 'config',
481 "config",
325 sa.Column('key', sa.Unicode(128), primary_key=True),
482 sa.Column("key", sa.Unicode(128), primary_key=True),
326 sa.Column('section', sa.Unicode(128), primary_key=True),
483 sa.Column("section", sa.Unicode(128), primary_key=True),
327 sa.Column('value', sa.dialects.postgresql.JSON,
484 sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"),
328 server_default="{}")
329 )
485 )
330
486
331 op.create_table(
487 op.create_table(
332 'plugin_configs',
488 "plugin_configs",
333 sa.Column('id', sa.Integer, primary_key=True),
489 sa.Column("id", sa.Integer, primary_key=True),
334 sa.Column('plugin_name', sa.Unicode(128)),
490 sa.Column("plugin_name", sa.Unicode(128)),
335 sa.Column('section', sa.Unicode(128)),
491 sa.Column("section", sa.Unicode(128)),
336 sa.Column('config', sa.dialects.postgresql.JSON,
492 sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"),
337 server_default="{}"),
493 sa.Column(
338 sa.Column('resource_id', sa.Integer(),
494 "resource_id",
339 sa.ForeignKey('resources.resource_id', onupdate='cascade',
495 sa.Integer(),
340 ondelete='cascade')),
496 sa.ForeignKey(
341 sa.Column('owner_id', sa.Integer(),
497 "resources.resource_id", onupdate="cascade", ondelete="cascade"
342 sa.ForeignKey('users.id', onupdate='cascade',
498 ),
343 ondelete='cascade')))
499 ),
500 sa.Column(
501 "owner_id",
502 sa.Integer(),
503 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
504 ),
505 )
344
506
345 op.create_table(
507 op.create_table(
346 'rc_versions',
508 "rc_versions",
347 sa.Column('name', sa.Unicode(40), primary_key=True),
509 sa.Column("name", sa.Unicode(40), primary_key=True),
348 sa.Column('value', sa.Unicode(40)),
510 sa.Column("value", sa.Unicode(40)),
511 )
512 version_table = sa.table(
513 "rc_versions",
514 sa.Column("name", sa.Unicode(40)),
515 sa.Column("value", sa.Unicode(40)),
349 )
516 )
350 version_table = sa.table('rc_versions',
351 sa.Column('name', sa.Unicode(40)),
352 sa.Column('value', sa.Unicode(40)))
353
517
354 insert = version_table.insert().values(name='es_reports')
518 insert = version_table.insert().values(name="es_reports")
355 op.execute(insert)
519 op.execute(insert)
356 insert = version_table.insert().values(name='es_reports_groups')
520 insert = version_table.insert().values(name="es_reports_groups")
357 op.execute(insert)
521 op.execute(insert)
358 insert = version_table.insert().values(name='es_reports_stats')
522 insert = version_table.insert().values(name="es_reports_stats")
359 op.execute(insert)
523 op.execute(insert)
360 insert = version_table.insert().values(name='es_logs')
524 insert = version_table.insert().values(name="es_logs")
361 op.execute(insert)
525 op.execute(insert)
362 insert = version_table.insert().values(name='es_metrics')
526 insert = version_table.insert().values(name="es_metrics")
363 op.execute(insert)
527 op.execute(insert)
364 insert = version_table.insert().values(name='es_slow_calls')
528 insert = version_table.insert().values(name="es_slow_calls")
365 op.execute(insert)
529 op.execute(insert)
366
530
367
531 op.execute(
368 op.execute('''
532 """
369 CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
533 CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
370 RETURNS timestamp without time zone AS
534 RETURNS timestamp without time zone AS
371 $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
535 $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
372 LANGUAGE sql VOLATILE;
536 LANGUAGE sql VOLATILE;
373 ''')
537 """
538 )
374
539
375 op.execute('''
540 op.execute(
541 """
376 CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger
542 CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger
377 LANGUAGE plpgsql SECURITY DEFINER
543 LANGUAGE plpgsql SECURITY DEFINER
378 AS $$
544 AS $$
@@ -426,13 +592,17 b' def upgrade():'
426 RETURN NULL;
592 RETURN NULL;
427 END
593 END
428 $$;
594 $$;
429 ''')
595 """
596 )
430
597
431 op.execute('''
598 op.execute(
599 """
432 CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
600 CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
433 ''')
601 """
602 )
434
603
435 op.execute('''
604 op.execute(
605 """
436 CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
606 CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
437 LANGUAGE plpgsql SECURITY DEFINER
607 LANGUAGE plpgsql SECURITY DEFINER
438 AS $$
608 AS $$
@@ -463,13 +633,17 b' def upgrade():'
463 RETURN NULL;
633 RETURN NULL;
464 END
634 END
465 $$;
635 $$;
466 ''')
636 """
637 )
467
638
468 op.execute('''
639 op.execute(
640 """
469 CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
641 CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
470 ''')
642 """
643 )
471
644
472 op.execute('''
645 op.execute(
646 """
473 CREATE FUNCTION partition_reports_stats() RETURNS trigger
647 CREATE FUNCTION partition_reports_stats() RETURNS trigger
474 LANGUAGE plpgsql SECURITY DEFINER
648 LANGUAGE plpgsql SECURITY DEFINER
475 AS $$
649 AS $$
@@ -499,13 +673,17 b' def upgrade():'
499 RETURN NULL;
673 RETURN NULL;
500 END
674 END
501 $$;
675 $$;
502 ''')
676 """
677 )
503
678
504 op.execute('''
679 op.execute(
680 """
505 CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
681 CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
506 ''')
682 """
683 )
507
684
508 op.execute('''
685 op.execute(
686 """
509 CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
687 CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
510 LANGUAGE plpgsql SECURITY DEFINER
688 LANGUAGE plpgsql SECURITY DEFINER
511 AS $$
689 AS $$
@@ -533,13 +711,17 b' def upgrade():'
533 RETURN NULL;
711 RETURN NULL;
534 END
712 END
535 $$;
713 $$;
536 ''')
714 """
715 )
537
716
538 op.execute('''
717 op.execute(
718 """
539 CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
719 CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
540 ''')
720 """
721 )
541
722
542 op.execute('''
723 op.execute(
724 """
543 CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
725 CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
544 LANGUAGE plpgsql SECURITY DEFINER
726 LANGUAGE plpgsql SECURITY DEFINER
545 AS $$
727 AS $$
@@ -573,14 +755,17 b' def upgrade():'
573 RETURN NULL;
755 RETURN NULL;
574 END
756 END
575 $$;
757 $$;
576 ''')
758 """
759 )
577
760
578 op.execute('''
761 op.execute(
762 """
579 CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
763 CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
580 ''')
764 """
581
765 )
582
766
583 op.execute('''
767 op.execute(
768 """
584 CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
769 CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
585 LANGUAGE plpgsql SECURITY DEFINER
770 LANGUAGE plpgsql SECURITY DEFINER
586 AS $$
771 AS $$
@@ -614,11 +799,15 b' def upgrade():'
614 RETURN NULL;
799 RETURN NULL;
615 END
800 END
616 $$;
801 $$;
617 ''')
802 """
803 )
618
804
619 op.execute('''
805 op.execute(
806 """
620 CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
807 CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
621 ''')
808 """
809 )
810
622
811
623 def downgrade():
812 def downgrade():
624 pass
813 pass
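
The plpgsql bodies of the `partition_*` functions are elided in this diff (only the `CREATE FUNCTION` headers and trailers survive). As an assumed illustration, not the original bodies: each is a BEFORE INSERT trigger that routes the row into a per-month child table, creating it on first use, and returns NULL so the parent table itself stays empty.

```python
# Assumed sketch of the partitioning pattern; names are illustrative.
from alembic import op

PARTITION_LOGS_DEMO_SQL = """
CREATE OR REPLACE FUNCTION partition_logs_demo() RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
AS $$
DECLARE
    partition_name text;
BEGIN
    partition_name := 'logs_p_' || to_char(NEW.timestamp, 'YYYY_MM');
    -- lower-bound CHECK only, for brevity; a real partition would
    -- also constrain the upper bound
    EXECUTE format(
        'CREATE TABLE IF NOT EXISTS %I (CHECK (timestamp >= DATE %L))
         INHERITS (logs)',
        partition_name,
        to_char(NEW.timestamp, 'YYYY-MM-01')
    );
    EXECUTE format('INSERT INTO %I SELECT ($1).*', partition_name)
        USING NEW;
    RETURN NULL;  -- suppress the insert into the parent table
END
$$;
"""


def upgrade():
    op.execute(PARTITION_LOGS_DEMO_SQL)
    op.execute(
        "CREATE TRIGGER partition_logs_demo BEFORE INSERT ON logs "
        "FOR EACH ROW EXECUTE PROCEDURE partition_logs_demo()"
    )
```
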
@@ -7,8 +7,8 b' Create Date: 2018-02-28 13:52:50.717217'
7 """
7 """
8
8
9 # revision identifiers, used by Alembic.
9 # revision identifiers, used by Alembic.
10 revision = 'e9fcfbdd9498'
10 revision = "e9fcfbdd9498"
11 down_revision = '55b6e612672f'
11 down_revision = "55b6e612672f"
12
12
13 from alembic import op
13 from alembic import op
14 import sqlalchemy as sa
14 import sqlalchemy as sa
@@ -16,17 +16,25 b' import sqlalchemy as sa'
16
16
17 def upgrade():
17 def upgrade():
18 op.create_table(
18 op.create_table(
19 'channels_resources',
19 "channels_resources",
20 sa.Column('channel_pkey', sa.Integer,
20 sa.Column(
21 sa.ForeignKey('alert_channels.pkey',
21 "channel_pkey",
22 ondelete='CASCADE', onupdate='CASCADE'),
22 sa.Integer,
23 primary_key=True),
23 sa.ForeignKey(
24 sa.Column('resource_id', sa.Integer,
24 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
25 sa.ForeignKey('resources.resource_id',
25 ),
26 ondelete='CASCADE', onupdate='CASCADE'),
26 primary_key=True,
27 primary_key=True)
27 ),
28 sa.Column(
29 "resource_id",
30 sa.Integer,
31 sa.ForeignKey(
32 "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE"
33 ),
34 primary_key=True,
35 ),
28 )
36 )
29
37
30
38
31 def downgrade():
39 def downgrade():
32 op.drop_table('channels_resources')
40 op.drop_table("channels_resources")
@@ -29,11 +29,11 b' log = logging.getLogger(__name__)'
29 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
29 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
30
30
31 NAMING_CONVENTION = {
31 NAMING_CONVENTION = {
32 "ix": 'ix_%(column_0_label)s',
32 "ix": "ix_%(column_0_label)s",
33 "uq": "uq_%(table_name)s_%(column_0_name)s",
33 "uq": "uq_%(table_name)s_%(column_0_name)s",
34 "ck": "ck_%(table_name)s_%(constraint_name)s",
34 "ck": "ck_%(table_name)s_%(constraint_name)s",
35 "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
35 "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
36 "pk": "pk_%(table_name)s"
36 "pk": "pk_%(table_name)s",
37 }
37 }
38
38
39 metadata = MetaData(naming_convention=NAMING_CONVENTION)
39 metadata = MetaData(naming_convention=NAMING_CONVENTION)
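
A small, self-contained sketch of what NAMING_CONVENTION buys (table and column names here are illustrative): constraints get deterministic names derived from the convention, so later migrations can drop them by name instead of guessing database-generated defaults.

```python
import sqlalchemy as sa
from sqlalchemy.schema import CreateTable

metadata = sa.MetaData(
    naming_convention={
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "pk": "pk_%(table_name)s",
    }
)

demo = sa.Table(
    "demo",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("api_key", sa.Unicode(32), unique=True),
)

# The emitted DDL contains:
#   CONSTRAINT pk_demo PRIMARY KEY (id)
#   CONSTRAINT uq_demo_api_key UNIQUE (api_key)
print(CreateTable(demo))
```
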
@@ -59,23 +59,24 b' class SliceableESQuery(object):'
59 self.query = query
59 self.query = query
60 self.sort_query = sort_query
60 self.sort_query = sort_query
61 self.aggregations = aggregations
61 self.aggregations = aggregations
62 self.items_per_page = kwconfig.pop('items_per_page', 10)
62 self.items_per_page = kwconfig.pop("items_per_page", 10)
63 self.page = kwconfig.pop('page', 1)
63 self.page = kwconfig.pop("page", 1)
64 self.kwconfig = kwconfig
64 self.kwconfig = kwconfig
65 self.result = None
65 self.result = None
66
66
67 def __getitem__(self, index):
67 def __getitem__(self, index):
68 config = self.kwconfig.copy()
68 config = self.kwconfig.copy()
69 config['from_'] = index.start
69 config["from_"] = index.start
70 query = self.query.copy()
70 query = self.query.copy()
71 if self.sort_query:
71 if self.sort_query:
72 query.update(self.sort_query)
72 query.update(self.sort_query)
73 self.result = Datastores.es.search(body=query, size=self.items_per_page,
73 self.result = Datastores.es.search(
74 **config)
74 body=query, size=self.items_per_page, **config
75 )
75 if self.aggregations:
76 if self.aggregations:
76 self.items = self.result.get('aggregations')
77 self.items = self.result.get("aggregations")
77 else:
78 else:
78 self.items = self.result['hits']['hits']
79 self.items = self.result["hits"]["hits"]
79
80
80 return self.items
81 return self.items
81
82
@@ -85,14 +86,15 b' class SliceableESQuery(object):'
85 def __len__(self):
86 def __len__(self):
86 config = self.kwconfig.copy()
87 config = self.kwconfig.copy()
87 query = self.query.copy()
88 query = self.query.copy()
88 self.result = Datastores.es.search(body=query, size=self.items_per_page,
89 self.result = Datastores.es.search(
89 **config)
90 body=query, size=self.items_per_page, **config
91 )
90 if self.aggregations:
92 if self.aggregations:
91 self.items = self.result.get('aggregations')
93 self.items = self.result.get("aggregations")
92 else:
94 else:
93 self.items = self.result['hits']['hits']
95 self.items = self.result["hits"]["hits"]
94
96
95 count = int(self.result['hits']['total'])
97 count = int(self.result["hits"]["total"])
96 return count if count < 5000 else 5000
98 return count if count < 5000 else 5000
97
99
98
100
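
The contract this reformatting preserves is worth spelling out: `__getitem__` receives a slice whose `.start` becomes the Elasticsearch `from_` offset, and `__len__` reports a total capped at 5000. A toy stand-in (no Elasticsearch required) showing how a paginator consumes that contract:

```python
class SliceableDemo:
    """Toy mirror of SliceableESQuery's slice protocol."""

    def __init__(self, total=12345, items_per_page=10):
        self.total = total
        self.items_per_page = items_per_page

    def __getitem__(self, index):
        # paginators pass a slice, never a bare integer, which is
        # why the real class reads index.start directly
        start = index.start or 0
        return list(range(start, start + self.items_per_page))

    def __len__(self):
        # same cap as `count if count < 5000 else 5000` above
        return self.total if self.total < 5000 else 5000


demo = SliceableDemo()
print(demo[20:30])  # one "page": [20, 21, ..., 29]
print(len(demo))    # 5000 — the deep-paging cap
```
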
@@ -102,8 +104,7 b' from appenlight.models.user import User'
102 from appenlight.models.alert_channel import AlertChannel
104 from appenlight.models.alert_channel import AlertChannel
103 from appenlight.models.alert_channel_action import AlertChannelAction
105 from appenlight.models.alert_channel_action import AlertChannelAction
104 from appenlight.models.metric import Metric
106 from appenlight.models.metric import Metric
105 from appenlight.models.application_postprocess_conf import \
107 from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
106 ApplicationPostprocessConf
107 from appenlight.models.auth_token import AuthToken
108 from appenlight.models.auth_token import AuthToken
108 from appenlight.models.event import Event
109 from appenlight.models.event import Event
109 from appenlight.models.external_identity import ExternalIdentity
110 from appenlight.models.external_identity import ExternalIdentity
@@ -124,7 +125,15 b' from appenlight.models.user_permission import UserPermission'
124 from appenlight.models.user_resource_permission import UserResourcePermission
125 from appenlight.models.user_resource_permission import UserResourcePermission
125 from ziggurat_foundations import ziggurat_model_init
126 from ziggurat_foundations import ziggurat_model_init
126
127
127 ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission,
128 ziggurat_model_init(
128 UserResourcePermission, GroupResourcePermission,
129 User,
129 Resource,
130 Group,
130 ExternalIdentity, passwordmanager=None)
131 UserGroup,
132 GroupPermission,
133 UserPermission,
134 UserResourcePermission,
135 GroupResourcePermission,
136 Resource,
137 ExternalIdentity,
138 passwordmanager=None,
139 )
@@ -27,126 +27,125 b' log = logging.getLogger(__name__)'
27
27
28 #
28 #
29 channel_rules_m2m_table = sa.Table(
29 channel_rules_m2m_table = sa.Table(
30 'channels_actions', Base.metadata,
30 "channels_actions",
31 sa.Column('channel_pkey', sa.Integer,
31 Base.metadata,
32 sa.ForeignKey('alert_channels.pkey')),
32 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
33 sa.Column('action_pkey', sa.Integer,
33 sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")),
34 sa.ForeignKey('alert_channels_actions.pkey'))
35 )
34 )
36
35
37 channel_resources_m2m_table = sa.Table(
36 channel_resources_m2m_table = sa.Table(
38 'channels_resources', Base.metadata,
37 "channels_resources",
39 sa.Column('channel_pkey', sa.Integer,
38 Base.metadata,
40 sa.ForeignKey('alert_channels.pkey')),
39 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
41 sa.Column('resource_id', sa.Integer,
40 sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")),
42 sa.ForeignKey('resources.resource_id'))
43 )
41 )
44
42
45 DATE_FRMT = '%Y-%m-%dT%H:%M'
43 DATE_FRMT = "%Y-%m-%dT%H:%M"
46
44
47
45
48 class AlertChannel(Base, BaseModel):
46 class AlertChannel(Base, BaseModel):
49 """
47 """
50 Stores information about possible alerting options
48 Stores information about possible alerting options
51 """
49 """
52 __tablename__ = 'alert_channels'
50
53 __possible_channel_names__ = ['email']
51 __tablename__ = "alert_channels"
52 __possible_channel_names__ = ["email"]
54 __mapper_args__ = {
53 __mapper_args__ = {
55 'polymorphic_on': 'channel_name',
54 "polymorphic_on": "channel_name",
56 'polymorphic_identity': 'integration'
55 "polymorphic_identity": "integration",
57 }
56 }
58
57
59 owner_id = sa.Column(sa.Unicode(30),
58 owner_id = sa.Column(
60 sa.ForeignKey('users.id', onupdate='CASCADE',
59 sa.Unicode(30),
61 ondelete='CASCADE'))
60 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
61 )
62 channel_name = sa.Column(sa.Unicode(25), nullable=False)
62 channel_name = sa.Column(sa.Unicode(25), nullable=False)
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default='')
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default="")
64 channel_json_conf = sa.Column(JSON(), nullable=False, default='')
64 channel_json_conf = sa.Column(JSON(), nullable=False, default="")
65 channel_validated = sa.Column(sa.Boolean, nullable=False,
65 channel_validated = sa.Column(sa.Boolean, nullable=False, default=False)
66 default=False)
66 send_alerts = sa.Column(sa.Boolean, nullable=False, default=True)
67 send_alerts = sa.Column(sa.Boolean, nullable=False,
67 daily_digest = sa.Column(sa.Boolean, nullable=False, default=True)
68 default=True)
68 integration_id = sa.Column(
69 daily_digest = sa.Column(sa.Boolean, nullable=False,
69 sa.Integer, sa.ForeignKey("integrations.id"), nullable=True
70 default=True)
70 )
71 integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'),
72 nullable=True)
73 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
71 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
74
72
75 channel_actions = sa.orm.relationship('AlertChannelAction',
73 channel_actions = sa.orm.relationship(
76 cascade="all",
74 "AlertChannelAction",
77 passive_deletes=True,
75 cascade="all",
78 passive_updates=True,
76 passive_deletes=True,
79 secondary=channel_rules_m2m_table,
77 passive_updates=True,
80 backref='channels')
78 secondary=channel_rules_m2m_table,
81 resources = sa.orm.relationship('Resource',
79 backref="channels",
82 cascade="all",
80 )
83 passive_deletes=True,
81 resources = sa.orm.relationship(
84 passive_updates=True,
82 "Resource",
85 secondary=channel_resources_m2m_table,
83 cascade="all",
86 backref='resources')
84 passive_deletes=True,
85 passive_updates=True,
86 secondary=channel_resources_m2m_table,
87 backref="resources",
88 )
87
89
88 @property
90 @property
89 def channel_visible_value(self):
91 def channel_visible_value(self):
90 if self.integration:
92 if self.integration:
91 return '{}: {}'.format(
93 return "{}: {}".format(
92 self.channel_name,
94 self.channel_name, self.integration.resource.resource_name
93 self.integration.resource.resource_name
94 )
95 )
95
96
96 return '{}: {}'.format(
97 return "{}: {}".format(self.channel_name, self.channel_value)
97 self.channel_name,
98 self.channel_value
99 )
100
98
101 def get_dict(self, exclude_keys=None, include_keys=None,
99 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True):
102 extended_info=True):
103 """
100 """
104 Returns dictionary with required information that will be consumed by
101 Returns dictionary with required information that will be consumed by
105 angular
102 angular
106 """
103 """
107 instance_dict = super(AlertChannel, self).get_dict(exclude_keys,
104 instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys)
108 include_keys)
109 exclude_keys_list = exclude_keys or []
105 exclude_keys_list = exclude_keys or []
110 include_keys_list = include_keys or []
106 include_keys_list = include_keys or []
111
107
112 instance_dict['supports_report_alerting'] = True
108 instance_dict["supports_report_alerting"] = True
113 instance_dict['channel_visible_value'] = self.channel_visible_value
109 instance_dict["channel_visible_value"] = self.channel_visible_value
114
110
115 if extended_info:
111 if extended_info:
116 instance_dict['actions'] = [
112 instance_dict["actions"] = [
117 rule.get_dict(extended_info=True) for
113 rule.get_dict(extended_info=True) for rule in self.channel_actions
118 rule in self.channel_actions]
114 ]
119
115
120 del instance_dict['channel_json_conf']
116 del instance_dict["channel_json_conf"]
121
117
122 if self.integration:
118 if self.integration:
123 instance_dict[
119 instance_dict[
124 'supports_report_alerting'] = \
120 "supports_report_alerting"
125 self.integration.supports_report_alerting
121 ] = self.integration.supports_report_alerting
126 d = {}
122 d = {}
127 for k in instance_dict.keys():
123 for k in instance_dict.keys():
128 if (k not in exclude_keys_list and
124 if k not in exclude_keys_list and (
129 (k in include_keys_list or not include_keys)):
125 k in include_keys_list or not include_keys
126 ):
130 d[k] = instance_dict[k]
127 d[k] = instance_dict[k]
131 return d
128 return d
132
129
133 def __repr__(self):
130 def __repr__(self):
134 return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name,
131 return "<AlertChannel: (%s,%s), user:%s>" % (
135 self.channel_value,
132 self.channel_name,
136 self.user_name,)
133 self.channel_value,
134 self.user_name,
135 )
137
136
138 def send_digest(self, **kwargs):
137 def send_digest(self, **kwargs):
139 """
138 """
140 This should implement daily top error report notifications
139 This should implement daily top error report notifications
141 """
140 """
142 log.warning('send_digest NOT IMPLEMENTED')
141 log.warning("send_digest NOT IMPLEMENTED")
143
142
144 def notify_reports(self, **kwargs):
143 def notify_reports(self, **kwargs):
145 """
144 """
146 This should implement notification of reports that occured in 1 min
145 This should implement notification of reports that occured in 1 min
147 interval
146 interval
148 """
147 """
149 log.warning('notify_reports NOT IMPLEMENTED')
148 log.warning("notify_reports NOT IMPLEMENTED")
150
149
151 def notify_alert(self, **kwargs):
150 def notify_alert(self, **kwargs):
152 """
151 """
@@ -160,87 +159,85 b' class AlertChannel(Base, BaseModel):'
160 request: request object
159 request: request object
161
160
162 """
161 """
163 alert_name = kwargs['event'].unified_alert_name()
162 alert_name = kwargs["event"].unified_alert_name()
164 if alert_name in ['slow_report_alert', 'error_report_alert']:
163 if alert_name in ["slow_report_alert", "error_report_alert"]:
165 self.notify_report_alert(**kwargs)
164 self.notify_report_alert(**kwargs)
166 elif alert_name == 'uptime_alert':
165 elif alert_name == "uptime_alert":
167 self.notify_uptime_alert(**kwargs)
166 self.notify_uptime_alert(**kwargs)
168 elif alert_name == 'chart_alert':
167 elif alert_name == "chart_alert":
169 self.notify_chart_alert(**kwargs)
168 self.notify_chart_alert(**kwargs)
170
169
171 def notify_chart_alert(self, **kwargs):
170 def notify_chart_alert(self, **kwargs):
172 """
171 """
173 This should implement report open/close alerts notifications
172 This should implement report open/close alerts notifications
174 """
173 """
175 log.warning('notify_chart_alert NOT IMPLEMENTED')
174 log.warning("notify_chart_alert NOT IMPLEMENTED")
176
175
177 def notify_report_alert(self, **kwargs):
176 def notify_report_alert(self, **kwargs):
178 """
177 """
179 This should implement report open/close alert notifications
178 This should implement report open/close alert notifications
180 """
179 """
181 log.warning('notify_report_alert NOT IMPLEMENTED')
180 log.warning("notify_report_alert NOT IMPLEMENTED")
182
181
183 def notify_uptime_alert(self, **kwargs):
182 def notify_uptime_alert(self, **kwargs):
184 """
183 """
185 This should implement uptime open/close alert notifications
184 This should implement uptime open/close alert notifications
186 """
185 """
187 log.warning('notify_uptime_alert NOT IMPLEMENTED')
186 log.warning("notify_uptime_alert NOT IMPLEMENTED")
188
187
189 def get_notification_basic_vars(self, kwargs):
188 def get_notification_basic_vars(self, kwargs):
190 """
189 """
191 Sets the most common variables used later for rendering notifications
190 Sets the most common variables used later for rendering notifications
192 for a channel
191 for a channel
193 """
192 """
194 if 'event' in kwargs:
193 if "event" in kwargs:
195 kwargs['since_when'] = kwargs['event'].start_date
194 kwargs["since_when"] = kwargs["event"].start_date
196
195
197 url_start_date = kwargs.get('since_when') - timedelta(minutes=1)
196 url_start_date = kwargs.get("since_when") - timedelta(minutes=1)
198 url_end_date = kwargs.get('since_when') + timedelta(minutes=4)
197 url_end_date = kwargs.get("since_when") + timedelta(minutes=4)
199 tmpl_vars = {
198 tmpl_vars = {
200 "timestamp": kwargs['since_when'],
199 "timestamp": kwargs["since_when"],
201 "user": kwargs['user'],
200 "user": kwargs["user"],
202 "since_when": kwargs.get('since_when'),
201 "since_when": kwargs.get("since_when"),
203 "url_start_date": url_start_date,
202 "url_start_date": url_start_date,
204 "url_end_date": url_end_date
203 "url_end_date": url_end_date,
205 }
204 }
206 tmpl_vars["resource_name"] = kwargs['resource'].resource_name
205 tmpl_vars["resource_name"] = kwargs["resource"].resource_name
207 tmpl_vars["resource"] = kwargs['resource']
206 tmpl_vars["resource"] = kwargs["resource"]
208
207
209 if 'event' in kwargs:
208 if "event" in kwargs:
210 tmpl_vars['event_values'] = kwargs['event'].values
209 tmpl_vars["event_values"] = kwargs["event"].values
211 tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name()
210 tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name()
212 tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action()
211 tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action()
213 return tmpl_vars
212 return tmpl_vars
214
213
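get_notification_basic_vars() pins the notification window relative to
the event start: one minute back and four minutes forward. A standalone
sketch of that arithmetic:

    from datetime import datetime, timedelta

    since_when = datetime(2019, 4, 1, 12, 0, 0)  # stand-in for event.start_date
    url_start_date = since_when - timedelta(minutes=1)
    url_end_date = since_when + timedelta(minutes=4)
    assert url_end_date - url_start_date == timedelta(minutes=5)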
215 def report_alert_notification_vars(self, kwargs):
214 def report_alert_notification_vars(self, kwargs):
216 tmpl_vars = self.get_notification_basic_vars(kwargs)
215 tmpl_vars = self.get_notification_basic_vars(kwargs)
217 reports = kwargs.get('reports', [])
216 reports = kwargs.get("reports", [])
218 tmpl_vars["reports"] = reports
217 tmpl_vars["reports"] = reports
219 tmpl_vars["confirmed_total"] = len(reports)
218 tmpl_vars["confirmed_total"] = len(reports)
220
219
221 tmpl_vars["report_type"] = "error reports"
220 tmpl_vars["report_type"] = "error reports"
222 tmpl_vars["url_report_type"] = 'report/list'
221 tmpl_vars["url_report_type"] = "report/list"
223
222
224 alert_type = tmpl_vars.get('alert_type', '')
223 alert_type = tmpl_vars.get("alert_type", "")
225 if 'slow_report' in alert_type:
224 if "slow_report" in alert_type:
226 tmpl_vars["report_type"] = "slow reports"
225 tmpl_vars["report_type"] = "slow reports"
227 tmpl_vars["url_report_type"] = 'report/list_slow'
226 tmpl_vars["url_report_type"] = "report/list_slow"
228
227
229 app_url = kwargs['request'].registry.settings['_mail_url']
228 app_url = kwargs["request"].registry.settings["_mail_url"]
230
229
231 destination_url = kwargs['request'].route_url('/',
230 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
232 _app_url=app_url)
233 if alert_type:
231 if alert_type:
234 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
232 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(
235 tmpl_vars["url_report_type"],
233 tmpl_vars["url_report_type"],
236 tmpl_vars['resource'].resource_id,
234 tmpl_vars["resource"].resource_id,
237 tmpl_vars['url_start_date'].strftime(DATE_FRMT),
235 tmpl_vars["url_start_date"].strftime(DATE_FRMT),
238 tmpl_vars['url_end_date'].strftime(DATE_FRMT)
236 tmpl_vars["url_end_date"].strftime(DATE_FRMT),
239 )
237 )
240 else:
238 else:
241 destination_url += 'ui/{}?resource={}'.format(
239 destination_url += "ui/{}?resource={}".format(
242 tmpl_vars["url_report_type"],
240 tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id
243 tmpl_vars['resource'].resource_id
244 )
241 )
245 tmpl_vars["destination_url"] = destination_url
242 tmpl_vars["destination_url"] = destination_url
246
243
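The destination_url above is the app's base route plus a hand-built
query string. A sketch under two assumptions: the base URL stands in
for route_url("/", _app_url=app_url), and DATE_FRMT (defined elsewhere
in this module) matches the strftime format used by the channel classes
later in this diff:

    from datetime import datetime, timedelta

    DATE_FRMT = "%Y-%m-%dT%H:%M"  # assumed value
    base = "https://appenlight.example.com/"  # placeholder base route
    start = datetime(2019, 4, 1, 11, 59)
    end = start + timedelta(minutes=5)
    url = base + "ui/{}?resource={}&start_date={}&end_date={}".format(
        "report/list", 7, start.strftime(DATE_FRMT), end.strftime(DATE_FRMT)
    )
    # -> .../ui/report/list?resource=7&start_date=2019-04-01T11:59&end_date=2019-04-01T12:04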
@@ -248,58 +245,54 b' class AlertChannel(Base, BaseModel):'
248
245
249 def uptime_alert_notification_vars(self, kwargs):
246 def uptime_alert_notification_vars(self, kwargs):
250 tmpl_vars = self.get_notification_basic_vars(kwargs)
247 tmpl_vars = self.get_notification_basic_vars(kwargs)
251 app_url = kwargs['request'].registry.settings['_mail_url']
248 app_url = kwargs["request"].registry.settings["_mail_url"]
252 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
249 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
253 destination_url += 'ui/{}?resource={}'.format(
250 destination_url += "ui/{}?resource={}".format(
254 'uptime',
251 "uptime", tmpl_vars["resource"].resource_id
255 tmpl_vars['resource'].resource_id)
252 )
256 tmpl_vars['destination_url'] = destination_url
253 tmpl_vars["destination_url"] = destination_url
257
254
258 reason = ''
255 reason = ""
259 e_values = tmpl_vars.get('event_values')
256 e_values = tmpl_vars.get("event_values")
260
257
261 if e_values and e_values.get('response_time') == 0:
258 if e_values and e_values.get("response_time") == 0:
262 reason += ' Response time was slower than 20 seconds.'
259 reason += " Response time was slower than 20 seconds."
263 elif e_values:
260 elif e_values:
264 code = e_values.get('status_code')
261 code = e_values.get("status_code")
265 reason += ' Response status code: %s.' % code
262 reason += " Response status code: %s." % code
266
263
267 tmpl_vars['reason'] = reason
264 tmpl_vars["reason"] = reason
268 return tmpl_vars
265 return tmpl_vars
269
266
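The reason string above covers two cases: a zero response_time is
treated as the slower-than-20-seconds case, anything else reports the
status code. A sketch with a plain dict standing in for Event.values:

    def uptime_reason(event_values):
        reason = ""
        if event_values and event_values.get("response_time") == 0:
            reason += " Response time was slower than 20 seconds."
        elif event_values:
            reason += " Response status code: %s." % event_values.get("status_code")
        return reason

    assert uptime_reason({"response_time": 0}) == " Response time was slower than 20 seconds."
    assert uptime_reason({"status_code": 502}) == " Response status code: 502."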
270 def chart_alert_notification_vars(self, kwargs):
267 def chart_alert_notification_vars(self, kwargs):
271 tmpl_vars = self.get_notification_basic_vars(kwargs)
268 tmpl_vars = self.get_notification_basic_vars(kwargs)
272 tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name']
269 tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"]
273 tmpl_vars['action_name'] = tmpl_vars['event_values'].get(
270 tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or ""
274 'action_name') or ''
271 matched_values = tmpl_vars["event_values"]["matched_step_values"]
275 matched_values = tmpl_vars['event_values']['matched_step_values']
272 tmpl_vars["readable_values"] = []
276 tmpl_vars['readable_values'] = []
273 for key, value in list(matched_values["values"].items()):
277 for key, value in list(matched_values['values'].items()):
274 matched_label = matched_values["labels"].get(key)
278 matched_label = matched_values['labels'].get(key)
279 if matched_label:
275 if matched_label:
280 tmpl_vars['readable_values'].append({
276 tmpl_vars["readable_values"].append(
281 'label': matched_label['human_label'],
277 {"label": matched_label["human_label"], "value": value}
282 'value': value
278 )
283 })
279 tmpl_vars["readable_values"] = sorted(
284 tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'],
280 tmpl_vars["readable_values"], key=lambda x: x["label"]
285 key=lambda x: x['label'])
281 )
286 start_date = convert_date(tmpl_vars['event_values']['start_interval'])
282 start_date = convert_date(tmpl_vars["event_values"]["start_interval"])
287 end_date = None
283 end_date = None
288 if tmpl_vars['event_values'].get('end_interval'):
284 if tmpl_vars["event_values"].get("end_interval"):
289 end_date = convert_date(tmpl_vars['event_values']['end_interval'])
285 end_date = convert_date(tmpl_vars["event_values"]["end_interval"])
290
286
291 app_url = kwargs['request'].registry.settings['_mail_url']
287 app_url = kwargs["request"].registry.settings["_mail_url"]
292 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
288 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
293 to_encode = {
289 to_encode = {
294 'resource': tmpl_vars['event_values']['resource'],
290 "resource": tmpl_vars["event_values"]["resource"],
295 'start_date': start_date.strftime(DATE_FRMT),
291 "start_date": start_date.strftime(DATE_FRMT),
296 }
292 }
297 if end_date:
293 if end_date:
298 to_encode['end_date'] = end_date.strftime(DATE_FRMT)
294 to_encode["end_date"] = end_date.strftime(DATE_FRMT)
299
295
300 destination_url += 'ui/{}?{}'.format(
296 destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode))
301 'logs',
297 tmpl_vars["destination_url"] = destination_url
302 urllib.parse.urlencode(to_encode)
303 )
304 tmpl_vars['destination_url'] = destination_url
305 return tmpl_vars
298 return tmpl_vars
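chart_alert_notification_vars() pairs each matched value with its human
label and silently drops values that have no label. A standalone sketch
of that unpacking (the nested values/labels shape is taken from the
code above; the concrete keys are illustrative):

    matched_values = {
        "values": {"metric_a": 1500, "metric_b": 3},
        "labels": {"metric_a": {"human_label": "Avg. response time"}},
    }
    readable_values = []
    for key, value in matched_values["values"].items():
        matched_label = matched_values["labels"].get(key)
        if matched_label:  # metric_b has no label and is dropped here
            readable_values.append({"label": matched_label["human_label"], "value": value})
    readable_values = sorted(readable_values, key=lambda x: x["label"])
    assert readable_values == [{"label": "Avg. response time", "value": 1500}]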
@@ -27,39 +27,42 b' class AlertChannelAction(Base, BaseModel):'
27 Stores notification conditions for user's alert channels
27 Stores notification conditions for user's alert channels
28 This is later used for rule parsing like "alert if http_status == 500"
28 This is later used for rule parsing like "alert if http_status == 500"
29 """
29 """
30 __tablename__ = 'alert_channels_actions'
31
30
32 types = ['report', 'chart']
31 __tablename__ = "alert_channels_actions"
33
32
34 owner_id = sa.Column(sa.Integer,
33 types = ["report", "chart"]
35 sa.ForeignKey('users.id', onupdate='CASCADE',
34
36 ondelete='CASCADE'))
35 owner_id = sa.Column(
36 sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
37 )
37 resource_id = sa.Column(sa.Integer())
38 resource_id = sa.Column(sa.Integer())
38 action = sa.Column(sa.Unicode(10), nullable=False, default='always')
39 action = sa.Column(sa.Unicode(10), nullable=False, default="always")
39 type = sa.Column(sa.Unicode(10), nullable=False)
40 type = sa.Column(sa.Unicode(10), nullable=False)
40 other_id = sa.Column(sa.Unicode(40))
41 other_id = sa.Column(sa.Unicode(40))
41 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
42 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
42 rule = sa.Column(sa.dialects.postgresql.JSON,
43 rule = sa.Column(
43 nullable=False, default={'field': 'http_status',
44 sa.dialects.postgresql.JSON,
44 "op": "ge", "value": "500"})
45 nullable=False,
46 default={"field": "http_status", "op": "ge", "value": "500"},
47 )
45 config = sa.Column(sa.dialects.postgresql.JSON)
48 config = sa.Column(sa.dialects.postgresql.JSON)
46 name = sa.Column(sa.Unicode(255))
49 name = sa.Column(sa.Unicode(255))
47
50
48 @validates('notify_type')
51 @validates("notify_type")
49 def validate_email(self, key, notify_type):
52 def validate_email(self, key, notify_type):
50 assert notify_type in ['always', 'only_first']
53 assert notify_type in ["always", "only_first"]
51 return notify_type
54 return notify_type
52
55
53 def resource_name(self, db_session=None):
56 def resource_name(self, db_session=None):
54 db_session = get_db_session(db_session)
57 db_session = get_db_session(db_session)
55 if self.resource_id:
58 if self.resource_id:
56 return ResourceService.by_resource_id(
59 return ResourceService.by_resource_id(
57 self.resource_id, db_session=db_session).resource_name
60 self.resource_id, db_session=db_session
61 ).resource_name
58 else:
62 else:
59 return 'any resource'
63 return "any resource"
60
64
61 def get_dict(self, exclude_keys=None, include_keys=None,
65 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
62 extended_info=False):
63 """
66 """
64 Returns a dictionary with the required information that will be consumed
67 Returns a dictionary with the required information that will be consumed
65 by Angular
68 by Angular
@@ -68,12 +71,14 b' class AlertChannelAction(Base, BaseModel):'
68 exclude_keys_list = exclude_keys or []
71 exclude_keys_list = exclude_keys or []
69 include_keys_list = include_keys or []
72 include_keys_list = include_keys or []
70 if extended_info:
73 if extended_info:
71 instance_dict['channels'] = [
74 instance_dict["channels"] = [
72 c.get_dict(extended_info=False) for c in self.channels]
75 c.get_dict(extended_info=False) for c in self.channels
76 ]
73
77
74 d = {}
78 d = {}
75 for k in instance_dict.keys():
79 for k in instance_dict.keys():
76 if (k not in exclude_keys_list and
80 if k not in exclude_keys_list and (
77 (k in include_keys_list or not include_keys)):
81 k in include_keys_list or not include_keys
82 ):
78 d[k] = instance_dict[k]
83 d[k] = instance_dict[k]
79 return d
84 return d
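The rule column above defaults to {"field": "http_status", "op": "ge",
"value": "500"}. The real rule parser lives elsewhere in appenlight, so
the evaluator below is hypothetical, covering only this one-comparison
shape:

    import operator

    OPS = {"ge": operator.ge, "gt": operator.gt, "le": operator.le,
           "lt": operator.lt, "eq": operator.eq}

    def rule_matches(rule, report):
        field_value = report.get(rule["field"])
        if field_value is None:
            return False
        return OPS[rule["op"]](int(field_value), int(rule["value"]))

    rule = {"field": "http_status", "op": "ge", "value": "500"}
    assert rule_matches(rule, {"http_status": 502}) is True
    assert rule_matches(rule, {"http_status": 404}) is False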
@@ -13,4 +13,3 b''
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
@@ -23,15 +23,13 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class CampfireAlertChannel(AlertChannel):
25 class CampfireAlertChannel(AlertChannel):
26 __mapper_args__ = {
26 __mapper_args__ = {"polymorphic_identity": "campfire"}
27 'polymorphic_identity': 'campfire'
28 }
29
27
30 @property
28 @property
31 def client(self):
29 def client(self):
32 client = CampfireIntegration.create_client(
30 client = CampfireIntegration.create_client(
33 self.integration.config['api_token'],
31 self.integration.config["api_token"], self.integration.config["account"]
34 self.integration.config['account'])
32 )
35 return client
33 return client
36
34
37 def notify_reports(self, **kwargs):
35 def notify_reports(self, **kwargs):
@@ -48,37 +46,40 b' class CampfireAlertChannel(AlertChannel):'
48 """
46 """
49 template_vars = self.report_alert_notification_vars(kwargs)
47 template_vars = self.report_alert_notification_vars(kwargs)
50
48
51 app_url = kwargs['request'].registry.settings['_mail_url']
49 app_url = kwargs["request"].registry.settings["_mail_url"]
52 destination_url = kwargs['request'].route_url('/',
50 destination_url = kwargs["request"].route_url("/", app_url=app_url)
53 app_url=app_url)
51 f_args = (
54 f_args = ('report',
52 "report",
55 template_vars['resource'].resource_id,
53 template_vars["resource"].resource_id,
56 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
54 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
57 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
55 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
58 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
56 )
59 *f_args)
57 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
60
58
61 if template_vars['confirmed_total'] > 1:
59 if template_vars["confirmed_total"] > 1:
62 template_vars["title"] = "%s - %s reports" % (
60 template_vars["title"] = "%s - %s reports" % (
63 template_vars['resource_name'],
61 template_vars["resource_name"],
64 template_vars['confirmed_total'],
62 template_vars["confirmed_total"],
65 )
63 )
66 else:
64 else:
67 error_title = truncate(template_vars['reports'][0][1].error or
65 error_title = truncate(
68 'slow report', 90)
66 template_vars["reports"][0][1].error or "slow report", 90
67 )
69 template_vars["title"] = "%s - '%s' report" % (
68 template_vars["title"] = "%s - '%s' report" % (
70 template_vars['resource_name'],
69 template_vars["resource_name"],
71 error_title)
70 error_title,
71 )
72
72
73 template_vars["title"] += ' ' + destination_url
73 template_vars["title"] += " " + destination_url
74
74
75 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
75 log_msg = "NOTIFY : %s via %s :: %s reports" % (
76 kwargs['user'].user_name,
76 kwargs["user"].user_name,
77 self.channel_visible_value,
77 self.channel_visible_value,
78 template_vars['confirmed_total'])
78 template_vars["confirmed_total"],
79 )
79 log.warning(log_msg)
80 log.warning(log_msg)
80
81
81 for room in self.integration.config['rooms'].split(','):
82 for room in self.integration.config["rooms"].split(","):
82 self.client.speak_to_room(room.strip(), template_vars["title"])
83 self.client.speak_to_room(room.strip(), template_vars["title"])
83
84
84 def notify_report_alert(self, **kwargs):
85 def notify_report_alert(self, **kwargs):
@@ -94,23 +95,23 b' class CampfireAlertChannel(AlertChannel):'
94 """
95 """
95 template_vars = self.report_alert_notification_vars(kwargs)
96 template_vars = self.report_alert_notification_vars(kwargs)
96
97
97 if kwargs['event'].unified_alert_action() == 'OPEN':
98 if kwargs["event"].unified_alert_action() == "OPEN":
98 title = 'ALERT %s: %s - %s %s %s' % (
99 title = "ALERT %s: %s - %s %s %s" % (
99 template_vars['alert_action'],
100 template_vars["alert_action"],
100 template_vars['resource_name'],
101 template_vars["resource_name"],
101 kwargs['event'].values['reports'],
102 kwargs["event"].values["reports"],
102 template_vars['report_type'],
103 template_vars["report_type"],
103 template_vars['destination_url']
104 template_vars["destination_url"],
104 )
105 )
105
106
106 else:
107 else:
107 title = 'ALERT %s: %s type: %s' % (
108 title = "ALERT %s: %s type: %s" % (
108 template_vars['alert_action'],
109 template_vars["alert_action"],
109 template_vars['resource_name'],
110 template_vars["resource_name"],
110 template_vars['alert_type'].replace('_', ' '),
111 template_vars["alert_type"].replace("_", " "),
111 )
112 )
112 for room in self.integration.config['rooms'].split(','):
113 for room in self.integration.config["rooms"].split(","):
113 self.client.speak_to_room(room.strip(), title, sound='VUVUZELA')
114 self.client.speak_to_room(room.strip(), title, sound="VUVUZELA")
114
115
115 def notify_uptime_alert(self, **kwargs):
116 def notify_uptime_alert(self, **kwargs):
116 """
117 """
@@ -125,15 +126,15 b' class CampfireAlertChannel(AlertChannel):'
125 """
126 """
126 template_vars = self.uptime_alert_notification_vars(kwargs)
127 template_vars = self.uptime_alert_notification_vars(kwargs)
127
128
128 message = 'ALERT %s: %s has uptime issues %s\n\n' % (
129 message = "ALERT %s: %s has uptime issues %s\n\n" % (
129 template_vars['alert_action'],
130 template_vars["alert_action"],
130 template_vars['resource_name'],
131 template_vars["resource_name"],
131 template_vars['destination_url']
132 template_vars["destination_url"],
132 )
133 )
133 message += template_vars['reason']
134 message += template_vars["reason"]
134
135
135 for room in self.integration.config['rooms'].split(','):
136 for room in self.integration.config["rooms"].split(","):
136 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
137 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
137
138
138 def send_digest(self, **kwargs):
139 def send_digest(self, **kwargs):
139 """
140 """
@@ -148,17 +149,17 b' class CampfireAlertChannel(AlertChannel):'
148
149
149 """
150 """
150 template_vars = self.report_alert_notification_vars(kwargs)
151 template_vars = self.report_alert_notification_vars(kwargs)
151 f_args = (template_vars['resource_name'],
152 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
152 template_vars['confirmed_total'],)
153 message = "Daily report digest: %s - %s reports" % f_args
153 message = "Daily report digest: %s - %s reports" % f_args
154 message += '{}\n'.format(template_vars['destination_url'])
154 message += "{}\n".format(template_vars["destination_url"])
155 for room in self.integration.config['rooms'].split(','):
155 for room in self.integration.config["rooms"].split(","):
156 self.client.speak_to_room(room.strip(), message)
156 self.client.speak_to_room(room.strip(), message)
157
157
158 log_msg = 'DIGEST : %s via %s :: %s reports' % (
158 log_msg = "DIGEST : %s via %s :: %s reports" % (
159 kwargs['user'].user_name,
159 kwargs["user"].user_name,
160 self.channel_visible_value,
160 self.channel_visible_value,
161 template_vars['confirmed_total'])
161 template_vars["confirmed_total"],
162 )
162 log.warning(log_msg)
163 log.warning(log_msg)
163
164
164 def notify_chart_alert(self, **kwargs):
165 def notify_chart_alert(self, **kwargs):
@@ -173,16 +174,18 b' class CampfireAlertChannel(AlertChannel):'
173
174
174 """
175 """
175 template_vars = self.chart_alert_notification_vars(kwargs)
176 template_vars = self.chart_alert_notification_vars(kwargs)
176 message = 'ALERT {}: value in "{}" chart: ' \
177 message = (
177 'met alert "{}" criteria {} \n'.format(
178 'ALERT {}: value in "{}" chart: '
178 template_vars['alert_action'],
179 'met alert "{}" criteria {} \n'.format(
179 template_vars['chart_name'],
180 template_vars["alert_action"],
180 template_vars['action_name'],
181 template_vars["chart_name"],
181 template_vars['destination_url']
182 template_vars["action_name"],
183 template_vars["destination_url"],
184 )
182 )
185 )
183
186
184 for item in template_vars['readable_values']:
187 for item in template_vars["readable_values"]:
185 message += '{}: {}\n'.format(item['label'], item['value'])
188 message += "{}: {}\n".format(item["label"], item["value"])
186
189
187 for room in self.integration.config['rooms'].split(','):
190 for room in self.integration.config["rooms"].split(","):
188 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
191 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
@@ -27,9 +27,7 b' class EmailAlertChannel(AlertChannel):'
27 Default email alerting channel
27 Default email alerting channel
28 """
28 """
29
29
30 __mapper_args__ = {
30 __mapper_args__ = {"polymorphic_identity": "email"}
31 'polymorphic_identity': 'email'
32 }
33
31
34 def notify_reports(self, **kwargs):
32 def notify_reports(self, **kwargs):
35 """
33 """
@@ -45,25 +43,30 b' class EmailAlertChannel(AlertChannel):'
45 """
43 """
46 template_vars = self.report_alert_notification_vars(kwargs)
44 template_vars = self.report_alert_notification_vars(kwargs)
47
45
48 if template_vars['confirmed_total'] > 1:
46 if template_vars["confirmed_total"] > 1:
49 template_vars["title"] = "AppEnlight :: %s - %s reports" % (
47 template_vars["title"] = "AppEnlight :: %s - %s reports" % (
50 template_vars['resource_name'],
48 template_vars["resource_name"],
51 template_vars['confirmed_total'],
49 template_vars["confirmed_total"],
52 )
50 )
53 else:
51 else:
54 error_title = truncate(template_vars['reports'][0][1].error or
52 error_title = truncate(
55 'slow report', 20)
53 template_vars["reports"][0][1].error or "slow report", 20
54 )
56 template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
55 template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
57 template_vars['resource_name'],
56 template_vars["resource_name"],
58 error_title)
57 error_title,
59 UserService.send_email(kwargs['request'],
58 )
60 [self.channel_value],
59 UserService.send_email(
61 template_vars,
60 kwargs["request"],
62 '/email_templates/notify_reports.jinja2')
61 [self.channel_value],
63 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
62 template_vars,
64 kwargs['user'].user_name,
63 "/email_templates/notify_reports.jinja2",
64 )
65 log_msg = "NOTIFY : %s via %s :: %s reports" % (
66 kwargs["user"].user_name,
65 self.channel_visible_value,
67 self.channel_visible_value,
66 template_vars['confirmed_total'])
68 template_vars["confirmed_total"],
69 )
67 log.warning(log_msg)
70 log.warning(log_msg)
68
71
69 def send_digest(self, **kwargs):
72 def send_digest(self, **kwargs):
@@ -81,20 +84,23 b' class EmailAlertChannel(AlertChannel):'
81 template_vars = self.report_alert_notification_vars(kwargs)
84 template_vars = self.report_alert_notification_vars(kwargs)
82 title = "AppEnlight :: Daily report digest: %s - %s reports"
85 title = "AppEnlight :: Daily report digest: %s - %s reports"
83 template_vars["email_title"] = title % (
86 template_vars["email_title"] = title % (
84 template_vars['resource_name'],
87 template_vars["resource_name"],
85 template_vars['confirmed_total'],
88 template_vars["confirmed_total"],
86 )
89 )
87
90
88 UserService.send_email(kwargs['request'],
91 UserService.send_email(
89 [self.channel_value],
92 kwargs["request"],
90 template_vars,
93 [self.channel_value],
91 '/email_templates/notify_reports.jinja2',
94 template_vars,
92 immediately=True,
95 "/email_templates/notify_reports.jinja2",
93 silent=True)
96 immediately=True,
94 log_msg = 'DIGEST : %s via %s :: %s reports' % (
97 silent=True,
95 kwargs['user'].user_name,
98 )
99 log_msg = "DIGEST : %s via %s :: %s reports" % (
100 kwargs["user"].user_name,
96 self.channel_visible_value,
101 self.channel_visible_value,
97 template_vars['confirmed_total'])
102 template_vars["confirmed_total"],
103 )
98 log.warning(log_msg)
104 log.warning(log_msg)
99
105
100 def notify_report_alert(self, **kwargs):
106 def notify_report_alert(self, **kwargs):
@@ -110,23 +116,26 b' class EmailAlertChannel(AlertChannel):'
110 """
116 """
111 template_vars = self.report_alert_notification_vars(kwargs)
117 template_vars = self.report_alert_notification_vars(kwargs)
112
118
113 if kwargs['event'].unified_alert_action() == 'OPEN':
119 if kwargs["event"].unified_alert_action() == "OPEN":
114 title = 'AppEnlight :: ALERT %s: %s - %s %s' % (
120 title = "AppEnlight :: ALERT %s: %s - %s %s" % (
115 template_vars['alert_action'],
121 template_vars["alert_action"],
116 template_vars['resource_name'],
122 template_vars["resource_name"],
117 kwargs['event'].values['reports'],
123 kwargs["event"].values["reports"],
118 template_vars['report_type'],
124 template_vars["report_type"],
119 )
125 )
120 else:
126 else:
121 title = 'AppEnlight :: ALERT %s: %s type: %s' % (
127 title = "AppEnlight :: ALERT %s: %s type: %s" % (
122 template_vars['alert_action'],
128 template_vars["alert_action"],
123 template_vars['resource_name'],
129 template_vars["resource_name"],
124 template_vars['alert_type'].replace('_', ' '),
130 template_vars["alert_type"].replace("_", " "),
125 )
131 )
126 template_vars['email_title'] = title
132 template_vars["email_title"] = title
127 UserService.send_email(kwargs['request'], [self.channel_value],
133 UserService.send_email(
128 template_vars,
134 kwargs["request"],
129 '/email_templates/alert_reports.jinja2')
135 [self.channel_value],
136 template_vars,
137 "/email_templates/alert_reports.jinja2",
138 )
130
139
131 def notify_uptime_alert(self, **kwargs):
140 def notify_uptime_alert(self, **kwargs):
132 """
141 """
@@ -140,15 +149,18 b' class EmailAlertChannel(AlertChannel):'
140
149
141 """
150 """
142 template_vars = self.uptime_alert_notification_vars(kwargs)
151 template_vars = self.uptime_alert_notification_vars(kwargs)
143 title = 'AppEnlight :: ALERT %s: %s has uptime issues' % (
152 title = "AppEnlight :: ALERT %s: %s has uptime issues" % (
144 template_vars['alert_action'],
153 template_vars["alert_action"],
145 template_vars['resource_name'],
154 template_vars["resource_name"],
146 )
155 )
147 template_vars['email_title'] = title
156 template_vars["email_title"] = title
148
157
149 UserService.send_email(kwargs['request'], [self.channel_value],
158 UserService.send_email(
150 template_vars,
159 kwargs["request"],
151 '/email_templates/alert_uptime.jinja2')
160 [self.channel_value],
161 template_vars,
162 "/email_templates/alert_uptime.jinja2",
163 )
152
164
153 def notify_chart_alert(self, **kwargs):
165 def notify_chart_alert(self, **kwargs):
154 """
166 """
@@ -163,13 +175,18 b' class EmailAlertChannel(AlertChannel):'
163 """
175 """
164 template_vars = self.chart_alert_notification_vars(kwargs)
176 template_vars = self.chart_alert_notification_vars(kwargs)
165
177
166 title = 'AppEnlight :: ALERT {} value in "{}" chart' \
178 title = (
167 ' met alert "{}" criteria'.format(
179 'AppEnlight :: ALERT {} value in "{}" chart'
168 template_vars['alert_action'],
180 ' met alert "{}" criteria'.format(
169 template_vars['chart_name'],
181 template_vars["alert_action"],
170 template_vars['action_name'],
182 template_vars["chart_name"],
183 template_vars["action_name"],
184 )
185 )
186 template_vars["email_title"] = title
187 UserService.send_email(
188 kwargs["request"],
189 [self.channel_value],
190 template_vars,
191 "/email_templates/alert_chart.jinja2",
171 )
192 )
172 template_vars['email_title'] = title
173 UserService.send_email(kwargs['request'], [self.channel_value],
174 template_vars,
175 '/email_templates/alert_chart.jinja2')
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class FlowdockAlertChannel(AlertChannel):
25 class FlowdockAlertChannel(AlertChannel):
26 __mapper_args__ = {
26 __mapper_args__ = {"polymorphic_identity": "flowdock"}
27 'polymorphic_identity': 'flowdock'
28 }
29
27
30 def notify_reports(self, **kwargs):
28 def notify_reports(self, **kwargs):
31 """
29 """
@@ -41,44 +39,45 b' class FlowdockAlertChannel(AlertChannel):'
41 """
39 """
42 template_vars = self.report_alert_notification_vars(kwargs)
40 template_vars = self.report_alert_notification_vars(kwargs)
43
41
44 app_url = kwargs['request'].registry.settings['_mail_url']
42 app_url = kwargs["request"].registry.settings["_mail_url"]
45 destination_url = kwargs['request'].route_url('/',
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
46 _app_url=app_url)
44 f_args = (
47 f_args = ('report',
45 "report",
48 template_vars['resource'].resource_id,
46 template_vars["resource"].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
49 )
52 *f_args)
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
53
51
54 if template_vars['confirmed_total'] > 1:
52 if template_vars["confirmed_total"] > 1:
55 template_vars["title"] = "%s - %s reports" % (
53 template_vars["title"] = "%s - %s reports" % (
56 template_vars['resource_name'],
54 template_vars["resource_name"],
57 template_vars['confirmed_total'],
55 template_vars["confirmed_total"],
58 )
56 )
59 else:
57 else:
60 error_title = truncate(template_vars['reports'][0][1].error or
58 error_title = truncate(
61 'slow report', 90)
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
62 template_vars["title"] = "%s - '%s' report" % (
61 template_vars["title"] = "%s - '%s' report" % (
63 template_vars['resource_name'],
62 template_vars["resource_name"],
64 error_title)
63 error_title,
64 )
65
65
66 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
66 log_msg = "NOTIFY : %s via %s :: %s reports" % (
67 kwargs['user'].user_name,
67 kwargs["user"].user_name,
68 self.channel_visible_value,
68 self.channel_visible_value,
69 template_vars['confirmed_total'])
69 template_vars["confirmed_total"],
70 )
70 log.warning(log_msg)
71 log.warning(log_msg)
71
72
72 client = FlowdockIntegration.create_client(
73 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
73 self.integration.config['api_token'])
74 payload = {
74 payload = {
75 "source": "AppEnlight",
75 "source": "AppEnlight",
76 "from_address": kwargs['request'].registry.settings[
76 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
77 'mailing.from_email'],
78 "subject": template_vars["title"],
77 "subject": template_vars["title"],
79 "content": "New report present",
78 "content": "New report present",
80 "tags": ["appenlight"],
79 "tags": ["appenlight"],
81 "link": destination_url
80 "link": destination_url,
82 }
81 }
83 client.send_to_inbox(payload)
82 client.send_to_inbox(payload)
84
83
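For reference, the Flowdock team-inbox payload assembled in
notify_reports() has this shape; the values below are placeholders (the
real code fills from_address from settings["mailing.from_email"] and
link from the computed destination_url, then sends via
client.send_to_inbox(payload)):

    payload = {
        "source": "AppEnlight",
        "from_address": "noreply@example.com",  # placeholder sender
        "subject": "Demo - 3 reports",          # template_vars["title"]
        "content": "New report present",
        "tags": ["appenlight"],
        "link": "https://appenlight.example.com/ui/report/list?resource=1",
    }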
@@ -95,32 +94,30 b' class FlowdockAlertChannel(AlertChannel):'
95 """
94 """
96 template_vars = self.report_alert_notification_vars(kwargs)
95 template_vars = self.report_alert_notification_vars(kwargs)
97
96
98 if kwargs['event'].unified_alert_action() == 'OPEN':
97 if kwargs["event"].unified_alert_action() == "OPEN":
99
98
100 title = 'ALERT %s: %s - %s %s' % (
99 title = "ALERT %s: %s - %s %s" % (
101 template_vars['alert_action'],
100 template_vars["alert_action"],
102 template_vars['resource_name'],
101 template_vars["resource_name"],
103 kwargs['event'].values['reports'],
102 kwargs["event"].values["reports"],
104 template_vars['report_type'],
103 template_vars["report_type"],
105 )
104 )
106
105
107 else:
106 else:
108 title = 'ALERT %s: %s type: %s' % (
107 title = "ALERT %s: %s type: %s" % (
109 template_vars['alert_action'],
108 template_vars["alert_action"],
110 template_vars['resource_name'],
109 template_vars["resource_name"],
111 template_vars['alert_type'].replace('_', ' '),
110 template_vars["alert_type"].replace("_", " "),
112 )
111 )
113
112
114 client = FlowdockIntegration.create_client(
113 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
115 self.integration.config['api_token'])
116 payload = {
114 payload = {
117 "source": "AppEnlight",
115 "source": "AppEnlight",
118 "from_address": kwargs['request'].registry.settings[
116 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
119 'mailing.from_email'],
120 "subject": title,
117 "subject": title,
121 "content": 'Investigation required',
118 "content": "Investigation required",
122 "tags": ["appenlight", "alert", template_vars['alert_type']],
119 "tags": ["appenlight", "alert", template_vars["alert_type"]],
123 "link": template_vars['destination_url']
120 "link": template_vars["destination_url"],
124 }
121 }
125 client.send_to_inbox(payload)
122 client.send_to_inbox(payload)
126
123
@@ -137,23 +134,21 b' class FlowdockAlertChannel(AlertChannel):'
137 """
134 """
138 template_vars = self.uptime_alert_notification_vars(kwargs)
135 template_vars = self.uptime_alert_notification_vars(kwargs)
139
136
140 message = 'ALERT %s: %s has uptime issues' % (
137 message = "ALERT %s: %s has uptime issues" % (
141 template_vars['alert_action'],
138 template_vars["alert_action"],
142 template_vars['resource_name'],
139 template_vars["resource_name"],
143 )
140 )
144 submessage = 'Info: '
141 submessage = "Info: "
145 submessage += template_vars['reason']
142 submessage += template_vars["reason"]
146
143
147 client = FlowdockIntegration.create_client(
144 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
148 self.integration.config['api_token'])
149 payload = {
145 payload = {
150 "source": "AppEnlight",
146 "source": "AppEnlight",
151 "from_address": kwargs['request'].registry.settings[
147 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
152 'mailing.from_email'],
153 "subject": message,
148 "subject": message,
154 "content": submessage,
149 "content": submessage,
155 "tags": ["appenlight", "alert", 'uptime'],
150 "tags": ["appenlight", "alert", "uptime"],
156 "link": template_vars['destination_url']
151 "link": template_vars["destination_url"],
157 }
152 }
158 client.send_to_inbox(payload)
153 client.send_to_inbox(payload)
159
154
@@ -171,29 +166,29 b' class FlowdockAlertChannel(AlertChannel):'
171 """
166 """
172 template_vars = self.report_alert_notification_vars(kwargs)
167 template_vars = self.report_alert_notification_vars(kwargs)
173 message = "Daily report digest: %s - %s reports" % (
168 message = "Daily report digest: %s - %s reports" % (
174 template_vars['resource_name'], template_vars['confirmed_total'])
169 template_vars["resource_name"],
170 template_vars["confirmed_total"],
171 )
175
172
176 f_args = (template_vars['confirmed_total'],
173 f_args = (template_vars["confirmed_total"], template_vars["timestamp"])
177 template_vars['timestamp'])
178
174
179 payload = {
175 payload = {
180 "source": "AppEnlight",
176 "source": "AppEnlight",
181 "from_address": kwargs['request'].registry.settings[
177 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
182 'mailing.from_email'],
183 "subject": message,
178 "subject": message,
184 "content": '%s reports in total since %s' % f_args,
179 "content": "%s reports in total since %s" % f_args,
185 "tags": ["appenlight", "digest"],
180 "tags": ["appenlight", "digest"],
186 "link": template_vars['destination_url']
181 "link": template_vars["destination_url"],
187 }
182 }
188
183
189 client = FlowdockIntegration.create_client(
184 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
190 self.integration.config['api_token'])
191 client.send_to_inbox(payload)
185 client.send_to_inbox(payload)
192
186
193 log_msg = 'DIGEST : %s via %s :: %s reports' % (
187 log_msg = "DIGEST : %s via %s :: %s reports" % (
194 kwargs['user'].user_name,
188 kwargs["user"].user_name,
195 self.channel_visible_value,
189 self.channel_visible_value,
196 template_vars['confirmed_total'])
190 template_vars["confirmed_total"],
191 )
197 log.warning(log_msg)
192 log.warning(log_msg)
198
193
199 def notify_chart_alert(self, **kwargs):
194 def notify_chart_alert(self, **kwargs):
@@ -209,25 +204,22 b' class FlowdockAlertChannel(AlertChannel):'
209 """
204 """
210 template_vars = self.chart_alert_notification_vars(kwargs)
205 template_vars = self.chart_alert_notification_vars(kwargs)
211
206
212 message = 'ALERT {}: value in "{}" chart ' \
207 message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
213 'met alert "{}" criteria'.format(
208 template_vars["alert_action"],
214 template_vars['alert_action'],
209 template_vars["chart_name"],
215 template_vars['chart_name'],
210 template_vars["action_name"],
216 template_vars['action_name'],
217 )
211 )
218 submessage = 'Info: '
212 submessage = "Info: "
219 for item in template_vars['readable_values']:
213 for item in template_vars["readable_values"]:
220 submessage += '{}: {}\n'.format(item['label'], item['value'])
214 submessage += "{}: {}\n".format(item["label"], item["value"])
221
215
222 client = FlowdockIntegration.create_client(
216 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
223 self.integration.config['api_token'])
224 payload = {
217 payload = {
225 "source": "AppEnlight",
218 "source": "AppEnlight",
226 "from_address": kwargs['request'].registry.settings[
219 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
227 'mailing.from_email'],
228 "subject": message,
220 "subject": message,
229 "content": submessage,
221 "content": submessage,
230 "tags": ["appenlight", "alert", 'chart'],
222 "tags": ["appenlight", "alert", "chart"],
231 "link": template_vars['destination_url']
223 "link": template_vars["destination_url"],
232 }
224 }
233 client.send_to_inbox(payload)
225 client.send_to_inbox(payload)
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class HipchatAlertChannel(AlertChannel):
25 class HipchatAlertChannel(AlertChannel):
26 __mapper_args__ = {
26 __mapper_args__ = {"polymorphic_identity": "hipchat"}
27 'polymorphic_identity': 'hipchat'
28 }
29
27
30 def notify_reports(self, **kwargs):
28 def notify_reports(self, **kwargs):
31 """
29 """
@@ -41,46 +39,50 b' class HipchatAlertChannel(AlertChannel):'
41 """
39 """
42 template_vars = self.report_alert_notification_vars(kwargs)
40 template_vars = self.report_alert_notification_vars(kwargs)
43
41
44 app_url = kwargs['request'].registry.settings['_mail_url']
42 app_url = kwargs["request"].registry.settings["_mail_url"]
45 destination_url = kwargs['request'].route_url('/',
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
46 _app_url=app_url)
44 f_args = (
47 f_args = ('report',
45 "report",
48 template_vars['resource'].resource_id,
46 template_vars["resource"].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
49 )
52 *f_args)
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
53
51
54 if template_vars['confirmed_total'] > 1:
52 if template_vars["confirmed_total"] > 1:
55 template_vars["title"] = "%s - %s reports" % (
53 template_vars["title"] = "%s - %s reports" % (
56 template_vars['resource_name'],
54 template_vars["resource_name"],
57 template_vars['confirmed_total'],
55 template_vars["confirmed_total"],
58 )
56 )
59 else:
57 else:
60 error_title = truncate(template_vars['reports'][0][1].error or
58 error_title = truncate(
61 'slow report', 90)
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
62 template_vars["title"] = "%s - '%s' report" % (
61 template_vars["title"] = "%s - '%s' report" % (
63 template_vars['resource_name'],
62 template_vars["resource_name"],
64 error_title)
63 error_title,
64 )
65
65
66 template_vars["title"] += ' ' + destination_url
66 template_vars["title"] += " " + destination_url
67
67
68 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
68 log_msg = "NOTIFY : %s via %s :: %s reports" % (
69 kwargs['user'].user_name,
69 kwargs["user"].user_name,
70 self.channel_visible_value,
70 self.channel_visible_value,
71 template_vars['confirmed_total'])
71 template_vars["confirmed_total"],
72 )
72 log.warning(log_msg)
73 log.warning(log_msg)
73
74
74 client = HipchatIntegration.create_client(
75 client = HipchatIntegration.create_client(self.integration.config["api_token"])
75 self.integration.config['api_token'])
76 for room in self.integration.config["rooms"].split(","):
76 for room in self.integration.config['rooms'].split(','):
77 client.send(
77 client.send({
78 {
78 "message_format": 'text',
79 "message_format": "text",
79 "message": template_vars["title"],
80 "message": template_vars["title"],
80 "from": "AppEnlight",
81 "from": "AppEnlight",
81 "room_id": room.strip(),
82 "room_id": room.strip(),
82 "color": "yellow"
83 "color": "yellow",
83 })
84 }
85 )
84
86
85 def notify_report_alert(self, **kwargs):
87 def notify_report_alert(self, **kwargs):
86 """
88 """
@@ -95,35 +97,37 b' class HipchatAlertChannel(AlertChannel):'
95 """
97 """
96 template_vars = self.report_alert_notification_vars(kwargs)
98 template_vars = self.report_alert_notification_vars(kwargs)
97
99
98 if kwargs['event'].unified_alert_action() == 'OPEN':
100 if kwargs["event"].unified_alert_action() == "OPEN":
99
101
100 title = 'ALERT %s: %s - %s %s' % (
102 title = "ALERT %s: %s - %s %s" % (
101 template_vars['alert_action'],
103 template_vars["alert_action"],
102 template_vars['resource_name'],
104 template_vars["resource_name"],
103 kwargs['event'].values['reports'],
105 kwargs["event"].values["reports"],
104 template_vars['report_type'],
106 template_vars["report_type"],
105 )
107 )
106
108
107 else:
109 else:
108 title = 'ALERT %s: %s type: %s' % (
110 title = "ALERT %s: %s type: %s" % (
109 template_vars['alert_action'],
111 template_vars["alert_action"],
110 template_vars['resource_name'],
112 template_vars["resource_name"],
111 template_vars['alert_type'].replace('_', ' '),
113 template_vars["alert_type"].replace("_", " "),
112 )
114 )
113
115
114 title += '\n ' + template_vars['destination_url']
116 title += "\n " + template_vars["destination_url"]
115
117
116 api_token = self.integration.config['api_token']
118 api_token = self.integration.config["api_token"]
117 client = HipchatIntegration.create_client(api_token)
119 client = HipchatIntegration.create_client(api_token)
118 for room in self.integration.config['rooms'].split(','):
120 for room in self.integration.config["rooms"].split(","):
119 client.send({
121 client.send(
120 "message_format": 'text',
122 {
121 "message": title,
123 "message_format": "text",
122 "from": "AppEnlight",
124 "message": title,
123 "room_id": room.strip(),
125 "from": "AppEnlight",
124 "color": "red",
126 "room_id": room.strip(),
125 "notify": '1'
127 "color": "red",
126 })
128 "notify": "1",
129 }
130 )
127
131
128 def notify_uptime_alert(self, **kwargs):
132 def notify_uptime_alert(self, **kwargs):
129 """
133 """
@@ -138,24 +142,26 b' class HipchatAlertChannel(AlertChannel):'
138 """
142 """
139 template_vars = self.uptime_alert_notification_vars(kwargs)
143 template_vars = self.uptime_alert_notification_vars(kwargs)
140
144
141 message = 'ALERT %s: %s has uptime issues\n' % (
145 message = "ALERT %s: %s has uptime issues\n" % (
142 template_vars['alert_action'],
146 template_vars["alert_action"],
143 template_vars['resource_name'],
147 template_vars["resource_name"],
144 )
148 )
145 message += template_vars['reason']
149 message += template_vars["reason"]
146 message += '\n{}'.format(template_vars['destination_url'])
150 message += "\n{}".format(template_vars["destination_url"])
147
151
148 api_token = self.integration.config['api_token']
152 api_token = self.integration.config["api_token"]
149 client = HipchatIntegration.create_client(api_token)
153 client = HipchatIntegration.create_client(api_token)
150 for room in self.integration.config['rooms'].split(','):
154 for room in self.integration.config["rooms"].split(","):
151 client.send({
155 client.send(
152 "message_format": 'text',
156 {
153 "message": message,
157 "message_format": "text",
154 "from": "AppEnlight",
158 "message": message,
155 "room_id": room.strip(),
159 "from": "AppEnlight",
156 "color": "red",
160 "room_id": room.strip(),
157 "notify": '1'
161 "color": "red",
158 })
162 "notify": "1",
163 }
164 )
159
165
160 def notify_chart_alert(self, **kwargs):
166 def notify_chart_alert(self, **kwargs):
161 """
167 """
@@ -169,29 +175,30 b' class HipchatAlertChannel(AlertChannel):'
169
175
170 """
176 """
171 template_vars = self.chart_alert_notification_vars(kwargs)
177 template_vars = self.chart_alert_notification_vars(kwargs)
172 message = 'ALERT {}: value in "{}" chart: ' \
178 message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format(
173 'met alert "{}" criteria\n'.format(
179 template_vars["alert_action"],
174 template_vars['alert_action'],
180 template_vars["chart_name"],
175 template_vars['chart_name'],
181 template_vars["action_name"],
176 template_vars['action_name'],
177 )
182 )
178
183
179 for item in template_vars['readable_values']:
184 for item in template_vars["readable_values"]:
180 message += '{}: {}\n'.format(item['label'], item['value'])
185 message += "{}: {}\n".format(item["label"], item["value"])
181
186
182 message += template_vars['destination_url']
187 message += template_vars["destination_url"]
183
188
184 api_token = self.integration.config['api_token']
189 api_token = self.integration.config["api_token"]
185 client = HipchatIntegration.create_client(api_token)
190 client = HipchatIntegration.create_client(api_token)
186 for room in self.integration.config['rooms'].split(','):
191 for room in self.integration.config["rooms"].split(","):
187 client.send({
192 client.send(
188 "message_format": 'text',
193 {
189 "message": message,
194 "message_format": "text",
190 "from": "AppEnlight",
195 "message": message,
191 "room_id": room.strip(),
196 "from": "AppEnlight",
192 "color": "red",
197 "room_id": room.strip(),
193 "notify": '1'
198 "color": "red",
194 })
199 "notify": "1",
200 }
201 )
195
202
196 def send_digest(self, **kwargs):
203 def send_digest(self, **kwargs):
197 """
204 """
@@ -206,24 +213,26 b' class HipchatAlertChannel(AlertChannel):'
206
213
207 """
214 """
208 template_vars = self.report_alert_notification_vars(kwargs)
215 template_vars = self.report_alert_notification_vars(kwargs)
209 f_args = (template_vars['resource_name'],
216 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
210 template_vars['confirmed_total'],)
211 message = "Daily report digest: %s - %s reports" % f_args
217 message = "Daily report digest: %s - %s reports" % f_args
212 message += '\n{}'.format(template_vars['destination_url'])
218 message += "\n{}".format(template_vars["destination_url"])
213 api_token = self.integration.config['api_token']
219 api_token = self.integration.config["api_token"]
214 client = HipchatIntegration.create_client(api_token)
220 client = HipchatIntegration.create_client(api_token)
215 for room in self.integration.config['rooms'].split(','):
221 for room in self.integration.config["rooms"].split(","):
216 client.send({
222 client.send(
217 "message_format": 'text',
223 {
218 "message": message,
224 "message_format": "text",
219 "from": "AppEnlight",
225 "message": message,
220 "room_id": room.strip(),
226 "from": "AppEnlight",
221 "color": "green",
227 "room_id": room.strip(),
222 "notify": '1'
228 "color": "green",
223 })
229 "notify": "1",
224
230 }
225 log_msg = 'DIGEST : %s via %s :: %s reports' % (
231 )
226 kwargs['user'].user_name,
232
233 log_msg = "DIGEST : %s via %s :: %s reports" % (
234 kwargs["user"].user_name,
227 self.channel_visible_value,
235 self.channel_visible_value,
228 template_vars['confirmed_total'])
236 template_vars["confirmed_total"],
237 )
229 log.warning(log_msg)
238 log.warning(log_msg)
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class SlackAlertChannel(AlertChannel):
25 class SlackAlertChannel(AlertChannel):
26 __mapper_args__ = {
26 __mapper_args__ = {"polymorphic_identity": "slack"}
27 'polymorphic_identity': 'slack'
28 }
29
27
30 def notify_reports(self, **kwargs):
28 def notify_reports(self, **kwargs):
31 """
29 """
@@ -40,45 +38,40 b' class SlackAlertChannel(AlertChannel):'
40
38
41 """
39 """
42 template_vars = self.report_alert_notification_vars(kwargs)
40 template_vars = self.report_alert_notification_vars(kwargs)
43 template_vars["title"] = template_vars['resource_name']
41 template_vars["title"] = template_vars["resource_name"]
44
42
45 if template_vars['confirmed_total'] > 1:
43 if template_vars["confirmed_total"] > 1:
46 template_vars['subtext'] = '%s reports' % template_vars[
44 template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"]
47 'confirmed_total']
48 else:
45 else:
49 error_title = truncate(template_vars['reports'][0][1].error or
46 error_title = truncate(
50 'slow report', 90)
47 template_vars["reports"][0][1].error or "slow report", 90
51 template_vars['subtext'] = error_title
48 )
49 template_vars["subtext"] = error_title
52
50
53 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
51 log_msg = "NOTIFY : %s via %s :: %s reports" % (
54 kwargs['user'].user_name,
52 kwargs["user"].user_name,
55 self.channel_visible_value,
53 self.channel_visible_value,
56 template_vars['confirmed_total'])
54 template_vars["confirmed_total"],
55 )
57 log.warning(log_msg)
56 log.warning(log_msg)
58
57
59 client = SlackIntegration.create_client(
58 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
60 self.integration.config['webhook_url'])
61 report_data = {
59 report_data = {
62 "username": "AppEnlight",
60 "username": "AppEnlight",
63 "icon_emoji": ":fire:",
61 "icon_emoji": ":fire:",
64 "attachments": [
62 "attachments": [
65 {
63 {
66 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
64 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
67 "fallback": "*%s* - <%s| Browse>" % (
65 "fallback": "*%s* - <%s| Browse>"
68 template_vars["title"],
66 % (template_vars["title"], template_vars["destination_url"]),
69 template_vars['destination_url']),
67 "pretext": "*%s* - <%s| Browse>"
70 "pretext": "*%s* - <%s| Browse>" % (
68 % (template_vars["title"], template_vars["destination_url"]),
71 template_vars["title"],
72 template_vars['destination_url']),
73 "color": "warning",
69 "color": "warning",
74 "fields": [
70 "fields": [
75 {
71 {"value": "Info: %s" % template_vars["subtext"], "short": False}
76 "value": 'Info: %s' % template_vars['subtext'],
72 ],
77 "short": False
78 }
79 ]
80 }
73 }
81 ]
74 ],
82 }
75 }
83 client.make_request(data=report_data)
76 client.make_request(data=report_data)
84
77
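The report_data dict above is a standard Slack incoming-webhook body. A
sketch that posts an equivalent payload with the standard library
instead of SlackIntegration (an assumption; the real code calls
client.make_request(data=report_data)):

    import json
    import urllib.request

    report_data = {
        "username": "AppEnlight",
        "icon_emoji": ":fire:",
        "attachments": [
            {
                "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                "fallback": "*Demo* - <https://appenlight.example.com| Browse>",
                "pretext": "*Demo* - <https://appenlight.example.com| Browse>",
                "color": "warning",
                "fields": [{"value": "Info: 3 reports", "short": False}],
            }
        ],
    }

    req = urllib.request.Request(
        "https://hooks.slack.com/services/T000/B000/XXXX",  # placeholder webhook
        data=json.dumps(report_data).encode("utf-8"),
        headers={"Content-Type": "application/json"},
    )
    # urllib.request.urlopen(req)  # uncomment with a real webhook URL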
@@ -95,53 +88,51 b' class SlackAlertChannel(AlertChannel):'
95 """
88 """
96 template_vars = self.report_alert_notification_vars(kwargs)
89 template_vars = self.report_alert_notification_vars(kwargs)
97
90
98 if kwargs['event'].unified_alert_action() == 'OPEN':
91 if kwargs["event"].unified_alert_action() == "OPEN":
99 title = '*ALERT %s*: %s' % (
92 title = "*ALERT %s*: %s" % (
100 template_vars['alert_action'],
93 template_vars["alert_action"],
101 template_vars['resource_name']
94 template_vars["resource_name"],
102 )
95 )
103
96
104 template_vars['subtext'] = 'Got at least %s %s' % (
97 template_vars["subtext"] = "Got at least %s %s" % (
105 kwargs['event'].values['reports'],
98 kwargs["event"].values["reports"],
106 template_vars['report_type']
99 template_vars["report_type"],
107 )
100 )
108
101
109 else:
102 else:
110 title = '*ALERT %s*: %s' % (
103 title = "*ALERT %s*: %s" % (
111 template_vars['alert_action'],
104 template_vars["alert_action"],
112 template_vars['resource_name'],
105 template_vars["resource_name"],
113 )
106 )
114
107
115 template_vars['subtext'] = ''
108 template_vars["subtext"] = ""
116
109
117 alert_type = template_vars['alert_type'].replace('_', ' ')
110 alert_type = template_vars["alert_type"].replace("_", " ")
118 alert_type = alert_type.replace('alert', '').capitalize()
111 alert_type = alert_type.replace("alert", "").capitalize()
119
112
120 template_vars['type'] = "Type: %s" % alert_type
113 template_vars["type"] = "Type: %s" % alert_type
121
114
122 client = SlackIntegration.create_client(
115 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
123 self.integration.config['webhook_url']
124 )
125 report_data = {
116 report_data = {
126 "username": "AppEnlight",
117 "username": "AppEnlight",
127 "icon_emoji": ":rage:",
118 "icon_emoji": ":rage:",
128 "attachments": [
119 "attachments": [
129 {
120 {
130 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
121 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
131 "fallback": "%s - <%s| Browse>" % (
122 "fallback": "%s - <%s| Browse>"
132 title, template_vars['destination_url']),
123 % (title, template_vars["destination_url"]),
133 "pretext": "%s - <%s| Browse>" % (
124 "pretext": "%s - <%s| Browse>"
134 title, template_vars['destination_url']),
125 % (title, template_vars["destination_url"]),
135 "color": "danger",
126 "color": "danger",
136 "fields": [
127 "fields": [
137 {
128 {
138 "title": template_vars['type'],
129 "title": template_vars["type"],
139 "value": template_vars['subtext'],
130 "value": template_vars["subtext"],
140 "short": False
131 "short": False,
141 }
132 }
142 ]
133 ],
143 }
134 }
144 ]
135 ],
145 }
136 }
146 client.make_request(data=report_data)
137 client.make_request(data=report_data)
147
138
@@ -158,13 +149,11 b' class SlackAlertChannel(AlertChannel):'
158 """
149 """
159 template_vars = self.uptime_alert_notification_vars(kwargs)
150 template_vars = self.uptime_alert_notification_vars(kwargs)
160
151
161 title = '*ALERT %s*: %s' % (
152 title = "*ALERT %s*: %s" % (
162 template_vars['alert_action'],
153 template_vars["alert_action"],
163 template_vars['resource_name'],
154 template_vars["resource_name"],
164 )
165 client = SlackIntegration.create_client(
166 self.integration.config['webhook_url']
167 )
155 )
156 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
168 report_data = {
157 report_data = {
169 "username": "AppEnlight",
158 "username": "AppEnlight",
170 "icon_emoji": ":rage:",
159 "icon_emoji": ":rage:",
@@ -172,19 +161,21 b' class SlackAlertChannel(AlertChannel):'
172 {
161 {
173 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
162 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
174 "fallback": "{} - <{}| Browse>".format(
163 "fallback": "{} - <{}| Browse>".format(
175 title, template_vars['destination_url']),
164 title, template_vars["destination_url"]
165 ),
176 "pretext": "{} - <{}| Browse>".format(
166 "pretext": "{} - <{}| Browse>".format(
177 title, template_vars['destination_url']),
167 title, template_vars["destination_url"]
168 ),
178 "color": "danger",
169 "color": "danger",
179 "fields": [
170 "fields": [
180 {
171 {
181 "title": "Application has uptime issues",
172 "title": "Application has uptime issues",
182 "value": template_vars['reason'],
173 "value": template_vars["reason"],
183 "short": False
174 "short": False,
184 }
175 }
185 ]
176 ],
186 }
177 }
187 ]
178 ],
188 }
179 }
189 client.make_request(data=report_data)
180 client.make_request(data=report_data)
190
181
@@ -201,39 +192,39 b' class SlackAlertChannel(AlertChannel):'
201 """
192 """
202 template_vars = self.chart_alert_notification_vars(kwargs)
193 template_vars = self.chart_alert_notification_vars(kwargs)
203
194
204 title = '*ALERT {}*: value in *"{}"* chart ' \
195 title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format(
205 'met alert *"{}"* criteria'.format(
196 template_vars["alert_action"],
206 template_vars['alert_action'],
197 template_vars["chart_name"],
207 template_vars['chart_name'],
198 template_vars["action_name"],
208 template_vars['action_name'],
209 )
199 )
210
200
211 subtext = ''
201 subtext = ""
212 for item in template_vars['readable_values']:
202 for item in template_vars["readable_values"]:
213 subtext += '{} - {}\n'.format(item['label'], item['value'])
203 subtext += "{} - {}\n".format(item["label"], item["value"])
214
204
215 client = SlackIntegration.create_client(
205 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
216 self.integration.config['webhook_url']
217 )
218 report_data = {
206 report_data = {
219 "username": "AppEnlight",
207 "username": "AppEnlight",
220 "icon_emoji": ":rage:",
208 "icon_emoji": ":rage:",
221 "attachments": [
209 "attachments": [
222 {"mrkdwn_in": ["text", "pretext", "title", "fallback"],
210 {
223 "fallback": "{} - <{}| Browse>".format(
211 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
224 title, template_vars['destination_url']),
212 "fallback": "{} - <{}| Browse>".format(
225 "pretext": "{} - <{}| Browse>".format(
213 title, template_vars["destination_url"]
226 title, template_vars['destination_url']),
214 ),
227 "color": "danger",
215 "pretext": "{} - <{}| Browse>".format(
228 "fields": [
216 title, template_vars["destination_url"]
229 {
217 ),
230 "title": "Following criteria were met:",
218 "color": "danger",
231 "value": subtext,
219 "fields": [
232 "short": False
220 {
233 }
221 "title": "Following criteria were met:",
234 ]
222 "value": subtext,
235 }
223 "short": False,
236 ]
224 }
225 ],
226 }
227 ],
237 }
228 }
238 client.make_request(data=report_data)
229 client.make_request(data=report_data)
239
230
@@ -250,36 +241,30 b' class SlackAlertChannel(AlertChannel):'
250
241
251 """
242 """
252 template_vars = self.report_alert_notification_vars(kwargs)
243 template_vars = self.report_alert_notification_vars(kwargs)
253 title = "*Daily report digest*: %s" % template_vars['resource_name']
244 title = "*Daily report digest*: %s" % template_vars["resource_name"]
254
245
255 subtext = '%s reports' % template_vars['confirmed_total']
246 subtext = "%s reports" % template_vars["confirmed_total"]
256
247
257 client = SlackIntegration.create_client(
248 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
258 self.integration.config['webhook_url']
259 )
260 report_data = {
249 report_data = {
261 "username": "AppEnlight",
250 "username": "AppEnlight",
262 "attachments": [
251 "attachments": [
263 {
252 {
264 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
253 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
265 "fallback": "%s : <%s| Browse>" % (
254 "fallback": "%s : <%s| Browse>"
266 title, template_vars['destination_url']),
255 % (title, template_vars["destination_url"]),
267 "pretext": "%s: <%s| Browse>" % (
256 "pretext": "%s: <%s| Browse>"
268 title, template_vars['destination_url']),
257 % (title, template_vars["destination_url"]),
269 "color": "good",
258 "color": "good",
270 "fields": [
259 "fields": [{"title": "Got at least: %s" % subtext, "short": False}],
271 {
272 "title": "Got at least: %s" % subtext,
273 "short": False
274 }
275 ]
276 }
260 }
277 ]
261 ],
278 }
262 }
279 client.make_request(data=report_data)
263 client.make_request(data=report_data)
280
264
281 log_msg = 'DIGEST : %s via %s :: %s reports' % (
265 log_msg = "DIGEST : %s via %s :: %s reports" % (
282 kwargs['user'].user_name,
266 kwargs["user"].user_name,
283 self.channel_visible_value,
267 self.channel_visible_value,
284 template_vars['confirmed_total'])
268 template_vars["confirmed_total"],
269 )
285 log.warning(log_msg)
270 log.warning(log_msg)
@@ -24,7 +24,7 b' log = logging.getLogger(__name__)'
24
24
25
25
26 def generate_api_key():
26 def generate_api_key():
27 uid = str(uuid.uuid4()).replace('-', '')
27 uid = str(uuid.uuid4()).replace("-", "")
28 return uid[0:32]
28 return uid[0:32]
29
29
30
30
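generate_api_key strips the dashes out of a UUID4 and keeps the remaining 32 hex characters; uuid.uuid4().hex yields the same 32-character form directly. A quick runnable check:

    import uuid

    def generate_api_key():
        # str(uuid.uuid4()) is 36 chars including 4 dashes; stripping them leaves 32
        uid = str(uuid.uuid4()).replace("-", "")
        return uid[0:32]

    assert len(generate_api_key()) == 32
    assert len(uuid.uuid4().hex) == 32  # equivalent dashless form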
@@ -33,61 +33,69 b' class Application(Resource):'
33 Resource of application type
33 Resource of application type
34 """
34 """
35
35
36 __tablename__ = 'applications'
36 __tablename__ = "applications"
37 __mapper_args__ = {'polymorphic_identity': 'application'}
37 __mapper_args__ = {"polymorphic_identity": "application"}
38
38
39 # lists configurable possible permissions for this resource type
39 # lists configurable possible permissions for this resource type
40 __possible_permissions__ = ('view', 'update_reports')
40 __possible_permissions__ = ("view", "update_reports")
41
41
42 resource_id = sa.Column(sa.Integer(),
42 resource_id = sa.Column(
43 sa.ForeignKey('resources.resource_id',
43 sa.Integer(),
44 onupdate='CASCADE',
44 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
45 ondelete='CASCADE', ),
45 primary_key=True,
46 primary_key=True, )
46 )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default='')
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default="")
48 api_key = sa.Column(sa.String(32), nullable=False, unique=True, index=True,
48 api_key = sa.Column(
49 default=generate_api_key)
49 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
50 public_key = sa.Column(sa.String(32), nullable=False, unique=True,
50 )
51 index=True,
51 public_key = sa.Column(
52 default=generate_api_key)
52 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
53 default_grouping = sa.Column(sa.Unicode(20), nullable=False,
53 )
54 default='url_traceback')
54 default_grouping = sa.Column(
55 sa.Unicode(20), nullable=False, default="url_traceback"
56 )
55 error_report_threshold = sa.Column(sa.Integer(), default=10)
57 error_report_threshold = sa.Column(sa.Integer(), default=10)
56 slow_report_threshold = sa.Column(sa.Integer(), default=10)
58 slow_report_threshold = sa.Column(sa.Integer(), default=10)
57 allow_permanent_storage = sa.Column(sa.Boolean(), default=False,
59 allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False)
58 nullable=False)
59
60
60 @sa.orm.validates('default_grouping')
61 @sa.orm.validates("default_grouping")
61 def validate_default_grouping(self, key, grouping):
62 def validate_default_grouping(self, key, grouping):
62 """ validate if resouce can have specific permission """
63 """ validate if resouce can have specific permission """
63 assert grouping in ['url_type', 'url_traceback', 'traceback_server']
64 assert grouping in ["url_type", "url_traceback", "traceback_server"]
64 return grouping
65 return grouping
65
66
66 report_groups = sa.orm.relationship('ReportGroup',
67 report_groups = sa.orm.relationship(
67 cascade="all, delete-orphan",
68 "ReportGroup",
68 passive_deletes=True,
69 cascade="all, delete-orphan",
69 passive_updates=True,
70 passive_deletes=True,
70 lazy='dynamic',
71 passive_updates=True,
71 backref=sa.orm.backref('application',
72 lazy="dynamic",
72 lazy="joined"))
73 backref=sa.orm.backref("application", lazy="joined"),
73
74 )
74 postprocess_conf = sa.orm.relationship('ApplicationPostprocessConf',
75
75 cascade="all, delete-orphan",
76 postprocess_conf = sa.orm.relationship(
76 passive_deletes=True,
77 "ApplicationPostprocessConf",
77 passive_updates=True,
78 cascade="all, delete-orphan",
78 backref='resource')
79 passive_deletes=True,
79
80 passive_updates=True,
80 logs = sa.orm.relationship('Log',
81 backref="resource",
81 lazy='dynamic',
82 )
82 backref='application',
83
83 passive_deletes=True,
84 logs = sa.orm.relationship(
84 passive_updates=True, )
85 "Log",
85
86 lazy="dynamic",
86 integrations = sa.orm.relationship('IntegrationBase',
87 backref="application",
87 backref='resource',
88 passive_deletes=True,
88 cascade="all, delete-orphan",
89 passive_updates=True,
89 passive_deletes=True,
90 )
90 passive_updates=True, )
91
92 integrations = sa.orm.relationship(
93 "IntegrationBase",
94 backref="resource",
95 cascade="all, delete-orphan",
96 passive_deletes=True,
97 passive_updates=True,
98 )
91
99
92 def generate_api_key(self):
100 def generate_api_key(self):
93 return generate_api_key()
101 return generate_api_key()
@@ -95,10 +103,11 b' class Application(Resource):'
95
103
96 def after_update(mapper, connection, target):
104 def after_update(mapper, connection, target):
97 from appenlight.models.services.application import ApplicationService
105 from appenlight.models.services.application import ApplicationService
98 log.info('clearing out ApplicationService cache')
106
107 log.info("clearing out ApplicationService cache")
99 ApplicationService.by_id_cached().invalidate(target.resource_id)
108 ApplicationService.by_id_cached().invalidate(target.resource_id)
100 ApplicationService.by_api_key_cached().invalidate(target.api_key)
109 ApplicationService.by_api_key_cached().invalidate(target.api_key)
101
110
102
111
103 sa.event.listen(Application, 'after_update', after_update)
112 sa.event.listen(Application, "after_update", after_update)
104 sa.event.listen(Application, 'after_delete', after_update)
113 sa.event.listen(Application, "after_delete", after_update)
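The after_update hook registered above implements cache invalidation on write: any UPDATE or DELETE of an Application row evicts the cached by-id and by-api-key lookups. A self-contained sketch of the same pattern (the Thing model and the dict cache are illustrative stand-ins, not AppEnlight code):

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()
    _cache = {}  # stand-in for the dogpile cache regions behind ApplicationService

    class Thing(Base):
        __tablename__ = "things"
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.String(50))

    def invalidate_cache(mapper, connection, target):
        # evict the cached row so the next read refetches fresh data
        _cache.pop(target.id, None)

    # one handler can serve both events, as done for Application above
    sa.event.listen(Thing, "after_update", invalidate_cache)
    sa.event.listen(Thing, "after_delete", invalidate_cache)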
@@ -27,18 +27,20 b' class ApplicationPostprocessConf(Base, BaseModel):'
27 This is later used for rule parsing like "if 10 occurrences bump priority +1"
27 This is later used for rule parsing like "if 10 occurrences bump priority +1"
28 """
28 """
29
29
30 __tablename__ = 'application_postprocess_conf'
30 __tablename__ = "application_postprocess_conf"
31
31
32 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
32 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
33 resource_id = sa.Column(sa.Integer(),
33 resource_id = sa.Column(
34 sa.ForeignKey('resources.resource_id',
34 sa.Integer(),
35 onupdate='CASCADE',
35 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
36 ondelete='CASCADE'))
36 )
37 do = sa.Column(sa.Unicode(25), nullable=False)
37 do = sa.Column(sa.Unicode(25), nullable=False)
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default='')
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default="")
39 rule = sa.Column(sa.dialects.postgresql.JSON,
39 rule = sa.Column(
40 nullable=False, default={'field': 'http_status',
40 sa.dialects.postgresql.JSON,
41 "op": "ge", "value": "500"})
41 nullable=False,
42 default={"field": "http_status", "op": "ge", "value": "500"},
43 )
42
44
43 def postprocess(self, item):
45 def postprocess(self, item):
44 new_value = int(self.new_value)
46 new_value = int(self.new_value)
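The default rule above reads as "if http_status >= 500". AppEnlight's real rule parser lives elsewhere; the following is only a hedged sketch of how such a flat rule could be evaluated, to illustrate the stored JSON shape:

    import operator

    OPS = {"ge": operator.ge, "gt": operator.gt, "le": operator.le,
           "lt": operator.lt, "eq": operator.eq}

    def rule_matches(rule, report):
        # rule is the flat JSON stored in ApplicationPostprocessConf.rule
        op = OPS[rule["op"]]
        return op(report.get(rule["field"], 0), int(rule["value"]))

    rule = {"field": "http_status", "op": "ge", "value": "500"}
    assert rule_matches(rule, {"http_status": 502}) is True
    assert rule_matches(rule, {"http_status": 404}) is False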
@@ -29,17 +29,22 b' class AuthToken(Base, BaseModel):'
29 """
29 """
30 Stores API auth tokens issued to users
30 Stores API auth tokens issued to users
31 """
31 """
32 __tablename__ = 'auth_tokens'
32
33 __tablename__ = "auth_tokens"
33
34
34 id = sa.Column(sa.Integer, primary_key=True, nullable=False)
35 id = sa.Column(sa.Integer, primary_key=True, nullable=False)
35 token = sa.Column(sa.Unicode(40), nullable=False,
36 token = sa.Column(
36 default=lambda x: UserService.generate_random_string(40))
37 sa.Unicode(40),
37 owner_id = sa.Column(sa.Unicode(30),
38 nullable=False,
38 sa.ForeignKey('users.id', onupdate='CASCADE',
39 default=lambda x: UserService.generate_random_string(40),
39 ondelete='CASCADE'))
40 )
41 owner_id = sa.Column(
42 sa.Unicode(30),
43 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
44 )
40 creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
45 creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
41 expires = sa.Column(sa.DateTime)
46 expires = sa.Column(sa.DateTime)
42 description = sa.Column(sa.Unicode, default='')
47 description = sa.Column(sa.Unicode, default="")
43
48
44 @property
49 @property
45 def is_expired(self):
50 def is_expired(self):
@@ -49,4 +54,4 b' class AuthToken(Base, BaseModel):'
49 return False
54 return False
50
55
51 def __str__(self):
56 def __str__(self):
52 return '<AuthToken u:%s t:%s...>' % (self.owner_id, self.token[0:10])
57 return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10])
@@ -22,7 +22,7 b' from . import Base'
22
22
23
23
24 class Config(Base, BaseModel):
24 class Config(Base, BaseModel):
25 __tablename__ = 'config'
25 __tablename__ = "config"
26
26
27 key = sa.Column(sa.Unicode, primary_key=True)
27 key = sa.Column(sa.Unicode, primary_key=True)
28 section = sa.Column(sa.Unicode, primary_key=True)
28 section = sa.Column(sa.Unicode, primary_key=True)
@@ -30,39 +30,40 b' log = logging.getLogger(__name__)'
30
30
31
31
32 class Event(Base, BaseModel):
32 class Event(Base, BaseModel):
33 __tablename__ = 'events'
33 __tablename__ = "events"
34
34
35 types = {'error_report_alert': 1,
35 types = {
36 'slow_report_alert': 3,
36 "error_report_alert": 1,
37 'comment': 5,
37 "slow_report_alert": 3,
38 'assignment': 6,
38 "comment": 5,
39 'uptime_alert': 7,
39 "assignment": 6,
40 'chart_alert': 9}
40 "uptime_alert": 7,
41 "chart_alert": 9,
42 }
41
43
42 statuses = {'active': 1,
44 statuses = {"active": 1, "closed": 0}
43 'closed': 0}
44
45
45 id = sa.Column(sa.Integer, primary_key=True)
46 id = sa.Column(sa.Integer, primary_key=True)
46 start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
47 start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
47 end_date = sa.Column(sa.DateTime)
48 end_date = sa.Column(sa.DateTime)
48 status = sa.Column(sa.Integer, default=1)
49 status = sa.Column(sa.Integer, default=1)
49 event_type = sa.Column(sa.Integer, default=1)
50 event_type = sa.Column(sa.Integer, default=1)
50 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
51 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
51 nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
53 resource_id = sa.Column(
53 nullable=True)
54 sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True
54 resource_id = sa.Column(sa.Integer(),
55 )
55 sa.ForeignKey('resources.resource_id'),
56 nullable=True)
57 target_id = sa.Column(sa.Integer)
56 target_id = sa.Column(sa.Integer)
58 target_uuid = sa.Column(sa.Unicode(40))
57 target_uuid = sa.Column(sa.Unicode(40))
59 text = sa.Column(sa.UnicodeText())
58 text = sa.Column(sa.UnicodeText())
60 values = sa.Column(JSON(), nullable=False, default=None)
59 values = sa.Column(JSON(), nullable=False, default=None)
61
60
62 def __repr__(self):
61 def __repr__(self):
63 return '<Event %s, app:%s, %s>' % (self.unified_alert_name(),
62 return "<Event %s, app:%s, %s>" % (
64 self.resource_id,
63 self.unified_alert_name(),
65 self.unified_alert_action())
64 self.resource_id,
65 self.unified_alert_action(),
66 )
66
67
67 @property
68 @property
68 def reverse_types(self):
69 def reverse_types(self):
@@ -73,9 +74,9 b' class Event(Base, BaseModel):'
73
74
74 def unified_alert_action(self):
75 def unified_alert_action(self):
75 event_name = self.reverse_types[self.event_type]
76 event_name = self.reverse_types[self.event_type]
76 if self.status == Event.statuses['closed']:
77 if self.status == Event.statuses["closed"]:
77 return "CLOSE"
78 return "CLOSE"
78 if self.status != Event.statuses['closed']:
79 if self.status != Event.statuses["closed"]:
79 return "OPEN"
80 return "OPEN"
80 return event_name
81 return event_name
81
82
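unified_alert_action relies on reverse_types, which inverts the types mapping so the stored integer can be translated back into an event name. The inversion in isolation:

    types = {
        "error_report_alert": 1,
        "slow_report_alert": 3,
        "comment": 5,
        "assignment": 6,
        "uptime_alert": 7,
        "chart_alert": 9,
    }

    # invert name -> id into id -> name
    reverse_types = {value: key for key, value in types.items()}
    assert reverse_types[7] == "uptime_alert"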
@@ -89,30 +90,33 b' class Event(Base, BaseModel):'
89 request = get_current_request()
90 request = get_current_request()
90 if not resource:
91 if not resource:
91 return
92 return
92 users = set([p.user for p in ResourceService.users_for_perm(resource, 'view')])
93 users = set([p.user for p in ResourceService.users_for_perm(resource, "view")])
93 for user in users:
94 for user in users:
94 for channel in user.alert_channels:
95 for channel in user.alert_channels:
95 matches_resource = not channel.resources or resource in [r.resource_id for r in channel.resources]
96 matches_resource = not channel.resources or resource in [
97 r.resource_id for r in channel.resources
98 ]
96 if (
99 if (
97 not channel.channel_validated or
100 not channel.channel_validated
98 not channel.send_alerts or
101 or not channel.send_alerts
99 not matches_resource
102 or not matches_resource
100 ):
103 ):
101 continue
104 continue
102 else:
105 else:
103 try:
106 try:
104 channel.notify_alert(resource=resource,
107 channel.notify_alert(
105 event=self,
108 resource=resource, event=self, user=user, request=request
106 user=user,
109 )
107 request=request)
108 except IntegrationException as e:
110 except IntegrationException as e:
109 log.warning('%s' % e)
111 log.warning("%s" % e)
110
112
111 def validate_or_close(self, since_when, db_session=None):
113 def validate_or_close(self, since_when, db_session=None):
112 """ Checks if alerts should stay open or it's time to close them.
114 """ Checks if alerts should stay open or it's time to close them.
113 Generates close alert event if alerts get closed """
115 Generates close alert event if alerts get closed """
114 event_types = [Event.types['error_report_alert'],
116 event_types = [
115 Event.types['slow_report_alert']]
117 Event.types["error_report_alert"],
118 Event.types["slow_report_alert"],
119 ]
116 app = ResourceService.by_resource_id(self.resource_id)
120 app = ResourceService.by_resource_id(self.resource_id)
117 # if the app was deleted, close instantly
121 # if the app was deleted, close instantly
118 if not app:
122 if not app:
@@ -121,10 +125,11 b' class Event(Base, BaseModel):'
121
125
122 if self.event_type in event_types:
126 if self.event_type in event_types:
123 total = ReportStatService.count_by_type(
127 total = ReportStatService.count_by_type(
124 self.event_type, self.resource_id, since_when)
128 self.event_type, self.resource_id, since_when
125 if Event.types['error_report_alert'] == self.event_type:
129 )
130 if Event.types["error_report_alert"] == self.event_type:
126 threshold = app.error_report_threshold
131 threshold = app.error_report_threshold
127 if Event.types['slow_report_alert'] == self.event_type:
132 if Event.types["slow_report_alert"] == self.event_type:
128 threshold = app.slow_report_threshold
133 threshold = app.slow_report_threshold
129
134
130 if total < threshold:
135 if total < threshold:
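The decision here boils down to: count the reports of this alert's type since since_when, pick the matching per-application threshold, and close the alert once activity drops below it. A condensed sketch (the type ids and the default threshold of 10 come from the models above; the function is a stand-in for the service calls):

    def should_close(event_type, reports_since, error_threshold=10, slow_threshold=10):
        # 1 == error_report_alert, 3 == slow_report_alert in Event.types
        threshold = error_threshold if event_type == 1 else slow_threshold
        return reports_since < threshold

    assert should_close(1, 3) is True     # activity dropped below threshold -> close
    assert should_close(3, 25) is False   # still above threshold -> stay open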
@@ -135,31 +140,31 b' class Event(Base, BaseModel):'
135 Closes an event and sends notification to affected users
140 Closes an event and sends notification to affected users
136 """
141 """
137 self.end_date = datetime.utcnow()
142 self.end_date = datetime.utcnow()
138 self.status = Event.statuses['closed']
143 self.status = Event.statuses["closed"]
139 log.warning('ALERT: CLOSE: %s' % self)
144 log.warning("ALERT: CLOSE: %s" % self)
140 self.send_alerts()
145 self.send_alerts()
141
146
142 def text_representation(self):
147 def text_representation(self):
143 alert_type = self.unified_alert_name()
148 alert_type = self.unified_alert_name()
144 text = ''
149 text = ""
145 if 'slow_report' in alert_type:
150 if "slow_report" in alert_type:
146 text += 'Slow report alert'
151 text += "Slow report alert"
147 if 'error_report' in alert_type:
152 if "error_report" in alert_type:
148 text += 'Exception report alert'
153 text += "Exception report alert"
149 if 'uptime_alert' in alert_type:
154 if "uptime_alert" in alert_type:
150 text += 'Uptime alert'
155 text += "Uptime alert"
151 if 'chart_alert' in alert_type:
156 if "chart_alert" in alert_type:
152 text += 'Metrics value alert'
157 text += "Metrics value alert"
153
158
154 alert_action = self.unified_alert_action()
159 alert_action = self.unified_alert_action()
155 if alert_action == 'OPEN':
160 if alert_action == "OPEN":
156 text += ' got opened.'
161 text += " got opened."
157 if alert_action == 'CLOSE':
162 if alert_action == "CLOSE":
158 text += ' got closed.'
163 text += " got closed."
159 return text
164 return text
160
165
161 def get_dict(self, request=None):
166 def get_dict(self, request=None):
162 dict_data = super(Event, self).get_dict()
167 dict_data = super(Event, self).get_dict()
163 dict_data['text'] = self.text_representation()
168 dict_data["text"] = self.text_representation()
164 dict_data['resource_name'] = self.resource.resource_name
169 dict_data["resource_name"] = self.resource.resource_name
165 return dict_data
170 return dict_data
@@ -25,12 +25,12 b' from appenlight.lib.sqlalchemy_fields import EncryptedUnicode'
25 class ExternalIdentity(ExternalIdentityMixin, Base):
25 class ExternalIdentity(ExternalIdentityMixin, Base):
26 @declared_attr
26 @declared_attr
27 def access_token(self):
27 def access_token(self):
28 return sa.Column(EncryptedUnicode(255), default='')
28 return sa.Column(EncryptedUnicode(255), default="")
29
29
30 @declared_attr
30 @declared_attr
31 def alt_token(self):
31 def alt_token(self):
32 return sa.Column(EncryptedUnicode(255), default='')
32 return sa.Column(EncryptedUnicode(255), default="")
33
33
34 @declared_attr
34 @declared_attr
35 def token_secret(self):
35 def token_secret(self):
36 return sa.Column(EncryptedUnicode(255), default='')
36 return sa.Column(EncryptedUnicode(255), default="")
@@ -19,27 +19,28 b' from appenlight.models import Base'
19
19
20
20
21 class Group(GroupMixin, Base):
21 class Group(GroupMixin, Base):
22 __possible_permissions__ = ('root_administration',
22 __possible_permissions__ = (
23 'test_features',
23 "root_administration",
24 'admin_panel',
24 "test_features",
25 'admin_users',
25 "admin_panel",
26 'manage_partitions',)
26 "admin_users",
27 "manage_partitions",
28 )
27
29
28 def get_dict(self, exclude_keys=None, include_keys=None,
30 def get_dict(self, exclude_keys=None, include_keys=None, include_perms=False):
29 include_perms=False):
30 result = super(Group, self).get_dict(exclude_keys, include_keys)
31 result = super(Group, self).get_dict(exclude_keys, include_keys)
31 if include_perms:
32 if include_perms:
32 result['possible_permissions'] = self.__possible_permissions__
33 result["possible_permissions"] = self.__possible_permissions__
33 result['current_permissions'] = [p.perm_name for p in
34 result["current_permissions"] = [p.perm_name for p in self.permissions]
34 self.permissions]
35 else:
35 else:
36 result['possible_permissions'] = []
36 result["possible_permissions"] = []
37 result['current_permissions'] = []
37 result["current_permissions"] = []
38 exclude_keys_list = exclude_keys or []
38 exclude_keys_list = exclude_keys or []
39 include_keys_list = include_keys or []
39 include_keys_list = include_keys or []
40 d = {}
40 d = {}
41 for k in result.keys():
41 for k in result.keys():
42 if (k not in exclude_keys_list and
42 if k not in exclude_keys_list and (
43 (k in include_keys_list or not include_keys)):
43 k in include_keys_list or not include_keys
44 ):
44 d[k] = result[k]
45 d[k] = result[k]
45 return d
46 return d
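The filtering loop at the end of get_dict keeps a key when it is not excluded and is either explicitly included or no include list was passed at all. The same logic as a standalone helper:

    def filter_keys(result, exclude_keys=None, include_keys=None):
        exclude = exclude_keys or []
        include = include_keys or []
        # keep k unless excluded; when an include list exists, it must contain k
        return {k: v for k, v in result.items()
                if k not in exclude and (k in include or not include_keys)}

    row = {"id": 1, "group_name": "admins", "member_count": 3}
    assert filter_keys(row, exclude_keys=["member_count"]) == {"id": 1, "group_name": "admins"}
    assert filter_keys(row, include_keys=["id"]) == {"id": 1}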
@@ -14,8 +14,9 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from ziggurat_foundations.models.group_resource_permission import \
17 from ziggurat_foundations.models.group_resource_permission import (
18 GroupResourcePermissionMixin
18 GroupResourcePermissionMixin,
19 )
19 from appenlight.models import Base
20 from appenlight.models import Base
20
21
21
22
@@ -32,34 +32,37 b' class IntegrationBase(Base, BaseModel):'
32 """
32 """
33 Model from which all integrations inherit using polymorphic approach
33 Model from which all integrations inherit using polymorphic approach
34 """
34 """
35 __tablename__ = 'integrations'
35
36 __tablename__ = "integrations"
36
37
37 front_visible = False
38 front_visible = False
38 as_alert_channel = False
39 as_alert_channel = False
39 supports_report_alerting = False
40 supports_report_alerting = False
40
41
41 id = sa.Column(sa.Integer, primary_key=True)
42 id = sa.Column(sa.Integer, primary_key=True)
42 resource_id = sa.Column(sa.Integer,
43 resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id"))
43 sa.ForeignKey('applications.resource_id'))
44 integration_name = sa.Column(sa.Unicode(64))
44 integration_name = sa.Column(sa.Unicode(64))
45 _config = sa.Column('config', JSON(), nullable=False, default='')
45 _config = sa.Column("config", JSON(), nullable=False, default="")
46 modified_date = sa.Column(sa.DateTime)
46 modified_date = sa.Column(sa.DateTime)
47
47
48 channel = sa.orm.relationship('AlertChannel',
48 channel = sa.orm.relationship(
49 cascade="all,delete-orphan",
49 "AlertChannel",
50 passive_deletes=True,
50 cascade="all,delete-orphan",
51 passive_updates=True,
51 passive_deletes=True,
52 uselist=False,
52 passive_updates=True,
53 backref='integration')
53 uselist=False,
54 backref="integration",
55 )
54
56
55 __mapper_args__ = {
57 __mapper_args__ = {
56 'polymorphic_on': 'integration_name',
58 "polymorphic_on": "integration_name",
57 'polymorphic_identity': 'integration'
59 "polymorphic_identity": "integration",
58 }
60 }
59
61
60 @classmethod
62 @classmethod
61 def by_app_id_and_integration_name(cls, resource_id, integration_name,
63 def by_app_id_and_integration_name(
62 db_session=None):
64 cls, resource_id, integration_name, db_session=None
65 ):
63 db_session = get_db_session(db_session)
66 db_session = get_db_session(db_session)
64 query = db_session.query(cls)
67 query = db_session.query(cls)
65 query = query.filter(cls.integration_name == integration_name)
68 query = query.filter(cls.integration_name == integration_name)
@@ -72,7 +75,6 b' class IntegrationBase(Base, BaseModel):'
72
75
73 @config.setter
76 @config.setter
74 def config(self, value):
77 def config(self, value):
75 if not hasattr(value, 'items'):
78 if not hasattr(value, "items"):
76 raise Exception('IntegrationBase.config only accepts '
79 raise Exception("IntegrationBase.config only accepts " "flat dictionaries")
77 'flat dictionaries')
78 self._config = encrypt_dictionary_keys(value)
80 self._config = encrypt_dictionary_keys(value)
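The setter guards on hasattr(value, "items"), i.e. anything dict-like, before encrypting and storing. The same guard-then-transform property shape in miniature (encrypt_dictionary_keys is AppEnlight's; the str() transform below is only a stand-in):

    class Configured(object):
        def __init__(self):
            self._config = {}

        @property
        def config(self):
            return self._config

        @config.setter
        def config(self, value):
            # accept only flat, dict-like values, mirroring IntegrationBase.config
            if not hasattr(value, "items"):
                raise Exception("config only accepts flat dictionaries")
            self._config = {k: str(v) for k, v in value.items()}  # stand-in transform

    c = Configured()
    c.config = {"webhook_url": "https://example.com/hook"}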
@@ -16,8 +16,7 b''
16
16
17 import requests
17 import requests
18 from requests_oauthlib import OAuth1
18 from requests_oauthlib import OAuth1
19 from appenlight.models.integrations import (IntegrationBase,
19 from appenlight.models.integrations import IntegrationBase, IntegrationException
20 IntegrationException)
21
20
22 _ = str
21 _ = str
23
22
@@ -27,14 +26,12 b' class NotFoundException(Exception):'
27
26
28
27
29 class BitbucketIntegration(IntegrationBase):
28 class BitbucketIntegration(IntegrationBase):
30 __mapper_args__ = {
29 __mapper_args__ = {"polymorphic_identity": "bitbucket"}
31 'polymorphic_identity': 'bitbucket'
32 }
33 front_visible = True
30 front_visible = True
34 as_alert_channel = False
31 as_alert_channel = False
35 supports_report_alerting = False
32 supports_report_alerting = False
36 action_notification = True
33 action_notification = True
37 integration_action = 'Add issue to Bitbucket'
34 integration_action = "Add issue to Bitbucket"
38
35
39 @classmethod
36 @classmethod
40 def create_client(cls, request, user_name=None, repo_name=None):
37 def create_client(cls, request, user_name=None, repo_name=None):
@@ -46,27 +43,36 b' class BitbucketIntegration(IntegrationBase):'
46 token = None
43 token = None
47 secret = None
44 secret = None
48 for identity in request.user.external_identities:
45 for identity in request.user.external_identities:
49 if identity.provider_name == 'bitbucket':
46 if identity.provider_name == "bitbucket":
50 token = identity.access_token
47 token = identity.access_token
51 secret = identity.token_secret
48 secret = identity.token_secret
52 break
49 break
53 if not token:
50 if not token:
54 raise IntegrationException(
51 raise IntegrationException("No valid auth token present for this service")
55 'No valid auth token present for this service')
52 client = BitbucketClient(
56 client = BitbucketClient(token, secret,
53 token,
57 user_name,
54 secret,
58 repo_name,
55 user_name,
59 config['authomatic.pr.bitbucket.key'],
56 repo_name,
60 config['authomatic.pr.bitbucket.secret'])
57 config["authomatic.pr.bitbucket.key"],
58 config["authomatic.pr.bitbucket.secret"],
59 )
61 return client
60 return client
62
61
63
62
64 class BitbucketClient(object):
63 class BitbucketClient(object):
65 api_url = 'https://bitbucket.org/api/1.0'
64 api_url = "https://bitbucket.org/api/1.0"
66 repo_type = 'bitbucket'
65 repo_type = "bitbucket"
67
66
68 def __init__(self, token, secret, owner, repo_name, bitbucket_consumer_key,
67 def __init__(
69 bitbucket_consumer_secret):
68 self,
69 token,
70 secret,
71 owner,
72 repo_name,
73 bitbucket_consumer_key,
74 bitbucket_consumer_secret,
75 ):
70 self.access_token = token
76 self.access_token = token
71 self.token_secret = secret
77 self.token_secret = secret
72 self.owner = owner
78 self.owner = owner
@@ -75,89 +81,108 b' class BitbucketClient(object):'
75 self.bitbucket_consumer_secret = bitbucket_consumer_secret
81 self.bitbucket_consumer_secret = bitbucket_consumer_secret
76
82
77 possible_keys = {
83 possible_keys = {
78 'status': ['new', 'open', 'resolved', 'on hold', 'invalid',
84 "status": [
79 'duplicate', 'wontfix'],
85 "new",
80 'priority': ['trivial', 'minor', 'major', 'critical', 'blocker'],
86 "open",
81 'kind': ['bug', 'enhancement', 'proposal', 'task']
87 "resolved",
88 "on hold",
89 "invalid",
90 "duplicate",
91 "wontfix",
92 ],
93 "priority": ["trivial", "minor", "major", "critical", "blocker"],
94 "kind": ["bug", "enhancement", "proposal", "task"],
82 }
95 }
83
96
84 def get_statuses(self):
97 def get_statuses(self):
85 """Gets list of possible item statuses"""
98 """Gets list of possible item statuses"""
86 return self.possible_keys['status']
99 return self.possible_keys["status"]
87
100
88 def get_priorities(self):
101 def get_priorities(self):
89 """Gets list of possible item statuses"""
102 """Gets list of possible item statuses"""
90 return self.possible_keys['priority']
103 return self.possible_keys["priority"]
91
104
92 def make_request(self, url, method='get', data=None, headers=None):
105 def make_request(self, url, method="get", data=None, headers=None):
93 """
106 """
94 Performs HTTP request to bitbucket
107 Performs HTTP request to bitbucket
95 """
108 """
96 auth = OAuth1(self.bitbucket_consumer_key,
109 auth = OAuth1(
97 self.bitbucket_consumer_secret,
110 self.bitbucket_consumer_key,
98 self.access_token, self.token_secret)
111 self.bitbucket_consumer_secret,
112 self.access_token,
113 self.token_secret,
114 )
99 try:
115 try:
100 resp = getattr(requests, method)(url, data=data, auth=auth,
116 resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10)
101 timeout=10)
102 except Exception as e:
117 except Exception as e:
103 raise IntegrationException(
118 raise IntegrationException(
104 _('Error communicating with Bitbucket: %s') % (e,))
119 _("Error communicating with Bitbucket: %s") % (e,)
120 )
105 if resp.status_code == 401:
121 if resp.status_code == 401:
106 raise IntegrationException(
122 raise IntegrationException(_("You are not authorized to access this repo"))
107 _('You are not authorized to access this repo'))
108 elif resp.status_code == 404:
123 elif resp.status_code == 404:
109 raise IntegrationException(_('User or repo name are incorrect'))
124 raise IntegrationException(_("User or repo name are incorrect"))
110 elif resp.status_code not in [200, 201]:
125 elif resp.status_code not in [200, 201]:
111 raise IntegrationException(
126 raise IntegrationException(
112 _('Bitbucket response_code: %s') % resp.status_code)
127 _("Bitbucket response_code: %s") % resp.status_code
128 )
113 try:
129 try:
114 return resp.json()
130 return resp.json()
115 except Exception as e:
131 except Exception as e:
116 raise IntegrationException(
132 raise IntegrationException(
117 _('Error decoding response from Bitbucket: %s') % (e,))
133 _("Error decoding response from Bitbucket: %s") % (e,)
134 )
118
135
119 def get_assignees(self):
136 def get_assignees(self):
120 """Gets list of possible assignees"""
137 """Gets list of possible assignees"""
121 url = '%(api_url)s/privileges/%(owner)s/%(repo_name)s' % {
138 url = "%(api_url)s/privileges/%(owner)s/%(repo_name)s" % {
122 'api_url': self.api_url,
139 "api_url": self.api_url,
123 'owner': self.owner,
140 "owner": self.owner,
124 'repo_name': self.repo_name}
141 "repo_name": self.repo_name,
142 }
125
143
126 data = self.make_request(url)
144 data = self.make_request(url)
127 results = [{'user': self.owner, 'name': '(Repo owner)'}]
145 results = [{"user": self.owner, "name": "(Repo owner)"}]
128 if data:
146 if data:
129 for entry in data:
147 for entry in data:
130 results.append({"user": entry['user']['username'],
148 results.append(
131 "name": entry['user'].get('display_name')})
149 {
150 "user": entry["user"]["username"],
151 "name": entry["user"].get("display_name"),
152 }
153 )
132 return results
154 return results
133
155
134 def create_issue(self, form_data):
156 def create_issue(self, form_data):
135 """
157 """
136 Creates a new issue in the tracker using a REST call
158 Creates a new issue in the tracker using a REST call
137 """
159 """
138 url = '%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/' % {
160 url = "%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/" % {
139 'api_url': self.api_url,
161 "api_url": self.api_url,
140 'owner': self.owner,
162 "owner": self.owner,
141 'repo_name': self.repo_name}
163 "repo_name": self.repo_name,
164 }
142
165
143 payload = {
166 payload = {
144 "title": form_data['title'],
167 "title": form_data["title"],
145 "content": form_data['content'],
168 "content": form_data["content"],
146 "kind": form_data['kind'],
169 "kind": form_data["kind"],
147 "priority": form_data['priority'],
170 "priority": form_data["priority"],
148 "responsible": form_data['responsible']
171 "responsible": form_data["responsible"],
149 }
172 }
150 data = self.make_request(url, 'post', payload)
173 data = self.make_request(url, "post", payload)
151 f_args = {
174 f_args = {
152 "owner": self.owner,
175 "owner": self.owner,
153 "repo_name": self.repo_name,
176 "repo_name": self.repo_name,
154 "issue_id": data['local_id']
177 "issue_id": data["local_id"],
155 }
178 }
156 web_url = 'https://bitbucket.org/%(owner)s/%(repo_name)s' \
179 web_url = (
157 '/issue/%(issue_id)s/issue-title' % f_args
180 "https://bitbucket.org/%(owner)s/%(repo_name)s"
181 "/issue/%(issue_id)s/issue-title" % f_args
182 )
158 to_return = {
183 to_return = {
159 'id': data['local_id'],
184 "id": data["local_id"],
160 'resource_url': data['resource_uri'],
185 "resource_url": data["resource_uri"],
161 'web_url': web_url
186 "web_url": web_url,
162 }
187 }
163 return to_return
188 return to_return
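make_request signs every call with OAuth1 and dispatches the HTTP verb via getattr(requests, method). That dispatch pattern, reduced to a hedged standalone sketch (all four credentials are placeholders; the real ones come from the authomatic.pr.bitbucket.* settings):

    import requests
    from requests_oauthlib import OAuth1

    def signed_request(url, method="get", data=None):
        auth = OAuth1("consumer-key", "consumer-secret", "token", "token-secret")
        # getattr maps the method name onto requests.get / requests.post / ...
        resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10)
        resp.raise_for_status()
        return resp.json()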
@@ -20,8 +20,7 b' from requests.exceptions import HTTPError, ConnectionError'
20 from camplight import Request, Campfire
20 from camplight import Request, Campfire
21 from camplight.exceptions import CamplightException
21 from camplight.exceptions import CamplightException
22
22
23 from appenlight.models.integrations import (IntegrationBase,
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
24 IntegrationException)
25
24
26 _ = str
25 _ = str
27
26
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33
32
34
33
35 class CampfireIntegration(IntegrationBase):
34 class CampfireIntegration(IntegrationBase):
36 __mapper_args__ = {
35 __mapper_args__ = {"polymorphic_identity": "campfire"}
37 'polymorphic_identity': 'campfire'
38 }
39 front_visible = False
36 front_visible = False
40 as_alert_channel = True
37 as_alert_channel = True
41 supports_report_alerting = True
38 supports_report_alerting = True
42 action_notification = True
39 action_notification = True
43 integration_action = 'Message via Campfire'
40 integration_action = "Message via Campfire"
44
41
45 @classmethod
42 @classmethod
46 def create_client(cls, api_token, account):
43 def create_client(cls, api_token, account):
@@ -50,7 +47,7 b' class CampfireIntegration(IntegrationBase):'
50
47
51 class CampfireClient(object):
48 class CampfireClient(object):
52 def __init__(self, api_token, account):
49 def __init__(self, api_token, account):
53 request = Request('https://%s.campfirenow.com' % account, api_token)
50 request = Request("https://%s.campfirenow.com" % account, api_token)
54 self.campfire = Campfire(request)
51 self.campfire = Campfire(request)
55
52
56 def get_account(self):
53 def get_account(self):
@@ -65,10 +62,10 b' class CampfireClient(object):'
65 except (HTTPError, CamplightException) as e:
62 except (HTTPError, CamplightException) as e:
66 raise IntegrationException(str(e))
63 raise IntegrationException(str(e))
67
64
68 def speak_to_room(self, room, message, sound='RIMSHOT'):
65 def speak_to_room(self, room, message, sound="RIMSHOT"):
69 try:
66 try:
70 room = self.campfire.room(room)
67 room = self.campfire.room(room)
71 room.join()
68 room.join()
72 room.speak(message, type_='TextMessage')
69 room.speak(message, type_="TextMessage")
73 except (HTTPError, CamplightException, ConnectionError) as e:
70 except (HTTPError, CamplightException, ConnectionError) as e:
74 raise IntegrationException(str(e))
71 raise IntegrationException(str(e))
@@ -20,8 +20,7 b' import requests'
20 from requests.auth import HTTPBasicAuth
20 from requests.auth import HTTPBasicAuth
21 import simplejson as json
21 import simplejson as json
22
22
23 from appenlight.models.integrations import (IntegrationBase,
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
24 IntegrationException)
25
24
26 _ = str
25 _ = str
27
26
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33
32
34
33
35 class FlowdockIntegration(IntegrationBase):
34 class FlowdockIntegration(IntegrationBase):
36 __mapper_args__ = {
35 __mapper_args__ = {"polymorphic_identity": "flowdock"}
37 'polymorphic_identity': 'flowdock'
38 }
39 front_visible = False
36 front_visible = False
40 as_alert_channel = True
37 as_alert_channel = True
41 supports_report_alerting = True
38 supports_report_alerting = True
42 action_notification = True
39 action_notification = True
43 integration_action = 'Message via Flowdock'
40 integration_action = "Message via Flowdock"
44
41
45 @classmethod
42 @classmethod
46 def create_client(cls, api_token):
43 def create_client(cls, api_token):
@@ -50,33 +47,37 b' class FlowdockIntegration(IntegrationBase):'
50
47
51 class FlowdockClient(object):
48 class FlowdockClient(object):
52 def __init__(self, api_token):
49 def __init__(self, api_token):
53 self.auth = HTTPBasicAuth(api_token, '')
50 self.auth = HTTPBasicAuth(api_token, "")
54 self.api_token = api_token
51 self.api_token = api_token
55 self.api_url = 'https://api.flowdock.com/v1/messages'
52 self.api_url = "https://api.flowdock.com/v1/messages"
56
53
57 def make_request(self, url, method='get', data=None):
54 def make_request(self, url, method="get", data=None):
58 headers = {
55 headers = {
59 'Content-Type': 'application/json',
56 "Content-Type": "application/json",
60 'User-Agent': 'appenlight-flowdock',
57 "User-Agent": "appenlight-flowdock",
61 }
58 }
62 try:
59 try:
63 if data:
60 if data:
64 data = json.dumps(data)
61 data = json.dumps(data)
65 resp = getattr(requests, method)(url, data=data, headers=headers,
62 resp = getattr(requests, method)(
66 timeout=10)
63 url, data=data, headers=headers, timeout=10
64 )
67 except Exception as e:
65 except Exception as e:
68 raise IntegrationException(
66 raise IntegrationException(
69 _('Error communicating with Flowdock: %s') % (e,))
67 _("Error communicating with Flowdock: %s") % (e,)
68 )
70 if resp.status_code > 299:
69 if resp.status_code > 299:
71 raise IntegrationException(resp.text)
70 raise IntegrationException(resp.text)
72 return resp
71 return resp
73
72
74 def send_to_chat(self, payload):
73 def send_to_chat(self, payload):
75 url = '%(api_url)s/chat/%(api_token)s' % {'api_url': self.api_url,
74 url = "%(api_url)s/chat/%(api_token)s" % {
76 'api_token': self.api_token}
75 "api_url": self.api_url,
77 return self.make_request(url, method='post', data=payload).json()
76 "api_token": self.api_token,
77 }
78 return self.make_request(url, method="post", data=payload).json()
78
79
79 def send_to_inbox(self, payload):
80 def send_to_inbox(self, payload):
80 f_args = {'api_url': self.api_url, 'api_token': self.api_token}
81 f_args = {"api_url": self.api_url, "api_token": self.api_token}
81 url = '%(api_url)s/team_inbox/%(api_token)s' % f_args
82 url = "%(api_url)s/team_inbox/%(api_token)s" % f_args
82 return self.make_request(url, method='post', data=payload).json()
83 return self.make_request(url, method="post", data=payload).json()
@@ -27,14 +27,12 b' class GithubAuthException(Exception):'
27
27
28
28
29 class GithubIntegration(IntegrationBase):
29 class GithubIntegration(IntegrationBase):
30 __mapper_args__ = {
30 __mapper_args__ = {"polymorphic_identity": "github"}
31 'polymorphic_identity': 'github'
32 }
33 front_visible = True
31 front_visible = True
34 as_alert_channel = False
32 as_alert_channel = False
35 supports_report_alerting = False
33 supports_report_alerting = False
36 action_notification = True
34 action_notification = True
37 integration_action = 'Add issue to Github'
35 integration_action = "Add issue to Github"
38
36
39 @classmethod
37 @classmethod
40 def create_client(cls, request, user_name=None, repo_name=None):
38 def create_client(cls, request, user_name=None, repo_name=None):
@@ -45,112 +43,116 b' class GithubIntegration(IntegrationBase):'
45 token = None
43 token = None
46 secret = None
44 secret = None
47 for identity in request.user.external_identities:
45 for identity in request.user.external_identities:
48 if identity.provider_name == 'github':
46 if identity.provider_name == "github":
49 token = identity.access_token
47 token = identity.access_token
50 secret = identity.token_secret
48 secret = identity.token_secret
51 break
49 break
52 if not token:
50 if not token:
53 raise IntegrationException(
51 raise IntegrationException("No valid auth token present for this service")
54 'No valid auth token present for this service')
55 client = GithubClient(token=token, owner=user_name, name=repo_name)
52 client = GithubClient(token=token, owner=user_name, name=repo_name)
56 return client
53 return client
57
54
58
55
59 class GithubClient(object):
56 class GithubClient(object):
60 api_url = 'https://api.github.com'
57 api_url = "https://api.github.com"
61 repo_type = 'github'
58 repo_type = "github"
62
59
63 def __init__(self, token, owner, name):
60 def __init__(self, token, owner, name):
64 self.access_token = token
61 self.access_token = token
65 self.owner = owner
62 self.owner = owner
66 self.name = name
63 self.name = name
67
64
68 def make_request(self, url, method='get', data=None, headers=None):
65 def make_request(self, url, method="get", data=None, headers=None):
69 req_headers = {'User-Agent': 'appenlight',
66 req_headers = {
70 'Content-Type': 'application/json',
67 "User-Agent": "appenlight",
71 'Authorization': 'token %s' % self.access_token}
68 "Content-Type": "application/json",
69 "Authorization": "token %s" % self.access_token,
70 }
72 try:
71 try:
73 if data:
72 if data:
74 data = json.dumps(data)
73 data = json.dumps(data)
75 resp = getattr(requests, method)(url, data=data,
74 resp = getattr(requests, method)(
76 headers=req_headers,
75 url, data=data, headers=req_headers, timeout=10
77 timeout=10)
76 )
78 except Exception as e:
77 except Exception as e:
79 msg = 'Error communicating with Github: %s'
78 msg = "Error communicating with Github: %s"
80 raise IntegrationException(_(msg) % (e,))
79 raise IntegrationException(_(msg) % (e,))
81
80
82 if resp.status_code == 404:
81 if resp.status_code == 404:
83 msg = 'User or repo name are incorrect'
82 msg = "User or repo name are incorrect"
84 raise IntegrationException(_(msg))
83 raise IntegrationException(_(msg))
85 if resp.status_code == 401:
84 if resp.status_code == 401:
86 msg = 'You are not authorized to access this repo'
85 msg = "You are not authorized to access this repo"
87 raise IntegrationException(_(msg))
86 raise IntegrationException(_(msg))
88 elif resp.status_code not in [200, 201]:
87 elif resp.status_code not in [200, 201]:
89 msg = 'Github response_code: %s'
88 msg = "Github response_code: %s"
90 raise IntegrationException(_(msg) % resp.status_code)
89 raise IntegrationException(_(msg) % resp.status_code)
91 try:
90 try:
92 return resp.json()
91 return resp.json()
93 except Exception as e:
92 except Exception as e:
94 msg = 'Error decoding response from Github: %s'
93 msg = "Error decoding response from Github: %s"
95 raise IntegrationException(_(msg) % (e,))
94 raise IntegrationException(_(msg) % (e,))
96
95
97 def get_statuses(self):
96 def get_statuses(self):
98 """Gets list of possible item statuses"""
97 """Gets list of possible item statuses"""
99 url = '%(api_url)s/repos/%(owner)s/%(name)s/labels' % {
98 url = "%(api_url)s/repos/%(owner)s/%(name)s/labels" % {
100 'api_url': self.api_url,
99 "api_url": self.api_url,
101 'owner': self.owner,
100 "owner": self.owner,
102 'name': self.name}
101 "name": self.name,
102 }
103
103
104 data = self.make_request(url)
104 data = self.make_request(url)
105
105
106 statuses = []
106 statuses = []
107 for status in data:
107 for status in data:
108 statuses.append(status['name'])
108 statuses.append(status["name"])
109 return statuses
109 return statuses
110
110
111 def get_repo(self):
111 def get_repo(self):
112 """Gets list of possible item statuses"""
112 """Gets list of possible item statuses"""
113 url = '%(api_url)s/repos/%(owner)s/%(name)s' % {
113 url = "%(api_url)s/repos/%(owner)s/%(name)s" % {
114 'api_url': self.api_url,
114 "api_url": self.api_url,
115 'owner': self.owner,
115 "owner": self.owner,
116 'name': self.name}
116 "name": self.name,
117 }
117
118
118 data = self.make_request(url)
119 data = self.make_request(url)
119 return data
120 return data
120
121
121 def get_assignees(self):
122 def get_assignees(self):
122 """Gets list of possible assignees"""
123 """Gets list of possible assignees"""
123 url = '%(api_url)s/repos/%(owner)s/%(name)s/collaborators' % {
124 url = "%(api_url)s/repos/%(owner)s/%(name)s/collaborators" % {
124 'api_url': self.api_url,
125 "api_url": self.api_url,
125 'owner': self.owner,
126 "owner": self.owner,
126 'name': self.name}
127 "name": self.name,
128 }
127 data = self.make_request(url)
129 data = self.make_request(url)
128 results = []
130 results = []
129 for entry in data:
131 for entry in data:
130 results.append({"user": entry['login'],
132 results.append({"user": entry["login"], "name": entry.get("name")})
131 "name": entry.get('name')})
132 return results
133 return results
133
134
134 def create_issue(self, form_data):
135 def create_issue(self, form_data):
135 """
136 """
136 Make a REST call to create issue in Github's issue tracker
137 Make a REST call to create issue in Github's issue tracker
137 """
138 """
138 url = '%(api_url)s/repos/%(owner)s/%(name)s/issues' % {
139 url = "%(api_url)s/repos/%(owner)s/%(name)s/issues" % {
139 'api_url': self.api_url,
140 "api_url": self.api_url,
140 'owner': self.owner,
141 "owner": self.owner,
141 'name': self.name}
142 "name": self.name,
143 }
142
144
143 payload = {
145 payload = {
144 "title": form_data['title'],
146 "title": form_data["title"],
145 "body": form_data['content'],
147 "body": form_data["content"],
146 "labels": [],
148 "labels": [],
147 "assignee": form_data['responsible']
149 "assignee": form_data["responsible"],
148 }
150 }
149 payload['labels'].extend(form_data['kind'])
151 payload["labels"].extend(form_data["kind"])
150 data = self.make_request(url, 'post', data=payload)
152 data = self.make_request(url, "post", data=payload)
151 to_return = {
153 to_return = {
152 'id': data['number'],
154 "id": data["number"],
153 'resource_url': data['url'],
155 "resource_url": data["url"],
154 'web_url': data['html_url']
156 "web_url": data["html_url"],
155 }
157 }
156 return to_return
158 return to_return
@@ -30,14 +30,12 b' class NotFoundException(Exception):'
30
30
31
31
32 class HipchatIntegration(IntegrationBase):
32 class HipchatIntegration(IntegrationBase):
33 __mapper_args__ = {
33 __mapper_args__ = {"polymorphic_identity": "hipchat"}
34 'polymorphic_identity': 'hipchat'
35 }
36 front_visible = False
34 front_visible = False
37 as_alert_channel = True
35 as_alert_channel = True
38 supports_report_alerting = True
36 supports_report_alerting = True
39 action_notification = True
37 action_notification = True
40 integration_action = 'Message via Hipchat'
38 integration_action = "Message via Hipchat"
41
39
42 @classmethod
40 @classmethod
43 def create_client(cls, api_token):
41 def create_client(cls, api_token):
@@ -48,36 +46,30 b' class HipchatIntegration(IntegrationBase):'
48 class HipchatClient(object):
46 class HipchatClient(object):
49 def __init__(self, api_token):
47 def __init__(self, api_token):
50 self.api_token = api_token
48 self.api_token = api_token
51 self.api_url = 'https://api.hipchat.com/v1'
49 self.api_url = "https://api.hipchat.com/v1"
52
50
53 def make_request(self, endpoint, method='get', data=None):
51 def make_request(self, endpoint, method="get", data=None):
54 headers = {
52 headers = {"User-Agent": "appenlight-hipchat"}
55 'User-Agent': 'appenlight-hipchat',
53 url = "%s%s" % (self.api_url, endpoint)
56 }
54 params = {"format": "json", "auth_token": self.api_token}
57 url = '%s%s' % (self.api_url, endpoint)
58 params = {
59 'format': 'json',
60 'auth_token': self.api_token
61 }
62 try:
55 try:
63 resp = getattr(requests, method)(url, data=data, headers=headers,
56 resp = getattr(requests, method)(
64 params=params,
57 url, data=data, headers=headers, params=params, timeout=3
65 timeout=3)
58 )
66 except Exception as e:
59 except Exception as e:
67 msg = 'Error communicating with Hipchat: %s'
60 msg = "Error communicating with Hipchat: %s"
68 raise IntegrationException(_(msg) % (e,))
61 raise IntegrationException(_(msg) % (e,))
69 if resp.status_code == 404:
62 if resp.status_code == 404:
70 msg = 'Error communicating with Hipchat - Room not found'
63 msg = "Error communicating with Hipchat - Room not found"
71 raise IntegrationException(msg)
64 raise IntegrationException(msg)
72 elif resp.status_code != requests.codes.ok:
65 elif resp.status_code != requests.codes.ok:
73 msg = 'Error communicating with Hipchat - status code: %s'
66 msg = "Error communicating with Hipchat - status code: %s"
74 raise IntegrationException(msg % resp.status_code)
67 raise IntegrationException(msg % resp.status_code)
75 return resp
68 return resp
76
69
77 def get_rooms(self):
70 def get_rooms(self):
78 # not used with notification api token
71 # not used with notification api token
79 return self.make_request('/rooms/list')
72 return self.make_request("/rooms/list")
80
73
81 def send(self, payload):
74 def send(self, payload):
82 return self.make_request('/rooms/message', method='post',
75 return self.make_request("/rooms/message", method="post", data=payload).json()
83 data=payload).json()
@@ -15,8 +15,7 b''
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import jira
17 import jira
18 from appenlight.models.integrations import (IntegrationBase,
18 from appenlight.models.integrations import IntegrationBase, IntegrationException
19 IntegrationException)
20
19
21 _ = str
20 _ = str
22
21
@@ -26,14 +25,12 b' class NotFoundException(Exception):'
26
25
27
26
28 class JiraIntegration(IntegrationBase):
27 class JiraIntegration(IntegrationBase):
29 __mapper_args__ = {
28 __mapper_args__ = {"polymorphic_identity": "jira"}
30 'polymorphic_identity': 'jira'
31 }
32 front_visible = True
29 front_visible = True
33 as_alert_channel = False
30 as_alert_channel = False
34 supports_report_alerting = False
31 supports_report_alerting = False
35 action_notification = True
32 action_notification = True
36 integration_action = 'Add issue to Jira'
33 integration_action = "Add issue to Jira"
37
34
38
35
39 class JiraClient(object):
36 class JiraClient(object):
@@ -44,12 +41,14 b' class JiraClient(object):'
44 self.project = project
41 self.project = project
45 self.request = request
42 self.request = request
46 try:
43 try:
47 self.client = jira.client.JIRA(options={'server': host_name},
44 self.client = jira.client.JIRA(
48 basic_auth=(user_name, password))
45 options={"server": host_name}, basic_auth=(user_name, password)
46 )
49 except jira.JIRAError as e:
47 except jira.JIRAError as e:
50 raise IntegrationException(
48 raise IntegrationException(
51 'Communication problem: HTTP_STATUS:%s, URL:%s ' % (
49 "Communication problem: HTTP_STATUS:%s, URL:%s "
52 e.status_code, e.url))
50 % (e.status_code, e.url)
51 )
53
52
54 def get_projects(self):
53 def get_projects(self):
55 projects = self.client.projects()
54 projects = self.client.projects()
@@ -58,42 +57,42 b' class JiraClient(object):'
58 def get_assignees(self, request):
57 def get_assignees(self, request):
59 """Gets list of possible assignees"""
58 """Gets list of possible assignees"""
60 cache_region = request.registry.cache_regions.redis_sec_30
59 cache_region = request.registry.cache_regions.redis_sec_30
61 @cache_region.cache_on_arguments('JiraClient.get_assignees')
60
61 @cache_region.cache_on_arguments("JiraClient.get_assignees")
62 def cached(project_name):
62 def cached(project_name):
63 users = self.client.search_assignable_users_for_issues(
63 users = self.client.search_assignable_users_for_issues(
64 None, project=project_name)
64 None, project=project_name
65 )
65 results = []
66 results = []
66 for user in users:
67 for user in users:
67 results.append({"id": user.name, "name": user.displayName})
68 results.append({"id": user.name, "name": user.displayName})
68 return results
69 return results
70
69 return cached(self.project)
71 return cached(self.project)
70
72
71 def get_issue_types(self, request):
73 def get_issue_types(self, request):
72 metadata = self.get_metadata(request)
74 metadata = self.get_metadata(request)
73 assignees = self.get_assignees(request)
75 assignees = self.get_assignees(request)
74 parsed_metadata = []
76 parsed_metadata = []
75 for entry in metadata['projects'][0]['issuetypes']:
77 for entry in metadata["projects"][0]["issuetypes"]:
76 issue = {"name": entry['name'],
78 issue = {"name": entry["name"], "id": entry["id"], "fields": []}
77 "id": entry['id'],
79 for i_id, field_i in entry["fields"].items():
78 "fields": []}
79 for i_id, field_i in entry['fields'].items():
80 field = {
80 field = {
81 "name": field_i['name'],
81 "name": field_i["name"],
82 "id": i_id,
82 "id": i_id,
83 "required": field_i['required'],
83 "required": field_i["required"],
84 "values": [],
84 "values": [],
85 "type": field_i['schema'].get('type')
85 "type": field_i["schema"].get("type"),
86 }
86 }
87 if field_i.get('allowedValues'):
87 if field_i.get("allowedValues"):
88 field['values'] = []
88 field["values"] = []
89 for i in field_i['allowedValues']:
89 for i in field_i["allowedValues"]:
90 field['values'].append(
90 field["values"].append(
91 {'id': i['id'],
91 {"id": i["id"], "name": i.get("name", i.get("value", ""))}
92 'name': i.get('name', i.get('value', ''))
92 )
93 })
93 if field["id"] == "assignee":
94 if field['id'] == 'assignee':
94 field["values"] = assignees
95 field['values'] = assignees
95 issue["fields"].append(field)
96 issue['fields'].append(field)
97 parsed_metadata.append(issue)
96 parsed_metadata.append(issue)
98 return parsed_metadata
97 return parsed_metadata
99
98
@@ -102,35 +101,37 @@ class JiraClient(object):
         # @cache_region.cache_on_arguments('JiraClient.get_metadata')
         def cached(project_name):
             return self.client.createmeta(
-                projectKeys=project_name, expand='projects.issuetypes.fields')
+                projectKeys=project_name, expand="projects.issuetypes.fields"
+            )
+
         return cached(self.project)

     def create_issue(self, form_data, request):
         issue_types = self.get_issue_types(request)
         payload = {
-            'project': {'key': form_data['project']},
-            'summary': form_data['title'],
-            'description': form_data['content'],
-            'issuetype': {'id': form_data['issue_type']},
-            "priority": {'id': form_data['priority']},
-            "assignee": {'name': form_data['responsible']},
+            "project": {"key": form_data["project"]},
+            "summary": form_data["title"],
+            "description": form_data["content"],
+            "issuetype": {"id": form_data["issue_type"]},
+            "priority": {"id": form_data["priority"]},
+            "assignee": {"name": form_data["responsible"]},
         }
         for issue_type in issue_types:
-            if issue_type['id'] == form_data['issue_type']:
-                for field in issue_type['fields']:
+            if issue_type["id"] == form_data["issue_type"]:
+                for field in issue_type["fields"]:
                     # set some defaults for other required fields
-                    if field == 'reporter':
-                        payload["reporter"] = {'id': self.user_name}
-                    if field['required'] and field['id'] not in payload:
-                        if field['type'] == 'array':
-                            payload[field['id']] = [field['values'][0], ]
-                        elif field['type'] == 'string':
-                            payload[field['id']] = ''
+                    if field == "reporter":
+                        payload["reporter"] = {"id": self.user_name}
+                    if field["required"] and field["id"] not in payload:
+                        if field["type"] == "array":
+                            payload[field["id"]] = [field["values"][0]]
+                        elif field["type"] == "string":
+                            payload[field["id"]] = ""
         new_issue = self.client.create_issue(fields=payload)
-        web_url = self.host_name + '/browse/' + new_issue.key
+        web_url = self.host_name + "/browse/" + new_issue.key
         to_return = {
-            'id': new_issue.id,
-            'resource_url': new_issue.self,
-            'web_url': web_url
+            "id": new_issue.id,
+            "resource_url": new_issue.self,
+            "web_url": web_url,
         }
         return to_return
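For context, `create_issue` hands the assembled `payload` to the `jira` client library. A hedged sketch of that call shape, with placeholder server, credentials, and field values:

```python
from jira import JIRA

# placeholder server and credentials
client = JIRA(server="https://jira.example.com", basic_auth=("user", "secret"))
fields = {
    "project": {"key": "PROJ"},
    "summary": "Crash in checkout view",
    "description": "Traceback attached by AppEnlight",
    "issuetype": {"id": "1"},
}
new_issue = client.create_issue(fields=fields)
print(new_issue.key, new_issue.self)  # e.g. "PROJ-123" and its REST URL
```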
@@ -18,8 +18,7 @@ import logging

 import requests

-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 from appenlight.lib.ext_json import json

 _ = str
@@ -32,14 +31,12 @@ class NotFoundException(Exception):


 class SlackIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'slack'
-    }
+    __mapper_args__ = {"polymorphic_identity": "slack"}
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = 'Message via Slack'
+    integration_action = "Message via Slack"

     @classmethod
     def create_client(cls, api_token):
@@ -52,23 +49,17 @@ class SlackClient(object):
         self.api_url = api_url

     def make_request(self, data=None):
-        headers = {
-            'User-Agent': 'appenlight-slack',
-            'Content-Type': 'application/json'
-        }
+        headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"}
         try:
-            resp = getattr(requests, 'post')(self.api_url,
-                                             data=json.dumps(data),
-                                             headers=headers,
-                                             timeout=3)
+            resp = getattr(requests, "post")(
+                self.api_url, data=json.dumps(data), headers=headers, timeout=3
+            )
         except Exception as e:
-            raise IntegrationException(
-                _('Error communicating with Slack: %s') % (e,))
+            raise IntegrationException(_("Error communicating with Slack: %s") % (e,))
         if resp.status_code != requests.codes.ok:
-            msg = 'Error communicating with Slack - status code: %s'
+            msg = "Error communicating with Slack - status code: %s"
             raise IntegrationException(msg % resp.status_code)
         return resp

     def send(self, payload):
-        return self.make_request('/rooms/message', method='post',
-                                 data=payload).json()
+        return self.make_request("/rooms/message", method="post", data=payload).json()
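The reformatted `make_request` keeps the two behaviors that matter: a 3-second timeout and an explicit status-code check. The same pattern in standalone form (the error type is a placeholder; the original raises `IntegrationException`):

```python
import json
import requests

def post_json(url, data):
    headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"}
    try:
        # timeout prevents a slow Slack endpoint from blocking the worker
        resp = requests.post(url, data=json.dumps(data), headers=headers, timeout=3)
    except requests.RequestException as e:
        raise RuntimeError("Error communicating with Slack: %s" % (e,))
    if resp.status_code != requests.codes.ok:
        raise RuntimeError("Error communicating with Slack - status code: %s" % resp.status_code)
    return resp
```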
@@ -18,8 +18,7 @@ import logging

 import requests

-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 from appenlight.models.alert_channel import AlertChannel
 from appenlight.lib.ext_json import json

@@ -33,14 +32,12 @@ class NotFoundException(Exception):


 class WebhooksIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'webhooks'
-    }
+    __mapper_args__ = {"polymorphic_identity": "webhooks"}
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = 'Message via Webhooks'
+    integration_action = "Message via Webhooks"

     @classmethod
     def create_client(cls, url):
@@ -52,34 +49,33 @@ class WebhooksClient(object):
     def __init__(self, url):
         self.api_url = url

-    def make_request(self, url, method='get', data=None):
+    def make_request(self, url, method="get", data=None):
         headers = {
-            'Content-Type': 'application/json',
-            'User-Agent': 'appenlight-webhooks',
+            "Content-Type": "application/json",
+            "User-Agent": "appenlight-webhooks",
         }
         try:
             if data:
                 data = json.dumps(data)
-            resp = getattr(requests, method)(url, data=data, headers=headers,
-                                             timeout=3)
+            resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3)
         except Exception as e:
             raise IntegrationException(
-                _('Error communicating with Webhooks: {}').format(e))
+                _("Error communicating with Webhooks: {}").format(e)
+            )
         if resp.status_code > 299:
             raise IntegrationException(
-                'Error communicating with Webhooks - status code: {}'.format(
-                    resp.status_code))
+                "Error communicating with Webhooks - status code: {}".format(
+                    resp.status_code
+                )
+            )
         return resp

     def send_to_hook(self, payload):
-        return self.make_request(self.api_url, method='post',
-                                 data=payload).json()
+        return self.make_request(self.api_url, method="post", data=payload).json()


 class WebhooksAlertChannel(AlertChannel):
-    __mapper_args__ = {
-        'polymorphic_identity': 'webhooks'
-    }
+    __mapper_args__ = {"polymorphic_identity": "webhooks"}

     def notify_reports(self, **kwargs):
         """
@@ -95,17 +91,28 @@ class WebhooksAlertChannel(AlertChannel):
         """
         template_vars = self.get_notification_basic_vars(kwargs)
         payload = []
-        include_keys = ('id', 'http_status', 'report_type', 'resource_name',
-                        'front_url', 'resource_id', 'error', 'url_path',
-                        'tags', 'duration')
+        include_keys = (
+            "id",
+            "http_status",
+            "report_type",
+            "resource_name",
+            "front_url",
+            "resource_id",
+            "error",
+            "url_path",
+            "tags",
+            "duration",
+        )

-        for occurences, report in kwargs['reports']:
-            r_dict = report.last_report_ref.get_dict(kwargs['request'],
-                                                     include_keys=include_keys)
-            r_dict['group']['occurences'] = occurences
+        for occurences, report in kwargs["reports"]:
+            r_dict = report.last_report_ref.get_dict(
+                kwargs["request"], include_keys=include_keys
+            )
+            r_dict["group"]["occurences"] = occurences
             payload.append(r_dict)
         client = WebhooksIntegration.create_client(
-            self.integration.config['reports_webhook'])
+            self.integration.config["reports_webhook"]
+        )
         client.send_to_hook(payload)

     def notify_alert(self, **kwargs):
@@ -120,19 +127,19 @@ class WebhooksAlertChannel(AlertChannel):

         """
         payload = {
-            'alert_action': kwargs['event'].unified_alert_action(),
-            'alert_name': kwargs['event'].unified_alert_name(),
-            'event_time': kwargs['event'].end_date or kwargs[
-                'event'].start_date,
-            'resource_name': None,
-            'resource_id': None
+            "alert_action": kwargs["event"].unified_alert_action(),
+            "alert_name": kwargs["event"].unified_alert_name(),
+            "event_time": kwargs["event"].end_date or kwargs["event"].start_date,
+            "resource_name": None,
+            "resource_id": None,
         }
-        if kwargs['event'].values and kwargs['event'].values.get('reports'):
-            payload['reports'] = kwargs['event'].values.get('reports', [])
-        if 'application' in kwargs:
-            payload['resource_name'] = kwargs['application'].resource_name
-            payload['resource_id'] = kwargs['application'].resource_id
+        if kwargs["event"].values and kwargs["event"].values.get("reports"):
+            payload["reports"] = kwargs["event"].values.get("reports", [])
+        if "application" in kwargs:
+            payload["resource_name"] = kwargs["application"].resource_name
+            payload["resource_id"] = kwargs["application"].resource_id

         client = WebhooksIntegration.create_client(
-            self.integration.config['alerts_webhook'])
+            self.integration.config["alerts_webhook"]
+        )
         client.send_to_hook(payload)
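What a subscriber receives from `notify_alert` is the JSON-serialized `payload` built above. An illustrative (not captured-from-production) example of its shape:

```python
import json

# field values are invented for the example
payload = {
    "alert_action": "open",
    "alert_name": "error_report_alert",
    "event_time": "2019-04-01T12:00:00",
    "resource_name": "my-app",
    "resource_id": 7,
    "reports": [],
}
print(json.dumps(payload, indent=2))
```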
@@ -29,21 +29,23 @@ log = logging.getLogger(__name__)


 class Log(Base, BaseModel):
-    __tablename__ = 'logs'
-    __table_args__ = {'implicit_returning': False}
+    __tablename__ = "logs"
+    __table_args__ = {"implicit_returning": False}

     log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
-    resource_id = sa.Column(sa.Integer(),
-                            sa.ForeignKey('applications.resource_id',
-                                          onupdate='CASCADE',
-                                          ondelete='CASCADE'),
-                            nullable=False,
-                            index=True)
-    log_level = sa.Column(sa.Unicode, nullable=False, index=True,
-                          default='INFO')
-    message = sa.Column(sa.UnicodeText(), default='')
-    timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
-                          server_default=sa.func.now())
+    resource_id = sa.Column(
+        sa.Integer(),
+        sa.ForeignKey(
+            "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
+        ),
+        nullable=False,
+        index=True,
+    )
+    log_level = sa.Column(sa.Unicode, nullable=False, index=True, default="INFO")
+    message = sa.Column(sa.UnicodeText(), default="")
+    timestamp = sa.Column(
+        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
+    )
     request_id = sa.Column(sa.Unicode())
     namespace = sa.Column(sa.Unicode())
     primary_key = sa.Column(sa.Unicode())
@@ -52,39 +54,40 @@ class Log(Base, BaseModel):
     permanent = sa.Column(sa.Boolean(), nullable=False, default=False)

     def __str__(self):
-        return self.__unicode__().encode('utf8')
+        return self.__unicode__().encode("utf8")

     def __unicode__(self):
-        return '<Log id:%s, lv:%s, ns:%s >' % (
-            self.log_id, self.log_level, self.namespace)
+        return "<Log id:%s, lv:%s, ns:%s >" % (
+            self.log_id,
+            self.log_level,
+            self.namespace,
+        )

     def set_data(self, data, resource):
-        level = data.get('log_level').upper()
+        level = data.get("log_level").upper()
         self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN)
-        self.message = data.get('message', '')
-        server_name = data.get('server', '').lower() or 'unknown'
-        self.tags = {
-            'server_name': server_name
-        }
-        if data.get('tags'):
-            for tag_tuple in data['tags']:
+        self.message = data.get("message", "")
+        server_name = data.get("server", "").lower() or "unknown"
+        self.tags = {"server_name": server_name}
+        if data.get("tags"):
+            for tag_tuple in data["tags"]:
                 self.tags[tag_tuple[0]] = tag_tuple[1]
-        self.timestamp = data['date']
-        r_id = data.get('request_id', '')
+        self.timestamp = data["date"]
+        r_id = data.get("request_id", "")
         if not r_id:
-            r_id = ''
-        self.request_id = r_id.replace('-', '')
+            r_id = ""
+        self.request_id = r_id.replace("-", "")
         self.resource_id = resource.resource_id
-        self.namespace = data.get('namespace') or ''
-        self.permanent = data.get('permanent')
-        self.primary_key = data.get('primary_key')
+        self.namespace = data.get("namespace") or ""
+        self.permanent = data.get("permanent")
+        self.primary_key = data.get("primary_key")
         if self.primary_key is not None:
-            self.tags['appenlight_primary_key'] = self.primary_key
+            self.tags["appenlight_primary_key"] = self.primary_key

     def get_dict(self):
         instance_dict = super(Log, self).get_dict()
-        instance_dict['log_level'] = LogLevel.key_from_value(self.log_level)
-        instance_dict['resource_name'] = self.application.resource_name
+        instance_dict["log_level"] = LogLevel.key_from_value(self.log_level)
+        instance_dict["resource_name"] = self.application.resource_name
         return instance_dict

     @property
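`set_data` expects `tags` as (name, value) pairs and strips dashes from the request id. A quick worked example of that normalization, with an illustrative input dict:

```python
# illustrative input in the shape Log.set_data expects
data = {"tags": [("color", "red"), ("server", "web01")], "request_id": "a2b4-11ee-99dd"}

tags = {"server_name": "unknown"}
for tag_tuple in data["tags"]:
    tags[tag_tuple[0]] = tag_tuple[1]
request_id = data["request_id"].replace("-", "")

print(tags)        # {'server_name': 'unknown', 'color': 'red', 'server': 'web01'}
print(request_id)  # a2b411ee99dd
```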
@@ -92,39 +95,38 @@ class Log(Base, BaseModel):
         if not self.primary_key:
             return None

-        to_hash = '{}_{}_{}'.format(self.resource_id, self.primary_key,
-                                    self.namespace)
-        return hashlib.sha1(to_hash.encode('utf8')).hexdigest()
+        to_hash = "{}_{}_{}".format(self.resource_id, self.primary_key, self.namespace)
+        return hashlib.sha1(to_hash.encode("utf8")).hexdigest()

     def es_doc(self):
         tags = {}
         tag_list = []
         for name, value in self.tags.items():
             # replace dot in indexed tag name
-            name = name.replace('.', '_')
+            name = name.replace(".", "_")
             tag_list.append(name)
             tags[name] = {
                 "values": convert_es_type(value),
-                "numeric_values": value if (
-                    isinstance(value, (int, float)) and
-                    not isinstance(value, bool)) else None
+                "numeric_values": value
+                if (isinstance(value, (int, float)) and not isinstance(value, bool))
+                else None,
             }
         return {
-            'pg_id': str(self.log_id),
-            'delete_hash': self.delete_hash,
-            'resource_id': self.resource_id,
-            'request_id': self.request_id,
-            'log_level': LogLevel.key_from_value(self.log_level),
-            'timestamp': self.timestamp,
-            'message': self.message if self.message else '',
-            'namespace': self.namespace if self.namespace else '',
-            'tags': tags,
-            'tag_list': tag_list
+            "pg_id": str(self.log_id),
+            "delete_hash": self.delete_hash,
+            "resource_id": self.resource_id,
+            "request_id": self.request_id,
+            "log_level": LogLevel.key_from_value(self.log_level),
+            "timestamp": self.timestamp,
+            "message": self.message if self.message else "",
+            "namespace": self.namespace if self.namespace else "",
+            "tags": tags,
+            "tag_list": tag_list,
         }

     @property
     def partition_id(self):
         if self.permanent:
-            return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m')
+            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m")
         else:
-            return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m_%d')
+            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m_%d")
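`partition_id` routes permanent logs to a monthly index and transient ones to a daily index. A worked example of the naming scheme:

```python
from datetime import datetime

ts = datetime(2019, 4, 17)
print("rcae_l_%s" % ts.strftime("%Y_%m"))     # rcae_l_2019_04    (permanent: monthly)
print("rcae_l_%s" % ts.strftime("%Y_%m_%d"))  # rcae_l_2019_04_17 (default: daily)
```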
@@ -25,40 +25,44 @@ from appenlight.models import Base


 class Metric(Base, BaseModel):
-    __tablename__ = 'metrics'
-    __table_args__ = {'implicit_returning': False}
+    __tablename__ = "metrics"
+    __table_args__ = {"implicit_returning": False}

     pkey = sa.Column(sa.BigInteger(), primary_key=True)
-    resource_id = sa.Column(sa.Integer(),
-                            sa.ForeignKey('applications.resource_id'),
-                            nullable=False, primary_key=True)
-    timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
-                          server_default=sa.func.now())
+    resource_id = sa.Column(
+        sa.Integer(),
+        sa.ForeignKey("applications.resource_id"),
+        nullable=False,
+        primary_key=True,
+    )
+    timestamp = sa.Column(
+        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
+    )
     tags = sa.Column(JSON(), default={})
     namespace = sa.Column(sa.Unicode(255))

     @property
     def partition_id(self):
-        return 'rcae_m_%s' % self.timestamp.strftime('%Y_%m_%d')
+        return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d")

     def es_doc(self):
         tags = {}
         tag_list = []
         for name, value in self.tags.items():
             # replace dot in indexed tag name
-            name = name.replace('.', '_')
+            name = name.replace(".", "_")
             tag_list.append(name)
             tags[name] = {
                 "values": convert_es_type(value),
-                "numeric_values": value if (
-                    isinstance(value, (int, float)) and
-                    not isinstance(value, bool)) else None
+                "numeric_values": value
+                if (isinstance(value, (int, float)) and not isinstance(value, bool))
+                else None,
             }

         return {
-            'resource_id': self.resource_id,
-            'timestamp': self.timestamp,
-            'namespace': self.namespace,
-            'tags': tags,
-            'tag_list': tag_list
+            "resource_id": self.resource_id,
+            "timestamp": self.timestamp,
+            "namespace": self.namespace,
+            "tags": tags,
+            "tag_list": tag_list,
         }
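The `not isinstance(value, bool)` guard in `es_doc` matters because `bool` is a subclass of `int` in Python; without it, `True` would be indexed as the number 1:

```python
value = True
print(isinstance(value, int))  # True - bool is a subclass of int

numeric = value if (isinstance(value, (int, float)) and not isinstance(value, bool)) else None
print(numeric)  # None - booleans stay out of the numeric field
```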
@@ -22,19 +22,19 @@ from . import Base


 class PluginConfig(Base, BaseModel):
-    __tablename__ = 'plugin_configs'
+    __tablename__ = "plugin_configs"

     id = sa.Column(sa.Integer, primary_key=True)
     plugin_name = sa.Column(sa.Unicode)
     section = sa.Column(sa.Unicode)
     config = sa.Column(JSON, nullable=False)
-    resource_id = sa.Column(sa.Integer(),
-                            sa.ForeignKey('resources.resource_id',
-                                          onupdate='cascade',
-                                          ondelete='cascade'))
-    owner_id = sa.Column(sa.Integer(),
-                         sa.ForeignKey('users.id', onupdate='cascade',
-                                       ondelete='cascade'))
+    resource_id = sa.Column(
+        sa.Integer(),
+        sa.ForeignKey("resources.resource_id", onupdate="cascade", ondelete="cascade"),
+    )
+    owner_id = sa.Column(
+        sa.Integer(), sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade")
+    )

     def __json__(self, request):
         return self.get_dict()
@@ -36,51 +36,55 @@ from ziggurat_foundations.models.base import BaseModel
 log = logging.getLogger(__name__)

 REPORT_TYPE_MATRIX = {
-    'http_status': {"type": 'int',
-                    "ops": ('eq', 'ne', 'ge', 'le',)},
-    'group:priority': {"type": 'int',
-                       "ops": ('eq', 'ne', 'ge', 'le',)},
-    'duration': {"type": 'float',
-                 "ops": ('ge', 'le',)},
-    'url_domain': {"type": 'unicode',
-                   "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
-    'url_path': {"type": 'unicode',
-                 "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
-    'error': {"type": 'unicode',
-              "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
-    'tags:server_name': {"type": 'unicode',
-                         "ops": ('eq', 'ne', 'startswith', 'endswith',
-                                 'contains',)},
-    'traceback': {"type": 'unicode',
-                  "ops": ('contains',)},
-    'group:occurences': {"type": 'int',
-                         "ops": ('eq', 'ne', 'ge', 'le',)}
+    "http_status": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
+    "group:priority": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
+    "duration": {"type": "float", "ops": ("ge", "le")},
+    "url_domain": {
+        "type": "unicode",
+        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
+    },
+    "url_path": {
+        "type": "unicode",
+        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
+    },
+    "error": {
+        "type": "unicode",
+        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
+    },
+    "tags:server_name": {
+        "type": "unicode",
+        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
+    },
+    "traceback": {"type": "unicode", "ops": ("contains",)},
+    "group:occurences": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
 }


 class Report(Base, BaseModel):
-    __tablename__ = 'reports'
-    __table_args__ = {'implicit_returning': False}
+    __tablename__ = "reports"
+    __table_args__ = {"implicit_returning": False}

     id = sa.Column(sa.Integer, nullable=False, primary_key=True)
-    group_id = sa.Column(sa.BigInteger,
-                         sa.ForeignKey('reports_groups.id', ondelete='cascade',
-                                       onupdate='cascade'))
+    group_id = sa.Column(
+        sa.BigInteger,
+        sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
+    )
     resource_id = sa.Column(sa.Integer(), nullable=False, index=True)
     report_type = sa.Column(sa.Integer(), nullable=False, index=True)
     error = sa.Column(sa.UnicodeText(), index=True)
     extra = sa.Column(JSON(), default={})
     request = sa.Column(JSON(), nullable=False, default={})
-    ip = sa.Column(sa.String(39), index=True, default='')
-    username = sa.Column(sa.Unicode(255), default='')
-    user_agent = sa.Column(sa.Unicode(255), default='')
+    ip = sa.Column(sa.String(39), index=True, default="")
+    username = sa.Column(sa.Unicode(255), default="")
+    user_agent = sa.Column(sa.Unicode(255), default="")
     url = sa.Column(sa.UnicodeText(), index=True)
     request_id = sa.Column(sa.Text())
     request_stats = sa.Column(JSON(), nullable=False, default={})
     traceback = sa.Column(JSON(), nullable=False, default=None)
     traceback_hash = sa.Column(sa.Text())
-    start_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
-                           server_default=sa.func.now())
+    start_time = sa.Column(
+        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
+    )
     end_time = sa.Column(sa.DateTime())
     duration = sa.Column(sa.Float, default=0)
     http_status = sa.Column(sa.Integer, index=True)
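`REPORT_TYPE_MATRIX` whitelists the type and comparison operators allowed per filterable field. A hedged sketch of how a user-supplied filter could be checked against it (`validate_filter` is illustrative, not a function from this codebase):

```python
REPORT_TYPE_MATRIX = {
    "http_status": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
    "duration": {"type": "float", "ops": ("ge", "le")},
}

def validate_filter(field, op):
    # accept only whitelisted field/operator pairs
    spec = REPORT_TYPE_MATRIX.get(field)
    return spec is not None and op in spec["ops"]

print(validate_filter("http_status", "ge"))  # True
print(validate_filter("duration", "eq"))     # False - eq is not allowed
```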
@@ -89,99 +93,104 @@ class Report(Base, BaseModel):
     tags = sa.Column(JSON(), nullable=False, default={})
     language = sa.Column(sa.Integer(), default=0)
     # this is used to determine partition for the report
-    report_group_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
-                                  server_default=sa.func.now())
+    report_group_time = sa.Column(
+        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
+    )

     logs = sa.orm.relationship(
-        'Log',
-        lazy='dynamic',
+        "Log",
+        lazy="dynamic",
         passive_deletes=True,
         passive_updates=True,
         primaryjoin="and_(Report.request_id==Log.request_id, "
-                    "Log.request_id != None, Log.request_id != '')",
-        foreign_keys='[Log.request_id]')
-
-    slow_calls = sa.orm.relationship('SlowCall',
-                                     backref='detail',
-                                     cascade="all, delete-orphan",
-                                     passive_deletes=True,
-                                     passive_updates=True,
-                                     order_by='SlowCall.timestamp')
+        "Log.request_id != None, Log.request_id != '')",
+        foreign_keys="[Log.request_id]",
+    )
+
+    slow_calls = sa.orm.relationship(
+        "SlowCall",
+        backref="detail",
+        cascade="all, delete-orphan",
+        passive_deletes=True,
+        passive_updates=True,
+        order_by="SlowCall.timestamp",
+    )

     def set_data(self, data, resource, protocol_version=None):
-        self.http_status = data['http_status']
-        self.priority = data['priority']
-        self.error = data['error']
-        report_language = data.get('language', '').lower()
+        self.http_status = data["http_status"]
+        self.priority = data["priority"]
+        self.error = data["error"]
+        report_language = data.get("language", "").lower()
         self.language = getattr(Language, report_language, Language.unknown)
         # we need temp holder here to decide later
         # if we want to to commit the tags if report is marked for creation
-        self.tags = {
-            'server_name': data['server'],
-            'view_name': data['view_name']
-        }
-        if data.get('tags'):
-            for tag_tuple in data['tags']:
+        self.tags = {"server_name": data["server"], "view_name": data["view_name"]}
+        if data.get("tags"):
+            for tag_tuple in data["tags"]:
                 self.tags[tag_tuple[0]] = tag_tuple[1]
-        self.traceback = data['traceback']
+        self.traceback = data["traceback"]
         stripped_traceback = self.stripped_traceback()
-        tb_repr = repr(stripped_traceback).encode('utf8')
+        tb_repr = repr(stripped_traceback).encode("utf8")
         self.traceback_hash = hashlib.sha1(tb_repr).hexdigest()
-        url_info = urllib.parse.urlsplit(
-            data.get('url', ''), allow_fragments=False)
+        url_info = urllib.parse.urlsplit(data.get("url", ""), allow_fragments=False)
         self.url_domain = url_info.netloc[:128]
         self.url_path = url_info.path[:2048]
-        self.occurences = data['occurences']
+        self.occurences = data["occurences"]
         if self.error:
             self.report_type = ReportType.error
         else:
             self.report_type = ReportType.slow

         # but if its status 404 its 404 type
-        if self.http_status in [404, '404'] or self.error == '404 Not Found':
+        if self.http_status in [404, "404"] or self.error == "404 Not Found":
             self.report_type = ReportType.not_found
-            self.error = ''
+            self.error = ""

-        self.generate_grouping_hash(data.get('appenlight.group_string',
-                                             data.get('group_string')),
-                                    resource.default_grouping,
-                                    protocol_version)
+        self.generate_grouping_hash(
+            data.get("appenlight.group_string", data.get("group_string")),
+            resource.default_grouping,
+            protocol_version,
+        )

         # details
-        if data['http_status'] in [404, '404']:
-            data = {"username": data["username"],
-                    "ip": data["ip"],
-                    "url": data["url"],
-                    "user_agent": data["user_agent"]}
-            if data.get('HTTP_REFERER') or data.get('http_referer'):
-                data['HTTP_REFERER'] = data.get(
-                    'HTTP_REFERER', '') or data.get('http_referer', '')
+        if data["http_status"] in [404, "404"]:
+            data = {
+                "username": data["username"],
+                "ip": data["ip"],
+                "url": data["url"],
+                "user_agent": data["user_agent"],
+            }
+            if data.get("HTTP_REFERER") or data.get("http_referer"):
+                data["HTTP_REFERER"] = data.get("HTTP_REFERER", "") or data.get(
+                    "http_referer", ""
+                )

         self.resource_id = resource.resource_id
-        self.username = data['username']
-        self.user_agent = data['user_agent']
-        self.ip = data['ip']
+        self.username = data["username"]
+        self.user_agent = data["user_agent"]
+        self.ip = data["ip"]
         self.extra = {}
-        if data.get('extra'):
-            for extra_tuple in data['extra']:
+        if data.get("extra"):
+            for extra_tuple in data["extra"]:
                 self.extra[extra_tuple[0]] = extra_tuple[1]

-        self.url = data['url']
-        self.request_id = data.get('request_id', '').replace('-', '') or str(
-            uuid.uuid4())
-        request_data = data.get('request', {})
+        self.url = data["url"]
+        self.request_id = data.get("request_id", "").replace("-", "") or str(
+            uuid.uuid4()
+        )
+        request_data = data.get("request", {})

         self.request = request_data
-        self.request_stats = data.get('request_stats', {})
-        traceback = data.get('traceback')
+        self.request_stats = data.get("request_stats", {})
+        traceback = data.get("traceback")
         if not traceback:
-            traceback = data.get('frameinfo')
+            traceback = data.get("frameinfo")
         self.traceback = traceback
-        start_date = convert_date(data.get('start_time'))
+        start_date = convert_date(data.get("start_time"))
         if not self.start_time or self.start_time < start_date:
             self.start_time = start_date

-        self.end_time = convert_date(data.get('end_time'), False)
+        self.end_time = convert_date(data.get("end_time"), False)
         self.duration = 0

         if self.start_time and self.end_time:
@@ -190,81 +199,85 @@ class Report(Base, BaseModel):

         # update tags with other vars
         if self.username:
-            self.tags['user_name'] = self.username
-        self.tags['report_language'] = Language.key_from_value(self.language)
+            self.tags["user_name"] = self.username
+        self.tags["report_language"] = Language.key_from_value(self.language)

     def add_slow_calls(self, data, report_group):
         slow_calls = []
-        for call in data.get('slow_calls', []):
+        for call in data.get("slow_calls", []):
             sc_inst = SlowCall()
-            sc_inst.set_data(call, resource_id=self.resource_id,
-                             report_group=report_group)
+            sc_inst.set_data(
+                call, resource_id=self.resource_id, report_group=report_group
+            )
             slow_calls.append(sc_inst)
         self.slow_calls.extend(slow_calls)
         return slow_calls

-    def get_dict(self, request, details=False, exclude_keys=None,
-                 include_keys=None):
+    def get_dict(self, request, details=False, exclude_keys=None, include_keys=None):
         from appenlight.models.services.report_group import ReportGroupService
+
         instance_dict = super(Report, self).get_dict()
-        instance_dict['req_stats'] = self.req_stats()
-        instance_dict['group'] = {}
-        instance_dict['group']['id'] = self.report_group.id
-        instance_dict['group'][
-            'total_reports'] = self.report_group.total_reports
-        instance_dict['group']['last_report'] = self.report_group.last_report
-        instance_dict['group']['priority'] = self.report_group.priority
-        instance_dict['group']['occurences'] = self.report_group.occurences
-        instance_dict['group'][
-            'last_timestamp'] = self.report_group.last_timestamp
-        instance_dict['group'][
-            'first_timestamp'] = self.report_group.first_timestamp
-        instance_dict['group']['public'] = self.report_group.public
-        instance_dict['group']['fixed'] = self.report_group.fixed
-        instance_dict['group']['read'] = self.report_group.read
-        instance_dict['group'][
-            'average_duration'] = self.report_group.average_duration
-
-        instance_dict[
-            'resource_name'] = self.report_group.application.resource_name
-        instance_dict['report_type'] = self.report_type
-
-        if instance_dict['http_status'] == 404 and not instance_dict['error']:
-            instance_dict['error'] = '404 Not Found'
+        instance_dict["req_stats"] = self.req_stats()
+        instance_dict["group"] = {}
+        instance_dict["group"]["id"] = self.report_group.id
+        instance_dict["group"]["total_reports"] = self.report_group.total_reports
+        instance_dict["group"]["last_report"] = self.report_group.last_report
+        instance_dict["group"]["priority"] = self.report_group.priority
+        instance_dict["group"]["occurences"] = self.report_group.occurences
+        instance_dict["group"]["last_timestamp"] = self.report_group.last_timestamp
+        instance_dict["group"]["first_timestamp"] = self.report_group.first_timestamp
+        instance_dict["group"]["public"] = self.report_group.public
+        instance_dict["group"]["fixed"] = self.report_group.fixed
+        instance_dict["group"]["read"] = self.report_group.read
+        instance_dict["group"]["average_duration"] = self.report_group.average_duration
+
+        instance_dict["resource_name"] = self.report_group.application.resource_name
+        instance_dict["report_type"] = self.report_type
+
+        if instance_dict["http_status"] == 404 and not instance_dict["error"]:
+            instance_dict["error"] = "404 Not Found"

         if details:
-            instance_dict['affected_users_count'] = \
-                ReportGroupService.affected_users_count(self.report_group)
-            instance_dict['top_affected_users'] = [
-                {'username': u.username, 'count': u.count} for u in
-                ReportGroupService.top_affected_users(self.report_group)]
-            instance_dict['application'] = {'integrations': []}
+            instance_dict[
+                "affected_users_count"
+            ] = ReportGroupService.affected_users_count(self.report_group)
+            instance_dict["top_affected_users"] = [
+                {"username": u.username, "count": u.count}
+                for u in ReportGroupService.top_affected_users(self.report_group)
+            ]
+            instance_dict["application"] = {"integrations": []}
             for integration in self.report_group.application.integrations:
                 if integration.front_visible:
-                    instance_dict['application']['integrations'].append(
-                        {'name': integration.integration_name,
-                         'action': integration.integration_action})
-            instance_dict['comments'] = [c.get_dict() for c in
-                                         self.report_group.comments]
-
-            instance_dict['group']['next_report'] = None
-            instance_dict['group']['previous_report'] = None
+                    instance_dict["application"]["integrations"].append(
+                        {
+                            "name": integration.integration_name,
+                            "action": integration.integration_action,
+                        }
+                    )
+            instance_dict["comments"] = [
+                c.get_dict() for c in self.report_group.comments
+            ]
+
+            instance_dict["group"]["next_report"] = None
+            instance_dict["group"]["previous_report"] = None
             next_in_group = self.get_next_in_group(request)
             previous_in_group = self.get_previous_in_group(request)
             if next_in_group:
-                instance_dict['group']['next_report'] = next_in_group
+                instance_dict["group"]["next_report"] = next_in_group
             if previous_in_group:
-                instance_dict['group']['previous_report'] = previous_in_group
+                instance_dict["group"]["previous_report"] = previous_in_group

         # slow call ordering
         def find_parent(row, data):
             for r in reversed(data):
                 try:
-                    if (row['timestamp'] > r['timestamp'] and
-                            row['end_time'] < r['end_time']):
+                    if (
+                        row["timestamp"] > r["timestamp"]
+                        and row["end_time"] < r["end_time"]
+                    ):
                         return r
                 except TypeError as e:
-                    log.warning('reports_view.find_parent: %s' % e)
+                    log.warning("reports_view.find_parent: %s" % e)
             return None

         new_calls = []
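`find_parent` above, together with the loop in the next hunk, nests slow calls by interval containment: a call becomes the child of the most recent call whose time window encloses it. A self-contained sketch of the idea, with plain numbers standing in for the timestamp/end_time datetimes:

```python
calls = [
    {"name": "request", "timestamp": 0, "end_time": 10, "children": []},
    {"name": "sql", "timestamp": 2, "end_time": 5, "children": []},
]

def find_parent(row, data):
    for r in reversed(data):
        if row["timestamp"] > r["timestamp"] and row["end_time"] < r["end_time"]:
            return r
    return None

roots = []
while calls:
    # walk from the end so children are attached before their parents
    for x in range(len(calls) - 1, -1, -1):
        parent = find_parent(calls[x], calls)
        if parent:
            parent["children"].append(calls[x])
        else:
            roots.append(calls[x])
        del calls[x]
        break

print(roots[0]["name"], roots[0]["children"][0]["name"])  # request sql
```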
@@ -274,24 +287,23 @@ class Report(Base, BaseModel):
             for x in range(len(calls) - 1, -1, -1):
                 parent = find_parent(calls[x], calls)
                 if parent:
-                    parent['children'].append(calls[x])
+                    parent["children"].append(calls[x])
                 else:
                     # no parent at all? append to new calls anyways
                     new_calls.append(calls[x])
                     # print 'append', calls[x]
                 del calls[x]
                 break
-        instance_dict['slow_calls'] = new_calls
+        instance_dict["slow_calls"] = new_calls

-        instance_dict['front_url'] = self.get_public_url(request)
+        instance_dict["front_url"] = self.get_public_url(request)

         exclude_keys_list = exclude_keys or []
         include_keys_list = include_keys or []
         for k in list(instance_dict.keys()):
-            if k == 'group':
+            if k == "group":
                 continue
-            if (k in exclude_keys_list or
-                    (k not in include_keys_list and include_keys)):
+            if k in exclude_keys_list or (k not in include_keys_list and include_keys):
                 del instance_dict[k]
         return instance_dict

@@ -301,19 +313,20 @@ class Report(Base, BaseModel):
             "query": {
                 "filtered": {
                     "filter": {
-                        "and": [{"term": {"group_id": self.group_id}},
-                                {"range": {"pg_id": {"lt": self.id}}}]
+                        "and": [
+                            {"term": {"group_id": self.group_id}},
+                            {"range": {"pg_id": {"lt": self.id}}},
+                        ]
                     }
                 }
             },
-            "sort": [
-                {"_doc": {"order": "desc"}},
-            ],
+            "sort": [{"_doc": {"order": "desc"}}],
         }
-        result = request.es_conn.search(body=query, index=self.partition_id,
-                                        doc_type='report')
-        if result['hits']['total']:
-            return result['hits']['hits'][0]['_source']['pg_id']
+        result = request.es_conn.search(
+            body=query, index=self.partition_id, doc_type="report"
+        )
+        if result["hits"]["total"]:
+            return result["hits"]["hits"][0]["_source"]["pg_id"]

     def get_next_in_group(self, request):
         query = {
@@ -321,19 +334,20 @@ class Report(Base, BaseModel):
             "query": {
                 "filtered": {
                     "filter": {
-                        "and": [{"term": {"group_id": self.group_id}},
-                                {"range": {"pg_id": {"gt": self.id}}}]
+                        "and": [
+                            {"term": {"group_id": self.group_id}},
+                            {"range": {"pg_id": {"gt": self.id}}},
+                        ]
                     }
                 }
             },
-            "sort": [
-                {"_doc": {"order": "asc"}},
-            ],
+            "sort": [{"_doc": {"order": "asc"}}],
         }
-        result = request.es_conn.search(body=query, index=self.partition_id,
-                                        doc_type='report')
-        if result['hits']['total']:
-            return result['hits']['hits'][0]['_source']['pg_id']
+        result = request.es_conn.search(
+            body=query, index=self.partition_id, doc_type="report"
+        )
+        if result["hits"]["total"]:
+            return result["hits"]["hits"][0]["_source"]["pg_id"]

     def get_public_url(self, request=None, report_group=None, _app_url=None):
         """
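`get_previous_in_group` and `get_next_in_group` differ only in the range operator and sort direction (this is the legacy Elasticsearch 1.x `filtered` DSL, as used throughout this file). A sketch that builds both bodies from one helper; `sibling_query` is illustrative, not a function from the codebase:

```python
def sibling_query(group_id, report_id, direction):
    op, order = ("gt", "asc") if direction == "next" else ("lt", "desc")
    return {
        "query": {
            "filtered": {
                "filter": {
                    "and": [
                        {"term": {"group_id": group_id}},
                        {"range": {"pg_id": {op: report_id}}},
                    ]
                }
            }
        },
        "sort": [{"_doc": {"order": order}}],
    }

print(sibling_query(12, 345, "next")["sort"])  # [{'_doc': {'order': 'asc'}}]
```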
@@ -341,53 +355,51 @@ class Report(Base, BaseModel):
         """
         if not request:
             request = get_current_request()
-        url = request.route_url('/', _app_url=_app_url)
+        url = request.route_url("/", _app_url=_app_url)
         if report_group:
-            return (url + 'ui/report/%s/%s') % (report_group.id, self.id)
-        return (url + 'ui/report/%s/%s') % (self.group_id, self.id)
+            return (url + "ui/report/%s/%s") % (report_group.id, self.id)
+        return (url + "ui/report/%s/%s") % (self.group_id, self.id)

     def req_stats(self):
         stats = self.request_stats.copy()
-        stats['percentages'] = {}
-        stats['percentages']['main'] = 100.0
-        main = stats.get('main', 0.0)
+        stats["percentages"] = {}
+        stats["percentages"]["main"] = 100.0
+        main = stats.get("main", 0.0)
         if not main:
             return None
         for name, call_time in stats.items():
-            if ('calls' not in name and 'main' not in name and
-                    'percentages' not in name):
-                stats['main'] -= call_time
-                stats['percentages'][name] = math.floor(
-                    (call_time / main * 100.0))
-                stats['percentages']['main'] -= stats['percentages'][name]
-        if stats['percentages']['main'] < 0.0:
-            stats['percentages']['main'] = 0.0
-            stats['main'] = 0.0
+            if "calls" not in name and "main" not in name and "percentages" not in name:
+                stats["main"] -= call_time
+                stats["percentages"][name] = math.floor((call_time / main * 100.0))
+                stats["percentages"]["main"] -= stats["percentages"][name]
+        if stats["percentages"]["main"] < 0.0:
+            stats["percentages"]["main"] = 0.0
+            stats["main"] = 0.0
         return stats

-    def generate_grouping_hash(self, hash_string=None, default_grouping=None,
-                               protocol_version=None):
+    def generate_grouping_hash(
+        self, hash_string=None, default_grouping=None, protocol_version=None
+    ):
         """
         Generates SHA1 hash that will be used to group reports together
         """
         if not hash_string:
-            location = self.tags.get('view_name') or self.url_path;
-            server_name = self.tags.get('server_name') or ''
-            if default_grouping == 'url_traceback':
-                hash_string = '%s_%s_%s' % (self.traceback_hash, location,
-                                            self.error)
+            location = self.tags.get("view_name") or self.url_path
+            server_name = self.tags.get("server_name") or ""
+            if default_grouping == "url_traceback":
+                hash_string = "%s_%s_%s" % (self.traceback_hash, location, self.error)
                 if self.language == Language.javascript:
-                    hash_string = '%s_%s' % (self.traceback_hash, self.error)
+                    hash_string = "%s_%s" % (self.traceback_hash, self.error)

-            elif default_grouping == 'traceback_server':
-                hash_string = '%s_%s' % (self.traceback_hash, server_name)
+            elif default_grouping == "traceback_server":
+                hash_string = "%s_%s" % (self.traceback_hash, server_name)
                 if self.language == Language.javascript:
-                    hash_string = '%s_%s' % (self.traceback_hash, server_name)
+                    hash_string = "%s_%s" % (self.traceback_hash, server_name)
             else:
-                hash_string = '%s_%s' % (self.error, location)
+                hash_string = "%s_%s" % (self.error, location)
         month = datetime.utcnow().date().replace(day=1)
-        hash_string = '{}_{}'.format(month, hash_string)
-        binary_string = hash_string.encode('utf8')
+        hash_string = "{}_{}".format(month, hash_string)
+        binary_string = hash_string.encode("utf8")
         self.grouping_hash = hashlib.sha1(binary_string).hexdigest()
         return self.grouping_hash

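The grouping hash is prefixed with the first day of the current month, so a recurring error starts a fresh group each month. A worked example with illustrative values:

```python
import hashlib
from datetime import datetime

error, location = "IntegrityError", "/checkout"  # illustrative values
month = datetime.utcnow().date().replace(day=1)
hash_string = "{}_{}".format(month, "%s_%s" % (error, location))
print(hashlib.sha1(hash_string.encode("utf8")).hexdigest())
```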
@@ -399,7 +411,7 @@ class Report(Base, BaseModel):

         if isinstance(stripped_traceback, list):
             for row in stripped_traceback:
-                row.pop('vars', None)
+                row.pop("vars", None)
         return stripped_traceback

     def notify_channel(self, report_group):
@@ -407,78 +419,81 @@ class Report(Base, BaseModel):
         Sends notification to websocket channel
         """
         settings = get_current_registry().settings
-        log.info('notify channelstream')
+        log.info("notify channelstream")
         if self.report_type != ReportType.error:
             return
         payload = {
-            'type': 'message',
-            "user": '__system__',
-            "channel": 'app_%s' % self.resource_id,
-            'message': {
-                'topic': 'front_dashboard.new_topic',
-                'report': {
-                    'group': {
-                        'priority': report_group.priority,
-                        'first_timestamp': report_group.first_timestamp,
-                        'last_timestamp': report_group.last_timestamp,
-                        'average_duration': report_group.average_duration,
-                        'occurences': report_group.occurences
+            "type": "message",
+            "user": "__system__",
+            "channel": "app_%s" % self.resource_id,
+            "message": {
+                "topic": "front_dashboard.new_topic",
+                "report": {
+                    "group": {
+                        "priority": report_group.priority,
+                        "first_timestamp": report_group.first_timestamp,
+                        "last_timestamp": report_group.last_timestamp,
+                        "average_duration": report_group.average_duration,
+                        "occurences": report_group.occurences,
                     },
-                    'report_id': self.id,
-                    'group_id': self.group_id,
-                    'resource_id': self.resource_id,
-                    'http_status': self.http_status,
-                    'url_domain': self.url_domain,
-                    'url_path': self.url_path,
-                    'error': self.error or '',
-                    'server': self.tags.get('server_name'),
-                    'view_name': self.tags.get('view_name'),
-                    'front_url': self.get_public_url(),
-                }
-            }
-
-        }
-        channelstream_request(settings['cometd.secret'], '/message', [payload],
-                              servers=[settings['cometd_servers']])
+                    "report_id": self.id,
+                    "group_id": self.group_id,
+                    "resource_id": self.resource_id,
+                    "http_status": self.http_status,
+                    "url_domain": self.url_domain,
+                    "url_path": self.url_path,
+                    "error": self.error or "",
+                    "server": self.tags.get("server_name"),
+                    "view_name": self.tags.get("view_name"),
+                    "front_url": self.get_public_url(),
+                },
+            },
+        }
+        channelstream_request(
+            settings["cometd.secret"],
+            "/message",
+            [payload],
+            servers=[settings["cometd_servers"]],
+        )

     def es_doc(self):
         tags = {}
         tag_list = []
         for name, value in self.tags.items():
-            name = name.replace('.', '_')
+            name = name.replace(".", "_")
             tag_list.append(name)
             tags[name] = {
                 "values": convert_es_type(value),
-                "numeric_values": value if (
-                    isinstance(value, (int, float)) and
-                    not isinstance(value, bool)) else None}
+                "numeric_values": value
+                if (isinstance(value, (int, float)) and not isinstance(value, bool))
+                else None,
+            }

-        if 'user_name' not in self.tags and self.username:
-            tags["user_name"] = {"value": [self.username],
-                                 "numeric_value": None}
+        if "user_name" not in self.tags and self.username:
+            tags["user_name"] = {"value": [self.username], "numeric_value": None}
         return {
-            '_id': str(self.id),
-            'pg_id': str(self.id),
-            'resource_id': self.resource_id,
-            'http_status': self.http_status or '',
-            'start_time': self.start_time,
-            'end_time': self.end_time,
-            'url_domain': self.url_domain if self.url_domain else '',
-            'url_path': self.url_path if self.url_path else '',
-            'duration': self.duration,
-            'error': self.error if self.error else '',
-            'report_type': self.report_type,
-            'request_id': self.request_id,
-            'ip': self.ip,
-            'group_id': str(self.group_id),
-            '_parent': str(self.group_id),
-            'tags': tags,
-            'tag_list': tag_list
+            "_id": str(self.id),
+            "pg_id": str(self.id),
+            "resource_id": self.resource_id,
+            "http_status": self.http_status or "",
+            "start_time": self.start_time,
+            "end_time": self.end_time,
+            "url_domain": self.url_domain if self.url_domain else "",
+            "url_path": self.url_path if self.url_path else "",
+            "duration": self.duration,
+            "error": self.error if self.error else "",
+            "report_type": self.report_type,
+            "request_id": self.request_id,
+            "ip": self.ip,
+            "group_id": str(self.group_id),
+            "_parent": str(self.group_id),
+            "tags": tags,
+            "tag_list": tag_list,
         }

     @property
     def partition_id(self):
-        return 'rcae_r_%s' % self.report_group_time.strftime('%Y_%m')
+        return "rcae_r_%s" % self.report_group_time.strftime("%Y_%m")

     def partition_range(self):
         start_date = self.report_group_time.date().replace(day=1)
@@ -488,27 +503,31 @@ class Report(Base, BaseModel):


 def after_insert(mapper, connection, target):
-    if not hasattr(target, '_skip_ft_index'):
+    if not hasattr(target, "_skip_ft_index"):
         data = target.es_doc()
-        data.pop('_id', None)
-        Datastores.es.index(target.partition_id, 'report', data,
-                            parent=target.group_id, id=target.id)
+        data.pop("_id", None)
+        Datastores.es.index(
+            target.partition_id, "report", data, parent=target.group_id, id=target.id
+        )


 def after_update(mapper, connection, target):
-    if not hasattr(target, '_skip_ft_index'):
+    if not hasattr(target, "_skip_ft_index"):
         data = target.es_doc()
-        data.pop('_id', None)
-        Datastores.es.index(target.partition_id, 'report', data,
-                            parent=target.group_id, id=target.id)
+        data.pop("_id", None)
+        Datastores.es.index(
+            target.partition_id, "report", data, parent=target.group_id, id=target.id
+        )


 def after_delete(mapper, connection, target):
-    if not hasattr(target, '_skip_ft_index'):
-        query = {"query":{'term': {'pg_id': target.id}}}
-        Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report'), body=query)
+    if not hasattr(target, "_skip_ft_index"):
+        query = {"query": {"term": {"pg_id": target.id}}}
+        Datastores.es.transport.perform_request(
+            "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
+        )


-sa.event.listen(Report, 'after_insert', after_insert)
-sa.event.listen(Report, 'after_update', after_update)
-sa.event.listen(Report, 'after_delete', after_delete)
+sa.event.listen(Report, "after_insert", after_insert)
+sa.event.listen(Report, "after_update", after_update)
+sa.event.listen(Report, "after_delete", after_delete)
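The `after_*` hooks are wired with SQLAlchemy mapper events, which fire during the flush. A minimal self-contained sketch of the same wiring (`Thing` and the print are illustrative stand-ins for `Report` and the Elasticsearch indexing):

```python
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Thing(Base):
    __tablename__ = "things"
    id = sa.Column(sa.Integer, primary_key=True)

def after_insert(mapper, connection, target):
    # stand-in for the Elasticsearch indexing done above
    print("would index document", target.id)

sa.event.listen(Thing, "after_insert", after_insert)
```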
@@ -20,13 +20,16 @@ import sqlalchemy as sa


 class ReportAssignment(Base, BaseModel):
-    __tablename__ = 'reports_assignments'
+    __tablename__ = "reports_assignments"

-    group_id = sa.Column(sa.BigInteger,
-                         sa.ForeignKey('reports_groups.id', ondelete='cascade',
-                                       onupdate='cascade'),
-                         primary_key=True)
-    owner_id = sa.Column(sa.Integer,
-                         sa.ForeignKey('users.id', onupdate='CASCADE',
-                                       ondelete='CASCADE'), primary_key=True)
+    group_id = sa.Column(
+        sa.BigInteger,
+        sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
+        primary_key=True,
+    )
+    owner_id = sa.Column(
+        sa.Integer,
+        sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
+        primary_key=True,
+    )
     report_time = sa.Column(sa.DateTime(), nullable=False)
@@ -22,23 +22,23 @@ from ziggurat_foundations.models.base import BaseModel


 class ReportComment(Base, BaseModel):
-    __tablename__ = 'reports_comments'
+    __tablename__ = "reports_comments"

     comment_id = sa.Column(sa.Integer, nullable=False, primary_key=True)
-    group_id = sa.Column(sa.BigInteger,
-                         sa.ForeignKey('reports_groups.id', ondelete='cascade',
-                                       onupdate='cascade'))
-    body = sa.Column(sa.UnicodeText(), default='')
-    owner_id = sa.Column(sa.Integer,
-                         sa.ForeignKey('users.id', onupdate='CASCADE',
-                                       ondelete='CASCADE'))
-    created_timestamp = sa.Column(sa.DateTime(),
-                                  default=datetime.utcnow,
-                                  server_default=sa.func.now())
+    group_id = sa.Column(
+        sa.BigInteger,
+        sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
+    )
+    body = sa.Column(sa.UnicodeText(), default="")
+    owner_id = sa.Column(
+        sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
+    )
+    created_timestamp = sa.Column(
+        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
+    )
     report_time = sa.Column(sa.DateTime(), nullable=False)

-    owner = sa.orm.relationship('User',
-                                lazy='joined')
+    owner = sa.orm.relationship("User", lazy="joined")

     @property
     def processed_body(self):
@@ -46,5 +46,5 @@ class ReportComment(Base, BaseModel):

     def get_dict(self):
         instance_dict = super(ReportComment, self).get_dict()
-        instance_dict['user_name'] = self.owner.user_name
+        instance_dict["user_name"] = self.owner.user_name
         return instance_dict
@@ -33,26 +33,30 b' log = logging.getLogger(__name__)'
33
33
34
34
35 class ReportGroup(Base, BaseModel):
35 class ReportGroup(Base, BaseModel):
36 __tablename__ = 'reports_groups'
36 __tablename__ = "reports_groups"
37 __table_args__ = {'implicit_returning': False}
37 __table_args__ = {"implicit_returning": False}
38
38
39 id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
39 id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
40 resource_id = sa.Column(sa.Integer(),
40 resource_id = sa.Column(
41 sa.ForeignKey('applications.resource_id',
41 sa.Integer(),
42 onupdate='CASCADE',
42 sa.ForeignKey(
43 ondelete='CASCADE'),
43 "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
44 nullable=False,
44 ),
45 index=True)
45 nullable=False,
46 priority = sa.Column(sa.Integer, nullable=False, index=True, default=5,
46 index=True,
47 server_default='5')
47 )
48 first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
48 priority = sa.Column(
49 server_default=sa.func.now())
49 sa.Integer, nullable=False, index=True, default=5, server_default="5"
50 last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
50 )
51 server_default=sa.func.now())
51 first_timestamp = sa.Column(
52 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
53 )
54 last_timestamp = sa.Column(
55 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
56 )
52 error = sa.Column(sa.UnicodeText(), index=True)
57 error = sa.Column(sa.UnicodeText(), index=True)
53 grouping_hash = sa.Column(sa.String(40), default='')
58 grouping_hash = sa.Column(sa.String(40), default="")
54 triggered_postprocesses_ids = sa.Column(JSON(), nullable=False,
59 triggered_postprocesses_ids = sa.Column(JSON(), nullable=False, default=list)
55 default=list)
56 report_type = sa.Column(sa.Integer, default=1)
60 report_type = sa.Column(sa.Integer, default=1)
57 total_reports = sa.Column(sa.Integer, default=1)
61 total_reports = sa.Column(sa.Integer, default=1)
58 last_report = sa.Column(sa.Integer)
62 last_report = sa.Column(sa.Integer)
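Note the doubled defaults above: priority sets default=5 (applied by SQLAlchemy when an INSERT omits the column) and server_default="5" (baked into the DDL, so rows written outside the ORM agree), and the timestamp columns pair datetime.utcnow with sa.func.now() the same way. A minimal sketch:

    import sqlalchemy as sa
    from datetime import datetime

    metadata = sa.MetaData()
    example = sa.Table(
        "example", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        # client-side default for ORM inserts, server-side default in DDL
        sa.Column("priority", sa.Integer, nullable=False,
                  default=5, server_default="5"),
        sa.Column("created", sa.DateTime(), default=datetime.utcnow,
                  server_default=sa.func.now()),
    )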
@@ -64,50 +68,58 b' class ReportGroup(Base, BaseModel):'
64 notified = sa.Column(sa.Boolean(), index=True, default=False)
68 notified = sa.Column(sa.Boolean(), index=True, default=False)
65 public = sa.Column(sa.Boolean(), index=True, default=False)
69 public = sa.Column(sa.Boolean(), index=True, default=False)
66
70
67 reports = sa.orm.relationship('Report',
71 reports = sa.orm.relationship(
68 lazy='dynamic',
72 "Report",
69 backref='report_group',
73 lazy="dynamic",
70 cascade="all, delete-orphan",
74 backref="report_group",
71 passive_deletes=True,
75 cascade="all, delete-orphan",
72 passive_updates=True, )
76 passive_deletes=True,
73
77 passive_updates=True,
74 comments = sa.orm.relationship('ReportComment',
78 )
75 lazy='dynamic',
79
76 backref='report',
80 comments = sa.orm.relationship(
77 cascade="all, delete-orphan",
81 "ReportComment",
78 passive_deletes=True,
82 lazy="dynamic",
79 passive_updates=True,
83 backref="report",
80 order_by="ReportComment.comment_id")
84 cascade="all, delete-orphan",
81
85 passive_deletes=True,
82 assigned_users = sa.orm.relationship('User',
86 passive_updates=True,
83 backref=sa.orm.backref(
87 order_by="ReportComment.comment_id",
84 'assigned_reports_relation',
88 )
85 lazy='dynamic',
89
86 order_by=sa.desc(
90 assigned_users = sa.orm.relationship(
87 sa.text("reports_groups.id"))
91 "User",
88 ),
92 backref=sa.orm.backref(
89 passive_deletes=True,
93 "assigned_reports_relation",
90 passive_updates=True,
94 lazy="dynamic",
91 secondary='reports_assignments',
95 order_by=sa.desc(sa.text("reports_groups.id")),
92 order_by="User.user_name")
96 ),
93
97 passive_deletes=True,
94 stats = sa.orm.relationship('ReportStat',
98 passive_updates=True,
95 lazy='dynamic',
99 secondary="reports_assignments",
96 backref='report',
100 order_by="User.user_name",
97 passive_deletes=True,
101 )
98 passive_updates=True, )
102
99
103 stats = sa.orm.relationship(
100 last_report_ref = sa.orm.relationship('Report',
104 "ReportStat",
101 uselist=False,
105 lazy="dynamic",
102 primaryjoin="ReportGroup.last_report "
106 backref="report",
103 "== Report.id",
107 passive_deletes=True,
104 foreign_keys="Report.id",
108 passive_updates=True,
105 cascade="all, delete-orphan",
109 )
106 passive_deletes=True,
110
107 passive_updates=True, )
111 last_report_ref = sa.orm.relationship(
112 "Report",
113 uselist=False,
114 primaryjoin="ReportGroup.last_report == Report.id",
115 foreign_keys="Report.id",
116 cascade="all, delete-orphan",
117 passive_deletes=True,
118 passive_updates=True,
119 )
108
120
109 def __repr__(self):
121 def __repr__(self):
110 return '<ReportGroup id:{}>'.format(self.id)
122 return "<ReportGroup id:{}>".format(self.id)
111
123
112 def get_report(self, report_id=None, public=False):
124 def get_report(self, report_id=None, public=False):
113 """
125 """
@@ -121,8 +133,8 b' class ReportGroup(Base, BaseModel):'
121 return self.reports.filter(Report.id == report_id).first()
133 return self.reports.filter(Report.id == report_id).first()
122
134
123 def get_public_url(self, request, _app_url=None):
135 def get_public_url(self, request, _app_url=None):
124 url = request.route_url('/', _app_url=_app_url)
136 url = request.route_url("/", _app_url=_app_url)
125 return (url + 'ui/report/%s') % self.id
137 return (url + "ui/report/%s") % self.id
126
138
127 def run_postprocessing(self, report):
139 def run_postprocessing(self, report):
128 """
140 """
@@ -135,12 +147,15 b' class ReportGroup(Base, BaseModel):'
135 rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
147 rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
136 report_dict = report.get_dict(request)
148 report_dict = report.get_dict(request)
137 # if it was not processed yet
149 # if it was not processed yet
138 if (rule_obj.match(report_dict) and
150 if (
139 action.pkey not in self.triggered_postprocesses_ids):
151 rule_obj.match(report_dict)
152 and action.pkey not in self.triggered_postprocesses_ids
153 ):
140 action.postprocess(self)
154 action.postprocess(self)
141 # reassign the list so SQLAlchemy can track the mutation
155 # reassign the list so SQLAlchemy can track the mutation
142 self.triggered_postprocesses_ids = \
156 self.triggered_postprocesses_ids = self.triggered_postprocesses_ids + [
143 self.triggered_postprocesses_ids + [action.pkey]
157 action.pkey
158 ]
144
159
145 get_db_session(None, self).flush()
160 get_db_session(None, self).flush()
146 # do not go out of bounds
161 # do not go out of bounds
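The reassignment in run_postprocessing is deliberate: in-place mutation of a plain JSON column is invisible to the unit of work, so no UPDATE would be emitted. A sketch of both options:

    import sqlalchemy as sa
    from sqlalchemy.ext.mutable import MutableList

    # plain JSON column: obj.ids_plain.append(x) is NOT detected;
    # obj.ids_plain = obj.ids_plain + [x] (as above) creates a new
    # value the ORM can diff
    ids_plain = sa.Column(sa.JSON, default=list)

    # alternative: a mutation-tracking wrapper makes append() detectable
    ids_tracked = sa.Column(MutableList.as_mutable(sa.JSON), default=list)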
@@ -151,31 +166,30 b' class ReportGroup(Base, BaseModel):'
151
166
152 def get_dict(self, request):
167 def get_dict(self, request):
153 instance_dict = super(ReportGroup, self).get_dict()
168 instance_dict = super(ReportGroup, self).get_dict()
154 instance_dict['server_name'] = self.get_report().tags.get(
169 instance_dict["server_name"] = self.get_report().tags.get("server_name")
155 'server_name')
170 instance_dict["view_name"] = self.get_report().tags.get("view_name")
156 instance_dict['view_name'] = self.get_report().tags.get('view_name')
171 instance_dict["resource_name"] = self.application.resource_name
157 instance_dict['resource_name'] = self.application.resource_name
172 instance_dict["report_type"] = self.get_report().report_type
158 instance_dict['report_type'] = self.get_report().report_type
173 instance_dict["url_path"] = self.get_report().url_path
159 instance_dict['url_path'] = self.get_report().url_path
174 instance_dict["front_url"] = self.get_report().get_public_url(request)
160 instance_dict['front_url'] = self.get_report().get_public_url(request)
175 del instance_dict["triggered_postprocesses_ids"]
161 del instance_dict['triggered_postprocesses_ids']
162 return instance_dict
176 return instance_dict
163
177
164 def es_doc(self):
178 def es_doc(self):
165 return {
179 return {
166 '_id': str(self.id),
180 "_id": str(self.id),
167 'pg_id': str(self.id),
181 "pg_id": str(self.id),
168 'resource_id': self.resource_id,
182 "resource_id": self.resource_id,
169 'error': self.error,
183 "error": self.error,
170 'fixed': self.fixed,
184 "fixed": self.fixed,
171 'public': self.public,
185 "public": self.public,
172 'read': self.read,
186 "read": self.read,
173 'priority': self.priority,
187 "priority": self.priority,
174 'occurences': self.occurences,
188 "occurences": self.occurences,
175 'average_duration': self.average_duration,
189 "average_duration": self.average_duration,
176 'summed_duration': self.summed_duration,
190 "summed_duration": self.summed_duration,
177 'first_timestamp': self.first_timestamp,
191 "first_timestamp": self.first_timestamp,
178 'last_timestamp': self.last_timestamp
192 "last_timestamp": self.last_timestamp,
179 }
193 }
180
194
181 def set_notification_info(self, notify_10=False, notify_100=False):
195 def set_notification_info(self, notify_10=False, notify_100=False):
@@ -184,53 +198,54 b' class ReportGroup(Base, BaseModel):'
184 """
198 """
185 current_time = datetime.utcnow().replace(second=0, microsecond=0)
199 current_time = datetime.utcnow().replace(second=0, microsecond=0)
186 # global app counter
200 # global app counter
187 key = REDIS_KEYS['counters']['reports_per_type'].format(
201 key = REDIS_KEYS["counters"]["reports_per_type"].format(
188 self.report_type, current_time)
202 self.report_type, current_time
203 )
189 redis_pipeline = Datastores.redis.pipeline()
204 redis_pipeline = Datastores.redis.pipeline()
190 redis_pipeline.incr(key)
205 redis_pipeline.incr(key)
191 redis_pipeline.expire(key, 3600 * 24)
206 redis_pipeline.expire(key, 3600 * 24)
192 # detailed app notification for alerts and notifications
207 # detailed app notification for alerts and notifications
208 redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
193 redis_pipeline.sadd(
209 redis_pipeline.sadd(
194 REDIS_KEYS['apps_that_had_reports'], self.resource_id)
210 REDIS_KEYS["apps_that_had_reports_alerting"], self.resource_id
195 redis_pipeline.sadd(
211 )
196 REDIS_KEYS['apps_that_had_reports_alerting'], self.resource_id)
197 # only notify for exceptions here
212 # only notify for exceptions here
198 if self.report_type == ReportType.error:
213 if self.report_type == ReportType.error:
214 redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
199 redis_pipeline.sadd(
215 redis_pipeline.sadd(
200 REDIS_KEYS['apps_that_had_reports'], self.resource_id)
216 REDIS_KEYS["apps_that_had_error_reports_alerting"], self.resource_id
201 redis_pipeline.sadd(
217 )
202 REDIS_KEYS['apps_that_had_error_reports_alerting'],
218 key = REDIS_KEYS["counters"]["report_group_occurences"].format(self.id)
203 self.resource_id)
204 key = REDIS_KEYS['counters']['report_group_occurences'].format(self.id)
205 redis_pipeline.incr(key)
219 redis_pipeline.incr(key)
206 redis_pipeline.expire(key, 3600 * 24)
220 redis_pipeline.expire(key, 3600 * 24)
207 key = REDIS_KEYS['counters']['report_group_occurences_alerting'].format(
221 key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(self.id)
208 self.id)
209 redis_pipeline.incr(key)
222 redis_pipeline.incr(key)
210 redis_pipeline.expire(key, 3600 * 24)
223 redis_pipeline.expire(key, 3600 * 24)
211
224
212 if notify_10:
225 if notify_10:
213 key = REDIS_KEYS['counters'][
226 key = REDIS_KEYS["counters"]["report_group_occurences_10th"].format(self.id)
214 'report_group_occurences_10th'].format(self.id)
215 redis_pipeline.setex(key, 3600 * 24, 1)
227 redis_pipeline.setex(key, 3600 * 24, 1)
216 if notify_100:
228 if notify_100:
217 key = REDIS_KEYS['counters'][
229 key = REDIS_KEYS["counters"]["report_group_occurences_100th"].format(
218 'report_group_occurences_100th'].format(self.id)
230 self.id
231 )
219 redis_pipeline.setex(key, 3600 * 24, 1)
232 redis_pipeline.setex(key, 3600 * 24, 1)
220
233
221 key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
234 key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
222 self.report_type, self.resource_id)
235 self.report_type, self.resource_id
236 )
223 redis_pipeline.sadd(key, self.id)
237 redis_pipeline.sadd(key, self.id)
224 redis_pipeline.expire(key, 3600 * 24)
238 redis_pipeline.expire(key, 3600 * 24)
225 key = REDIS_KEYS['reports_to_notify_per_type_per_app_alerting'].format(
239 key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
226 self.report_type, self.resource_id)
240 self.report_type, self.resource_id
241 )
227 redis_pipeline.sadd(key, self.id)
242 redis_pipeline.sadd(key, self.id)
228 redis_pipeline.expire(key, 3600 * 24)
243 redis_pipeline.expire(key, 3600 * 24)
229 redis_pipeline.execute()
244 redis_pipeline.execute()
230
245
231 @property
246 @property
232 def partition_id(self):
247 def partition_id(self):
233 return 'rcae_r_%s' % self.first_timestamp.strftime('%Y_%m')
248 return "rcae_r_%s" % self.first_timestamp.strftime("%Y_%m")
234
249
235 def partition_range(self):
250 def partition_range(self):
236 start_date = self.first_timestamp.date().replace(day=1)
251 start_date = self.first_timestamp.date().replace(day=1)
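partition_id above buckets a report group by the month of its first timestamp, which is how the time-partitioned storage (and the matching Elasticsearch indices) are named. A tiny sketch:

    from datetime import datetime

    def partition_id(prefix, timestamp):
        # one partition per calendar month, e.g. "rcae_r_2018_04"
        return "%s_%s" % (prefix, timestamp.strftime("%Y_%m"))

    assert partition_id("rcae_r", datetime(2018, 4, 17)) == "rcae_r_2018_04"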
@@ -240,29 +255,33 b' class ReportGroup(Base, BaseModel):'
240
255
241
256
242 def after_insert(mapper, connection, target):
257 def after_insert(mapper, connection, target):
243 if not hasattr(target, '_skip_ft_index'):
258 if not hasattr(target, "_skip_ft_index"):
244 data = target.es_doc()
259 data = target.es_doc()
245 data.pop('_id', None)
260 data.pop("_id", None)
246 Datastores.es.index(target.partition_id, 'report_group',
261 Datastores.es.index(target.partition_id, "report_group", data, id=target.id)
247 data, id=target.id)
248
262
249
263
250 def after_update(mapper, connection, target):
264 def after_update(mapper, connection, target):
251 if not hasattr(target, '_skip_ft_index'):
265 if not hasattr(target, "_skip_ft_index"):
252 data = target.es_doc()
266 data = target.es_doc()
253 data.pop('_id', None)
267 data.pop("_id", None)
254 Datastores.es.index(target.partition_id, 'report_group',
268 Datastores.es.index(target.partition_id, "report_group", data, id=target.id)
255 data, id=target.id)
256
269
257
270
258 def after_delete(mapper, connection, target):
271 def after_delete(mapper, connection, target):
259 query = {"query": {'term': {'group_id': target.id}}}
272 query = {"query": {"term": {"group_id": target.id}}}
260 # delete by query
273 # delete by query
261 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report'), body=query)
274 Datastores.es.transport.perform_request(
262 query = {"query": {'term': {'pg_id': target.id}}}
275 "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
263 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report_group'), body=query)
276 )
264
277 query = {"query": {"term": {"pg_id": target.id}}}
265
278 Datastores.es.transport.perform_request(
266 sa.event.listen(ReportGroup, 'after_insert', after_insert)
279 "DELETE",
267 sa.event.listen(ReportGroup, 'after_update', after_update)
280 "/{}/{}/_query".format(target.partition_id, "report_group"),
268 sa.event.listen(ReportGroup, 'after_delete', after_delete)
281 body=query,
282 )
283
284
285 sa.event.listen(ReportGroup, "after_insert", after_insert)
286 sa.event.listen(ReportGroup, "after_update", after_update)
287 sa.event.listen(ReportGroup, "after_delete", after_delete)
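The three listeners keep the search index in lockstep with the table: every flush that inserts or updates a ReportGroup re-indexes its es_doc(), and deletion triggers delete-by-query for the child reports and the group document. A runnable sketch of the mechanism, with a dict standing in for the Elasticsearch client:

    import sqlalchemy as sa
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()
    FAKE_INDEX = {}  # stand-in for the search backend

    class Item(Base):
        __tablename__ = "items"
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.String(40))

    def after_insert(mapper, connection, target):
        # runs inside the flush, after the row exists and the PK is set
        FAKE_INDEX[target.id] = {"id": target.id, "name": target.name}

    sa.event.listen(Item, "after_insert", after_insert)

    engine = sa.create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(Item(name="report"))
        session.commit()
    print(FAKE_INDEX)  # {1: {'id': 1, 'name': 'report'}}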
@@ -22,53 +22,58 b' from ziggurat_foundations.models.base import BaseModel'
22
22
23
23
24 class ReportStat(Base, BaseModel):
24 class ReportStat(Base, BaseModel):
25 __tablename__ = 'reports_stats'
25 __tablename__ = "reports_stats"
26 __table_args__ = {'implicit_returning': False}
26 __table_args__ = {"implicit_returning": False}
27
27
28 group_id = sa.Column(sa.BigInteger(),
28 group_id = sa.Column(
29 sa.ForeignKey('reports_groups.id'),
29 sa.BigInteger(), sa.ForeignKey("reports_groups.id"), nullable=False
30 nullable=False)
30 )
31 resource_id = sa.Column(sa.Integer(),
31 resource_id = sa.Column(
32 sa.ForeignKey('applications.resource_id'),
32 sa.Integer(), sa.ForeignKey("applications.resource_id"), nullable=False
33 nullable=False)
33 )
34 start_interval = sa.Column(sa.DateTime(), nullable=False)
34 start_interval = sa.Column(sa.DateTime(), nullable=False)
35 occurences = sa.Column(sa.Integer, nullable=True, default=0)
35 occurences = sa.Column(sa.Integer, nullable=True, default=0)
36 owner_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
36 owner_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
37 nullable=True)
38 type = sa.Column(sa.Integer, nullable=True, default=0)
37 type = sa.Column(sa.Integer, nullable=True, default=0)
39 duration = sa.Column(sa.Float, nullable=True, default=0)
38 duration = sa.Column(sa.Float, nullable=True, default=0)
40 id = sa.Column(sa.BigInteger, nullable=False, primary_key=True)
39 id = sa.Column(sa.BigInteger, nullable=False, primary_key=True)
41 server_name = sa.Column(sa.Unicode(128), nullable=False, default='')
40 server_name = sa.Column(sa.Unicode(128), nullable=False, default="")
42 view_name = sa.Column(sa.Unicode(128), nullable=False, default='')
41 view_name = sa.Column(sa.Unicode(128), nullable=False, default="")
43
42
44 @property
43 @property
45 def partition_id(self):
44 def partition_id(self):
46 return 'rcae_r_%s' % self.start_interval.strftime('%Y_%m')
45 return "rcae_r_%s" % self.start_interval.strftime("%Y_%m")
47
46
48 def es_doc(self):
47 def es_doc(self):
49 return {
48 return {
50 'resource_id': self.resource_id,
49 "resource_id": self.resource_id,
51 'timestamp': self.start_interval,
50 "timestamp": self.start_interval,
52 'pg_id': str(self.id),
51 "pg_id": str(self.id),
53 'permanent': True,
52 "permanent": True,
54 'request_id': None,
53 "request_id": None,
55 'log_level': 'ERROR',
54 "log_level": "ERROR",
56 'message': None,
55 "message": None,
57 'namespace': 'appenlight.error',
56 "namespace": "appenlight.error",
58 'tags': {
57 "tags": {
59 'duration': {'values': self.duration,
58 "duration": {"values": self.duration, "numeric_values": self.duration},
60 'numeric_values': self.duration},
59 "occurences": {
61 'occurences': {'values': self.occurences,
60 "values": self.occurences,
62 'numeric_values': self.occurences},
61 "numeric_values": self.occurences,
63 'group_id': {'values': self.group_id,
62 },
64 'numeric_values': self.group_id},
63 "group_id": {"values": self.group_id, "numeric_values": self.group_id},
65 'type': {'values': ReportType.key_from_value(self.type),
64 "type": {
66 'numeric_values': self.type},
65 "values": ReportType.key_from_value(self.type),
67 'server_name': {'values': self.server_name,
66 "numeric_values": self.type,
68 'numeric_values': None},
67 },
69 'view_name': {'values': self.view_name,
68 "server_name": {"values": self.server_name, "numeric_values": None},
70 'numeric_values': None},
69 "view_name": {"values": self.view_name, "numeric_values": None},
71 },
70 },
72 'tag_list': ['duration', 'occurences', 'group_id', 'type',
71 "tag_list": [
73 'server_name', 'view_name']
72 "duration",
73 "occurences",
74 "group_id",
75 "type",
76 "server_name",
77 "view_name",
78 ],
74 }
79 }
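Each tag in es_doc() is stored as a values/numeric_values pair, so Elasticsearch can index a searchable representation and a numeric one side by side, and tag_list enumerates which tags a document carries. A sketch of the shape:

    def tag_entry(value, numeric=None):
        return {"values": value, "numeric_values": numeric}

    doc = {
        "tags": {
            "duration": tag_entry(3.5, 3.5),
            "server_name": tag_entry("web01"),  # no numeric form
        },
        "tag_list": ["duration", "server_name"],
    }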
@@ -23,11 +23,13 b' from ziggurat_foundations.models.services.resource import ResourceService'
23
23
24
24
25 class Resource(ResourceMixin, Base):
25 class Resource(ResourceMixin, Base):
26 events = sa.orm.relationship('Event',
26 events = sa.orm.relationship(
27 lazy='dynamic',
27 "Event",
28 backref='resource',
28 lazy="dynamic",
29 passive_deletes=True,
29 backref="resource",
30 passive_updates=True)
30 passive_deletes=True,
31 passive_updates=True,
32 )
31
33
32 @property
34 @property
33 def owner_user_name(self):
35 def owner_user_name(self):
@@ -39,46 +41,56 b' class Resource(ResourceMixin, Base):'
39 if self.owner_group:
41 if self.owner_group:
40 return self.owner_group.group_name
42 return self.owner_group.group_name
41
43
42 def get_dict(self, exclude_keys=None, include_keys=None,
44 def get_dict(
43 include_perms=False, include_processing_rules=False):
45 self,
46 exclude_keys=None,
47 include_keys=None,
48 include_perms=False,
49 include_processing_rules=False,
50 ):
44 result = super(Resource, self).get_dict(exclude_keys, include_keys)
51 result = super(Resource, self).get_dict(exclude_keys, include_keys)
45 result['possible_permissions'] = self.__possible_permissions__
52 result["possible_permissions"] = self.__possible_permissions__
46 if include_perms:
53 if include_perms:
47 result['current_permissions'] = self.user_permissions_list
54 result["current_permissions"] = self.user_permissions_list
48 else:
55 else:
49 result['current_permissions'] = []
56 result["current_permissions"] = []
50 if include_processing_rules:
57 if include_processing_rules:
51 result["postprocessing_rules"] = [rule.get_dict() for rule
58 result["postprocessing_rules"] = [
52 in self.postprocess_conf]
59 rule.get_dict() for rule in self.postprocess_conf
60 ]
53 else:
61 else:
54 result["postprocessing_rules"] = []
62 result["postprocessing_rules"] = []
55 exclude_keys_list = exclude_keys or []
63 exclude_keys_list = exclude_keys or []
56 include_keys_list = include_keys or []
64 include_keys_list = include_keys or []
57 d = {}
65 d = {}
58 for k in result.keys():
66 for k in result.keys():
59 if (k not in exclude_keys_list and
67 if k not in exclude_keys_list and (
60 (k in include_keys_list or not include_keys)):
68 k in include_keys_list or not include_keys
69 ):
61 d[k] = result[k]
70 d[k] = result[k]
62 for k in ['owner_user_name', 'owner_group_name']:
71 for k in ["owner_user_name", "owner_group_name"]:
63 if (k not in exclude_keys_list and
72 if k not in exclude_keys_list and (
64 (k in include_keys_list or not include_keys)):
73 k in include_keys_list or not include_keys
74 ):
65 d[k] = getattr(self, k)
75 d[k] = getattr(self, k)
66 return d
76 return d
67
77
68 @property
78 @property
69 def user_permissions_list(self):
79 def user_permissions_list(self):
70 return [permission_tuple_to_dict(perm) for perm in
80 return [
71 ResourceService.users_for_perm(
81 permission_tuple_to_dict(perm)
72 self, '__any_permission__', limit_group_permissions=True)]
82 for perm in ResourceService.users_for_perm(
83 self, "__any_permission__", limit_group_permissions=True
84 )
85 ]
73
86
74 @property
87 @property
75 def __acl__(self):
88 def __acl__(self):
76 acls = []
89 acls = []
77
90
78 if self.owner_user_id:
91 if self.owner_user_id:
79 acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS,), ])
92 acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS)])
80
93
81 if self.owner_group_id:
94 if self.owner_group_id:
82 acls.extend([(Allow, "group:%s" % self.owner_group_id,
95 acls.extend([(Allow, "group:%s" % self.owner_group_id, ALL_PERMISSIONS)])
83 ALL_PERMISSIONS,), ])
84 return acls
96 return acls
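__acl__ is Pyramid's authorization hook: the property computes access-control entries from ownership at lookup time, granting everything to the owning user and owning group. A self-contained sketch of the same pattern:

    from pyramid.security import Allow, ALL_PERMISSIONS

    class Doc:
        owner_user_id = 7
        owner_group_id = 3

        @property
        def __acl__(self):
            acls = []
            if self.owner_user_id:
                acls.append((Allow, self.owner_user_id, ALL_PERMISSIONS))
            if self.owner_group_id:
                acls.append(
                    (Allow, "group:%s" % self.owner_group_id, ALL_PERMISSIONS)
                )
            return acls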
@@ -13,4 +13,3 b''
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
@@ -33,7 +33,6 b' log = logging.getLogger(__name__)'
33
33
34
34
35 class ApplicationService(BaseService):
35 class ApplicationService(BaseService):
36
37 @classmethod
36 @classmethod
38 def all(cls, db_session=None):
37 def all(cls, db_session=None):
39 db_session = get_db_session(db_session)
38 db_session = get_db_session(db_session)
@@ -51,9 +50,9 b' class ApplicationService(BaseService):'
51 @classmethod
50 @classmethod
52 def by_api_key_cached(cls, db_session=None):
51 def by_api_key_cached(cls, db_session=None):
53 db_session = get_db_session(db_session)
52 db_session = get_db_session(db_session)
54 cache_region = get_region('redis_min_1')
53 cache_region = get_region("redis_min_1")
55
54
56 @cache_region.cache_on_arguments('ApplicationService.by_api_key')
55 @cache_region.cache_on_arguments("ApplicationService.by_api_key")
57 def cached(*args, **kwargs):
56 def cached(*args, **kwargs):
58 app = cls.by_api_key(*args, db_session=db_session, **kwargs)
57 app = cls.by_api_key(*args, db_session=db_session, **kwargs)
59 if app:
58 if app:
@@ -63,10 +62,11 b' class ApplicationService(BaseService):'
63 return cached
62 return cached
64
63
65 @classmethod
64 @classmethod
66 def by_public_api_key(cls, api_key, db_session=None, from_cache=False,
65 def by_public_api_key(
67 request=None):
66 cls, api_key, db_session=None, from_cache=False, request=None
67 ):
68 db_session = get_db_session(db_session)
68 db_session = get_db_session(db_session)
69 cache_region = get_region('redis_min_1')
69 cache_region = get_region("redis_min_1")
70
70
71 def uncached(api_key):
71 def uncached(api_key):
72 q = db_session.query(Application)
72 q = db_session.query(Application)
@@ -75,8 +75,8 b' class ApplicationService(BaseService):'
75 return q.first()
75 return q.first()
76
76
77 if from_cache:
77 if from_cache:
78 @cache_region.cache_on_arguments(
78
79 'ApplicationService.by_public_api_key')
79 @cache_region.cache_on_arguments("ApplicationService.by_public_api_key")
80 def cached(api_key):
80 def cached(api_key):
81 app = uncached(api_key)
81 app = uncached(api_key)
82 if app:
82 if app:
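The *_cached variants all follow one pattern: build a closure memoized with dogpile's cache_on_arguments under a namespace, then reattach the cached, detached instance with db_session.merge(app, load=False) so no fresh query is issued. A runnable sketch of the caching half:

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")

    @region.cache_on_arguments(namespace="ApplicationService.by_api_key")
    def cached(api_key):
        print("cache miss:", api_key)  # executed once per distinct key
        return {"api_key": api_key}

    cached("abc")  # cache miss: abc
    cached("abc")  # served from the region, no miss printed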
@@ -98,9 +98,9 b' class ApplicationService(BaseService):'
98 @classmethod
98 @classmethod
99 def by_id_cached(cls, db_session=None):
99 def by_id_cached(cls, db_session=None):
100 db_session = get_db_session(db_session)
100 db_session = get_db_session(db_session)
101 cache_region = get_region('redis_min_1')
101 cache_region = get_region("redis_min_1")
102
102
103 @cache_region.cache_on_arguments('ApplicationService.by_id')
103 @cache_region.cache_on_arguments("ApplicationService.by_id")
104 def cached(*args, **kwargs):
104 def cached(*args, **kwargs):
105 app = cls.by_id(*args, db_session=db_session, **kwargs)
105 app = cls.by_id(*args, db_session=db_session, **kwargs)
106 if app:
106 if app:
@@ -119,10 +119,9 b' class ApplicationService(BaseService):'
119 @classmethod
119 @classmethod
120 def by_http_referer(cls, referer_string, db_session=None):
120 def by_http_referer(cls, referer_string, db_session=None):
121 db_session = get_db_session(db_session)
121 db_session = get_db_session(db_session)
122 domain = urllib.parse.urlsplit(
122 domain = urllib.parse.urlsplit(referer_string, allow_fragments=False).netloc
123 referer_string, allow_fragments=False).netloc
124 if domain:
123 if domain:
125 if domain.startswith('www.'):
124 if domain.startswith("www."):
126 domain = domain[4:]
125 domain = domain[4:]
127 q = db_session.query(Application).filter(Application.domain == domain)
126 q = db_session.query(Application).filter(Application.domain == domain)
128 return q.first()
127 return q.first()
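by_http_referer reduces a referer URL to a bare domain before matching it against Application.domain. The extraction step in isolation:

    import urllib.parse

    def domain_of(referer):
        # netloc of the referer, with a leading "www." stripped
        domain = urllib.parse.urlsplit(referer, allow_fragments=False).netloc
        if domain.startswith("www."):
            domain = domain[4:]
        return domain

    assert domain_of("https://www.example.com/page") == "example.com"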
@@ -132,7 +131,8 b' class ApplicationService(BaseService):'
132 db_session = get_db_session(db_session)
131 db_session = get_db_session(db_session)
133 q = db_session.query(Application)
132 q = db_session.query(Application)
134 q2 = ReportGroup.last_updated(
133 q2 = ReportGroup.last_updated(
135 since_when, exclude_status=exclude_status, db_session=db_session)
134 since_when, exclude_status=exclude_status, db_session=db_session
135 )
136 q2 = q2.from_self(ReportGroup.resource_id)
136 q2 = q2.from_self(ReportGroup.resource_id)
137 q2 = q2.group_by(ReportGroup.resource_id)
137 q2 = q2.group_by(ReportGroup.resource_id)
138 q = q.filter(Application.resource_id.in_(q2))
138 q = q.filter(Application.resource_id.in_(q2))
@@ -142,10 +142,10 b' class ApplicationService(BaseService):'
142 def check_for_groups_alert(cls, resource, event_type, *args, **kwargs):
142 def check_for_groups_alert(cls, resource, event_type, *args, **kwargs):
143 """ Check for open alerts depending on group type.
143 """ Check for open alerts depending on group type.
144 Create a new one if none is found and send alerts """
144 Create a new one if none is found and send alerts """
145 db_session = get_db_session(kwargs.get('db_session'))
145 db_session = get_db_session(kwargs.get("db_session"))
146 request = get_current_request()
146 request = get_current_request()
147 report_groups = kwargs['report_groups']
147 report_groups = kwargs["report_groups"]
148 occurence_dict = kwargs['occurence_dict']
148 occurence_dict = kwargs["occurence_dict"]
149
149
150 error_reports = 0
150 error_reports = 0
151 slow_reports = 0
151 slow_reports = 0
@@ -156,38 +156,45 b' class ApplicationService(BaseService):'
156 elif group.get_report().report_type == ReportType.slow:
156 elif group.get_report().report_type == ReportType.slow:
157 slow_reports += occurences
157 slow_reports += occurences
158
158
159 log_msg = 'LIMIT INFO: %s : %s error reports. %s slow_reports' % (
159 log_msg = "LIMIT INFO: %s : %s error reports. %s slow_reports" % (
160 resource,
160 resource,
161 error_reports,
161 error_reports,
162 slow_reports)
162 slow_reports,
163 )
163 logging.warning(log_msg)
164 logging.warning(log_msg)
164 threshold = 10
165 threshold = 10
165 for event_type in ['error_report_alert', 'slow_report_alert']:
166 for event_type in ["error_report_alert", "slow_report_alert"]:
166 if (error_reports < resource.error_report_threshold and
167 if (
167 event_type == 'error_report_alert'):
168 error_reports < resource.error_report_threshold
169 and event_type == "error_report_alert"
170 ):
168 continue
171 continue
169 elif (slow_reports <= resource.slow_report_threshold and
172 elif (
170 event_type == 'slow_report_alert'):
173 slow_reports <= resource.slow_report_threshold
174 and event_type == "slow_report_alert"
175 ):
171 continue
176 continue
172 if event_type == 'error_report_alert':
177 if event_type == "error_report_alert":
173 amount = error_reports
178 amount = error_reports
174 threshold = resource.error_report_threshold
179 threshold = resource.error_report_threshold
175 elif event_type == 'slow_report_alert':
180 elif event_type == "slow_report_alert":
176 amount = slow_reports
181 amount = slow_reports
177 threshold = resource.slow_report_threshold
182 threshold = resource.slow_report_threshold
178
183
179 event = EventService.for_resource([resource.resource_id],
184 event = EventService.for_resource(
180 event_type=Event.types[
185 [resource.resource_id],
181 event_type],
186 event_type=Event.types[event_type],
182 status=Event.statuses['active'])
187 status=Event.statuses["active"],
188 )
183 if event.first():
189 if event.first():
184 log.info('ALERT: PROGRESS: %s %s' % (event_type, resource))
190 log.info("ALERT: PROGRESS: %s %s" % (event_type, resource))
185 else:
191 else:
186 log.warning('ALERT: OPEN: %s %s' % (event_type, resource))
192 log.warning("ALERT: OPEN: %s %s" % (event_type, resource))
187 new_event = Event(resource_id=resource.resource_id,
193 new_event = Event(
188 event_type=Event.types[event_type],
194 resource_id=resource.resource_id,
189 status=Event.statuses['active'],
195 event_type=Event.types[event_type],
190 values={'reports': amount,
196 status=Event.statuses["active"],
191 'threshold': threshold})
197 values={"reports": amount, "threshold": threshold},
198 )
192 db_session.add(new_event)
199 db_session.add(new_event)
193 new_event.send_alerts(request=request, resource=resource)
200 new_event.send_alerts(request=request, resource=resource)
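The guards above encode the alert policy: error alerts fire once error_reports reaches the resource's threshold, slow alerts only once slow_reports exceeds theirs, and an already-active event is logged as PROGRESS instead of being opened again. The threshold decision in isolation (FakeResource is a hypothetical stand-in for anything carrying the two threshold attributes):

    def should_alert(error_reports, slow_reports, resource, event_type):
        if event_type == "error_report_alert":
            return error_reports >= resource.error_report_threshold
        if event_type == "slow_report_alert":
            return slow_reports > resource.slow_report_threshold
        return False

    class FakeResource:
        error_report_threshold = 10
        slow_report_threshold = 10

    assert should_alert(12, 0, FakeResource, "error_report_alert")
    assert not should_alert(0, 10, FakeResource, "slow_report_alert")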
@@ -21,7 +21,6 b' from appenlight.models.services.base import BaseService'
21
21
22
22
23 class ApplicationPostprocessConfService(BaseService):
23 class ApplicationPostprocessConfService(BaseService):
24
25 @classmethod
24 @classmethod
26 def by_pkey(cls, pkey, db_session=None):
25 def by_pkey(cls, pkey, db_session=None):
27 db_session = get_db_session(db_session)
26 db_session = get_db_session(db_session)
@@ -40,9 +40,10 b' class ConfigService(BaseService):'
40 if pairs:
40 if pairs:
41 conditions = []
41 conditions = []
42 for pair in pairs:
42 for pair in pairs:
43 conditions.append(sa.and_(
43 conditions.append(
44 Config.key == pair['key'],
44 sa.and_(
45 Config.section == pair['section'])
45 Config.key == pair["key"], Config.section == pair["section"]
46 )
46 )
47 )
47
48
48 query = query.filter(sa.or_(*conditions))
49 query = query.filter(sa.or_(*conditions))
@@ -57,13 +58,15 b' class ConfigService(BaseService):'
57 return config
58 return config
58
59
59 @classmethod
60 @classmethod
60 def by_key_and_section(cls, key, section, auto_create=False,
61 def by_key_and_section(
61 default_value=None, db_session=None):
62 cls, key, section, auto_create=False, default_value=None, db_session=None
63 ):
62 db_session = get_db_session(db_session)
64 db_session = get_db_session(db_session)
63 registry = get_current_registry()
65 registry = get_current_registry()
64
66
65 @registry.cache_regions.memory_min_1.cache_on_arguments(
67 @registry.cache_regions.memory_min_1.cache_on_arguments(
66 namespace='ConfigService.by_key_and_section')
68 namespace="ConfigService.by_key_and_section"
69 )
67 def cached(key, section):
70 def cached(key, section):
68 query = db_session.query(Config).filter(Config.key == key)
71 query = db_session.query(Config).filter(Config.key == key)
69 query = query.filter(Config.section == section)
72 query = query.filter(Config.section == section)
@@ -76,8 +79,7 b' class ConfigService(BaseService):'
76 if config:
79 if config:
77 config = db_session.merge(config, load=False)
80 config = db_session.merge(config, load=False)
78 if config is None and auto_create:
81 if config is None and auto_create:
79 config = ConfigService.create_config(key, section,
82 config = ConfigService.create_config(key, section, value=default_value)
80 value=default_value)
81 cached.invalidate(key, section)
83 cached.invalidate(key, section)
82 return config
84 return config
83
85
@@ -87,14 +89,28 b' class ConfigService(BaseService):'
87 Will add fresh default config values to database if no keys are found
89 Will add fresh default config values to database if no keys are found
88 :return:
90 :return:
89 """
91 """
90 log.info('Checking/setting default values')
92 log.info("Checking/setting default values")
91 self.by_key_and_section('template_footer_html', 'global',
93 self.by_key_and_section(
92 default_value='', auto_create=True)
94 "template_footer_html", "global", default_value="", auto_create=True
93 self.by_key_and_section('list_groups_to_non_admins', 'global',
95 )
94 default_value=True, auto_create=True)
96 self.by_key_and_section(
95 self.by_key_and_section('per_application_reports_rate_limit', 'global',
97 "list_groups_to_non_admins", "global", default_value=True, auto_create=True
96 default_value=2000, auto_create=True)
98 )
97 self.by_key_and_section('per_application_logs_rate_limit', 'global',
99 self.by_key_and_section(
98 default_value=100000, auto_create=True)
100 "per_application_reports_rate_limit",
99 self.by_key_and_section('per_application_metrics_rate_limit', 'global',
101 "global",
100 default_value=100000, auto_create=True)
102 default_value=2000,
103 auto_create=True,
104 )
105 self.by_key_and_section(
106 "per_application_logs_rate_limit",
107 "global",
108 default_value=100000,
109 auto_create=True,
110 )
111 self.by_key_and_section(
112 "per_application_metrics_rate_limit",
113 "global",
114 default_value=100000,
115 auto_create=True,
116 )
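by_key_and_section is a cached get-or-create: reads are memoized per (key, section), and when auto_create inserts a fresh row the stale cached miss is explicitly invalidated. A runnable sketch with a dict standing in for the config table:

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")
    _store = {}  # stand-in for the Config table

    @region.cache_on_arguments(namespace="ConfigService.by_key_and_section")
    def get_config(key, section):
        return _store.get((key, section))

    def get_or_create(key, section, default=None):
        value = get_config(key, section)
        if value is None:
            _store[(key, section)] = default
            get_config.invalidate(key, section)  # drop the cached miss
            value = default
        return value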
@@ -26,10 +26,19 b' from appenlight.models.services.base import BaseService'
26
26
27 class EventService(BaseService):
27 class EventService(BaseService):
28 @classmethod
28 @classmethod
29 def for_resource(cls, resource_ids, event_type=None, status=None,
29 def for_resource(
30 since_when=None, limit=20, event_id=None,
30 cls,
31 target_uuid=None, order_by=None, or_target_user_id=None,
31 resource_ids,
32 db_session=None):
32 event_type=None,
33 status=None,
34 since_when=None,
35 limit=20,
36 event_id=None,
37 target_uuid=None,
38 order_by=None,
39 or_target_user_id=None,
40 db_session=None,
41 ):
33 """
42 """
34 Fetches events based on the passed params OR, if target_user_id
43 Fetches events based on the passed params OR, if target_user_id
35 is present, also includes events that target just this user
44 is present, also includes events that target just this user
@@ -57,8 +66,7 b' class EventService(BaseService):'
57 if or_target_user_id:
66 if or_target_user_id:
58 or_cond.append(sa.or_(Event.target_user_id == or_target_user_id))
67 or_cond.append(sa.or_(Event.target_user_id == or_target_user_id))
59
68
60 query = query.filter(sa.or_(sa.and_(*and_cond),
69 query = query.filter(sa.or_(sa.and_(*and_cond), *or_cond))
61 *or_cond))
62 if not order_by:
70 if not order_by:
63 query = query.order_by(sa.desc(Event.start_date))
71 query = query.order_by(sa.desc(Event.start_date))
64 if limit:
72 if limit:
@@ -67,8 +75,15 b' class EventService(BaseService):'
67 return query
75 return query
68
76
69 @classmethod
77 @classmethod
70 def by_type_and_status(cls, event_types, status_types, since_when=None,
78 def by_type_and_status(
71 older_than=None, db_session=None, app_ids=None):
79 cls,
80 event_types,
81 status_types,
82 since_when=None,
83 older_than=None,
84 db_session=None,
85 app_ids=None,
86 ):
72 db_session = get_db_session(db_session)
87 db_session = get_db_session(db_session)
73 query = db_session.query(Event)
88 query = db_session.query(Event)
74 query = query.filter(Event.event_type.in_(event_types))
89 query = query.filter(Event.event_type.in_(event_types))
@@ -84,26 +99,38 b' class EventService(BaseService):'
84 @classmethod
99 @classmethod
85 def latest_for_user(cls, user, db_session=None):
100 def latest_for_user(cls, user, db_session=None):
86 registry = get_current_registry()
101 registry = get_current_registry()
87 resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types)
102 resources = UserService.resources_with_perms(
103 user, ["view"], resource_types=registry.resource_types
104 )
88 resource_ids = [r.resource_id for r in resources]
105 resource_ids = [r.resource_id for r in resources]
89 db_session = get_db_session(db_session)
106 db_session = get_db_session(db_session)
90 return EventService.for_resource(
107 return EventService.for_resource(
91 resource_ids, or_target_user_id=user.id, limit=10,
108 resource_ids, or_target_user_id=user.id, limit=10, db_session=db_session
92 db_session=db_session)
109 )
93
110
94 @classmethod
111 @classmethod
95 def get_paginator(cls, user, page=1, item_count=None, items_per_page=50,
112 def get_paginator(
96 order_by=None, filter_settings=None, db_session=None):
113 cls,
114 user,
115 page=1,
116 item_count=None,
117 items_per_page=50,
118 order_by=None,
119 filter_settings=None,
120 db_session=None,
121 ):
97 if not filter_settings:
122 if not filter_settings:
98 filter_settings = {}
123 filter_settings = {}
99 registry = get_current_registry()
124 registry = get_current_registry()
100 resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types)
125 resources = UserService.resources_with_perms(
126 user, ["view"], resource_types=registry.resource_types
127 )
101 resource_ids = [r.resource_id for r in resources]
128 resource_ids = [r.resource_id for r in resources]
102 query = EventService.for_resource(
129 query = EventService.for_resource(
103 resource_ids, or_target_user_id=user.id, limit=100,
130 resource_ids, or_target_user_id=user.id, limit=100, db_session=db_session
104 db_session=db_session)
131 )
105
132
106 paginator = SqlalchemyOrmPage(query, page=page,
133 paginator = SqlalchemyOrmPage(
107 items_per_page=items_per_page,
134 query, page=page, items_per_page=items_per_page, **filter_settings
108 **filter_settings)
135 )
109 return paginator
136 return paginator
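get_paginator hands the filtered query to a page object that issues the LIMIT/OFFSET and COUNT queries on demand. A sketch, assuming SqlalchemyOrmPage comes from the paginate_sqlalchemy package as elsewhere in this codebase:

    from paginate_sqlalchemy import SqlalchemyOrmPage

    def page_of(query, page=1, items_per_page=50):
        # the page object slices the ORM query and exposes .items,
        # .item_count and friends for the UI layer
        return SqlalchemyOrmPage(query, page=page,
                                 items_per_page=items_per_page)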
@@ -16,18 +16,20 b''
16
16
17 from appenlight.models.group_resource_permission import GroupResourcePermission
17 from appenlight.models.group_resource_permission import GroupResourcePermission
18 from appenlight.models import get_db_session
18 from appenlight.models import get_db_session
19 from ziggurat_foundations.models.services.group_resource_permission import GroupResourcePermissionService
19 from ziggurat_foundations.models.services.group_resource_permission import (
20 GroupResourcePermissionService,
21 )
20
22
21
23
22 class GroupResourcePermissionService(GroupResourcePermissionService):
24 class GroupResourcePermissionService(GroupResourcePermissionService):
23 @classmethod
25 @classmethod
24 def by_resource_group_and_perm(cls, group_id, perm_name, resource_id,
26 def by_resource_group_and_perm(
25 db_session=None):
27 cls, group_id, perm_name, resource_id, db_session=None
28 ):
26 """ return all instances by user name, perm name and resource id """
29 """ return all instances by user name, perm name and resource id """
27 db_session = get_db_session(db_session)
30 db_session = get_db_session(db_session)
28 query = db_session.query(GroupResourcePermission)
31 query = db_session.query(GroupResourcePermission)
29 query = query.filter(GroupResourcePermission.group_id == group_id)
32 query = query.filter(GroupResourcePermission.group_id == group_id)
30 query = query.filter(
33 query = query.filter(GroupResourcePermission.resource_id == resource_id)
31 GroupResourcePermission.resource_id == resource_id)
32 query = query.filter(GroupResourcePermission.perm_name == perm_name)
34 query = query.filter(GroupResourcePermission.perm_name == perm_name)
33 return query.first()
35 return query.first()
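The class statement above deliberately shadows the imported name: Python resolves the base class before rebinding GroupResourcePermissionService, so the local subclass extends the ziggurat service while keeping its public name. In miniature:

    class Service:            # imported name
        pass

    class Service(Service):   # legal: the base is looked up first
        @classmethod
        def extra(cls):
            return "local extension"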
@@ -28,8 +28,7 b' log = logging.getLogger(__name__)'
28
28
29 class LogService(BaseService):
29 class LogService(BaseService):
30 @classmethod
30 @classmethod
31 def get_logs(cls, resource_ids=None, filter_settings=None,
31 def get_logs(cls, resource_ids=None, filter_settings=None, db_session=None):
32 db_session=None):
33 # ensure we always have id's passed
32 # ensure we always have id's passed
34 if not resource_ids:
33 if not resource_ids:
35 # raise Exception('No App ID passed')
34 # raise Exception('No App ID passed')
@@ -37,18 +36,17 b' class LogService(BaseService):'
37 db_session = get_db_session(db_session)
36 db_session = get_db_session(db_session)
38 q = db_session.query(Log)
37 q = db_session.query(Log)
39 q = q.filter(Log.resource_id.in_(resource_ids))
38 q = q.filter(Log.resource_id.in_(resource_ids))
40 if filter_settings.get('start_date'):
39 if filter_settings.get("start_date"):
41 q = q.filter(Log.timestamp >= filter_settings.get('start_date'))
40 q = q.filter(Log.timestamp >= filter_settings.get("start_date"))
42 if filter_settings.get('end_date'):
41 if filter_settings.get("end_date"):
43 q = q.filter(Log.timestamp <= filter_settings.get('end_date'))
42 q = q.filter(Log.timestamp <= filter_settings.get("end_date"))
44 if filter_settings.get('log_level'):
43 if filter_settings.get("log_level"):
45 q = q.filter(
44 q = q.filter(Log.log_level == filter_settings.get("log_level").upper())
46 Log.log_level == filter_settings.get('log_level').upper())
45 if filter_settings.get("request_id"):
47 if filter_settings.get('request_id'):
46 request_id = filter_settings.get("request_id", "")
48 request_id = filter_settings.get('request_id', '')
47 q = q.filter(Log.request_id == request_id.replace("-", ""))
49 q = q.filter(Log.request_id == request_id.replace('-', ''))
48 if filter_settings.get("namespace"):
50 if filter_settings.get('namespace'):
49 q = q.filter(Log.namespace == filter_settings.get("namespace"))
51 q = q.filter(Log.namespace == filter_settings.get('namespace'))
52 q = q.order_by(sa.desc(Log.timestamp))
50 q = q.order_by(sa.desc(Log.timestamp))
53 return q
51 return q
54
52
@@ -60,20 +58,18 b' class LogService(BaseService):'
60 query = {
58 query = {
61 "query": {
59 "query": {
62 "filtered": {
60 "filtered": {
63 "filter": {
61 "filter": {"and": [{"terms": {"resource_id": list(app_ids)}}]}
64 "and": [{"terms": {"resource_id": list(app_ids)}}]
65 }
66 }
62 }
67 }
63 }
68 }
64 }
69
65
70 start_date = filter_settings.get('start_date')
66 start_date = filter_settings.get("start_date")
71 end_date = filter_settings.get('end_date')
67 end_date = filter_settings.get("end_date")
72 filter_part = query['query']['filtered']['filter']['and']
68 filter_part = query["query"]["filtered"]["filter"]["and"]
73
69
74 for tag in filter_settings.get('tags', []):
70 for tag in filter_settings.get("tags", []):
75 tag_values = [v.lower() for v in tag['value']]
71 tag_values = [v.lower() for v in tag["value"]]
76 key = "tags.%s.values" % tag['name'].replace('.', '_')
72 key = "tags.%s.values" % tag["name"].replace(".", "_")
77 filter_part.append({"terms": {key: tag_values}})
73 filter_part.append({"terms": {key: tag_values}})
78
74
79 date_range = {"range": {"timestamp": {}}}
75 date_range = {"range": {"timestamp": {}}}
@@ -84,26 +80,21 b' class LogService(BaseService):'
84 if start_date or end_date:
80 if start_date or end_date:
85 filter_part.append(date_range)
81 filter_part.append(date_range)
86
82
87 levels = filter_settings.get('level')
83 levels = filter_settings.get("level")
88 if levels:
84 if levels:
89 filter_part.append({"terms": {'log_level': levels}})
85 filter_part.append({"terms": {"log_level": levels}})
90 namespaces = filter_settings.get('namespace')
86 namespaces = filter_settings.get("namespace")
91 if namespaces:
87 if namespaces:
92 filter_part.append({"terms": {'namespace': namespaces}})
88 filter_part.append({"terms": {"namespace": namespaces}})
93
89
94 request_ids = filter_settings.get('request_id')
90 request_ids = filter_settings.get("request_id")
95 if request_ids:
91 if request_ids:
96 filter_part.append({"terms": {'request_id': request_ids}})
92 filter_part.append({"terms": {"request_id": request_ids}})
97
93
98 messages = filter_settings.get('message')
94 messages = filter_settings.get("message")
99 if messages:
95 if messages:
100 query['query']['filtered']['query'] = {
96 query["query"]["filtered"]["query"] = {
101 'match': {
97 "match": {"message": {"query": " ".join(messages), "operator": "and"}}
102 'message': {
103 'query': ' '.join(messages),
104 'operator': 'and'
105 }
106 }
107 }
98 }
108 return query
99 return query
109
100
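es_query_builder accumulates filters into the "and" list of an Elasticsearch 1.x filtered query; every optional setting just appends another terms or range clause. A condensed sketch:

    def build_query(app_ids, start_date=None, levels=None):
        filter_part = [{"terms": {"resource_id": list(app_ids)}}]
        if start_date:
            filter_part.append({"range": {"timestamp": {"gte": start_date}}})
        if levels:
            filter_part.append({"terms": {"log_level": levels}})
        # pre-2.x "filtered" DSL, as used throughout this module
        return {"query": {"filtered": {"filter": {"and": filter_part}}}}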
@@ -118,76 +109,96 b' class LogService(BaseService):'
118 "field": "timestamp",
109 "field": "timestamp",
119 "interval": "1h",
110 "interval": "1h",
120 "min_doc_count": 0,
111 "min_doc_count": 0,
121 'extended_bounds': {
112 "extended_bounds": {
122 'max': filter_settings.get('end_date'),
113 "max": filter_settings.get("end_date"),
123 'min': filter_settings.get('start_date')}
114 "min": filter_settings.get("start_date"),
115 },
124 }
116 }
125 }
117 }
126 }
118 }
127 log.debug(es_query)
119 log.debug(es_query)
128 index_names = es_index_name_limiter(filter_settings.get('start_date'),
120 index_names = es_index_name_limiter(
129 filter_settings.get('end_date'),
121 filter_settings.get("start_date"),
130 ixtypes=['logs'])
122 filter_settings.get("end_date"),
123 ixtypes=["logs"],
124 )
131 if index_names:
125 if index_names:
132 results = Datastores.es.search(
126 results = Datastores.es.search(
133 body=es_query, index=index_names, doc_type='log', size=0)
127 body=es_query, index=index_names, doc_type="log", size=0
128 )
134 else:
129 else:
135 results = []
130 results = []
136 return results
131 return results
137
132
138 @classmethod
133 @classmethod
139 def get_search_iterator(cls, app_ids=None, page=1, items_per_page=50,
134 def get_search_iterator(
140 order_by=None, filter_settings=None, limit=None):
135 cls,
136 app_ids=None,
137 page=1,
138 items_per_page=50,
139 order_by=None,
140 filter_settings=None,
141 limit=None,
142 ):
141 if not app_ids:
143 if not app_ids:
142 return {}, 0
144 return {}, 0
143
145
144 es_query = cls.es_query_builder(app_ids, filter_settings)
146 es_query = cls.es_query_builder(app_ids, filter_settings)
145 sort_query = {
147 sort_query = {"sort": [{"timestamp": {"order": "desc"}}]}
146 "sort": [
147 {"timestamp": {"order": "desc"}}
148 ]
149 }
150 es_query.update(sort_query)
148 es_query.update(sort_query)
151 log.debug(es_query)
149 log.debug(es_query)
152 es_from = (page - 1) * items_per_page
150 es_from = (page - 1) * items_per_page
153 index_names = es_index_name_limiter(filter_settings.get('start_date'),
151 index_names = es_index_name_limiter(
154 filter_settings.get('end_date'),
152 filter_settings.get("start_date"),
155 ixtypes=['logs'])
153 filter_settings.get("end_date"),
154 ixtypes=["logs"],
155 )
156 if not index_names:
156 if not index_names:
157 return {}, 0
157 return {}, 0
158
158
159 results = Datastores.es.search(body=es_query, index=index_names,
159 results = Datastores.es.search(
160 doc_type='log', size=items_per_page,
160 body=es_query,
161 from_=es_from)
161 index=index_names,
162 if results['hits']['total'] > 5000:
162 doc_type="log",
163 size=items_per_page,
164 from_=es_from,
165 )
166 if results["hits"]["total"] > 5000:
163 count = 5000
167 count = 5000
164 else:
168 else:
165 count = results['hits']['total']
169 count = results["hits"]["total"]
166 return results['hits'], count
170 return results["hits"], count
167
171
168 @classmethod
172 @classmethod
169 def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
173 def get_paginator_by_app_ids(
170 items_per_page=50, order_by=None,
174 cls,
171 filter_settings=None,
175 app_ids=None,
172 exclude_columns=None, db_session=None):
176 page=1,
177 item_count=None,
178 items_per_page=50,
179 order_by=None,
180 filter_settings=None,
181 exclude_columns=None,
182 db_session=None,
183 ):
173 if not filter_settings:
184 if not filter_settings:
174 filter_settings = {}
185 filter_settings = {}
175 results, item_count = cls.get_search_iterator(app_ids, page,
186 results, item_count = cls.get_search_iterator(
176 items_per_page, order_by,
187 app_ids, page, items_per_page, order_by, filter_settings
177 filter_settings)
188 )
178 paginator = paginate.Page([],
189 paginator = paginate.Page(
179 item_count=item_count,
190 [], item_count=item_count, items_per_page=items_per_page, **filter_settings
180 items_per_page=items_per_page,
191 )
181 **filter_settings)
192 ordered_ids = tuple(
182 ordered_ids = tuple(item['_source']['pg_id']
193 item["_source"]["pg_id"] for item in results.get("hits", [])
183 for item in results.get('hits', []))
194 )
184
195
185 sorted_instance_list = []
196 sorted_instance_list = []
186 if ordered_ids:
197 if ordered_ids:
187 db_session = get_db_session(db_session)
198 db_session = get_db_session(db_session)
188 query = db_session.query(Log)
199 query = db_session.query(Log)
189 query = query.filter(Log.log_id.in_(ordered_ids))
200 query = query.filter(Log.log_id.in_(ordered_ids))
190 query = query.order_by(sa.desc('timestamp'))
201 query = query.order_by(sa.desc("timestamp"))
191 sa_items = query.all()
202 sa_items = query.all()
192 # resort by score
203 # resort by score
193 for i_id in ordered_ids:
204 for i_id in ordered_ids:
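The search flow here is: query Elasticsearch for ids, hydrate the rows from Postgres, then restore Elasticsearch's ranking, since the SQL IN-query returns rows in arbitrary order. The resort step in isolation:

    def resort_to_hit_order(ordered_ids, rows, key):
        # key extracts the id used in the ES documents, e.g. row.log_id
        by_id = {key(row): row for row in rows}
        return [by_id[i] for i in ordered_ids if i in by_id]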
@@ -198,14 +209,14 b' class LogService(BaseService):'
198 return paginator
209 return paginator
199
210
200 @classmethod
211 @classmethod
201 def query_by_primary_key_and_namespace(cls, list_of_pairs,
212 def query_by_primary_key_and_namespace(cls, list_of_pairs, db_session=None):
202 db_session=None):
203 db_session = get_db_session(db_session)
213 db_session = get_db_session(db_session)
204 list_of_conditions = []
214 list_of_conditions = []
205 query = db_session.query(Log)
215 query = db_session.query(Log)
206 for pair in list_of_pairs:
216 for pair in list_of_pairs:
207 list_of_conditions.append(sa.and_(
217 list_of_conditions.append(
208 Log.primary_key == pair['pk'], Log.namespace == pair['ns']))
218 sa.and_(Log.primary_key == pair["pk"], Log.namespace == pair["ns"])
219 )
209 query = query.filter(sa.or_(*list_of_conditions))
220 query = query.filter(sa.or_(*list_of_conditions))
210 query = query.order_by(sa.asc(Log.timestamp), sa.asc(Log.log_id))
221 query = query.order_by(sa.asc(Log.timestamp), sa.asc(Log.log_id))
211 return query
222 return query
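query_by_primary_key_and_namespace builds an OR of per-pair AND clauses, the same sa.or_(*conditions) shape used in the ConfigService hunk earlier. Generalized:

    import sqlalchemy as sa

    def filter_by_pairs(query, model, pairs):
        # WHERE (pk = .. AND ns = ..) OR (pk = .. AND ns = ..) OR ...
        conditions = [
            sa.and_(model.primary_key == p["pk"], model.namespace == p["ns"])
            for p in pairs
        ]
        return query.filter(sa.or_(*conditions))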
@@ -38,8 +38,9 b' class PluginConfigService(BaseService):'
38 return query.first()
38 return query.first()
39
39
40 @classmethod
40 @classmethod
41 def by_query(cls, resource_id=None, plugin_name=None,
41 def by_query(
42 section=None, db_session=None):
42 cls, resource_id=None, plugin_name=None, section=None, db_session=None
43 ):
43 db_session = get_db_session(db_session)
44 db_session = get_db_session(db_session)
44
45
45 query = db_session.query(PluginConfig)
46 query = db_session.query(PluginConfig)
@@ -37,21 +37,24 b' class ReportService(BaseService):'
37 return q
37 return q
38
38
39 @classmethod
39 @classmethod
40 def generate_stat_rows(cls, report, resource, report_group, occurences=1,
40 def generate_stat_rows(
41 db_session=None):
41 cls, report, resource, report_group, occurences=1, db_session=None
42 ):
42 """
43 """
43 Generates timeseries for this report's group
44 Generates timeseries for this report's group
44 """
45 """
45 db_session = get_db_session(db_session)
46 db_session = get_db_session(db_session)
46 stats = ReportStat(resource_id=report.resource_id,
47 stats = ReportStat(
47 group_id=report_group.id,
48 resource_id=report.resource_id,
48 start_interval=report.start_time,
49 group_id=report_group.id,
49 owner_user_id=resource.owner_user_id,
50 start_interval=report.start_time,
50 server_name=report.tags.get('server_name'),
51 owner_user_id=resource.owner_user_id,
51 view_name=report.tags.get('view_name'),
52 server_name=report.tags.get("server_name"),
52 type=report.report_type,
53 view_name=report.tags.get("view_name"),
53 occurences=occurences,
54 type=report.report_type,
54 duration=report.duration)
55 occurences=occurences,
56 duration=report.duration,
57 )
55 db_session.add(stats)
58 db_session.add(stats)
56 db_session.flush()
59 db_session.flush()
57 return stats
60 return stats
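db_session.flush() at the end sends the INSERT immediately, so the new ReportStat receives its primary key while the enclosing transaction remains open (and can still roll back). The idiom:

    def add_and_flush(db_session, obj):
        db_session.add(obj)
        db_session.flush()  # PK assigned now; COMMIT happens later
        return obj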
@@ -35,71 +35,98 b' log = logging.getLogger(__name__)'
35
35
36 class ReportGroupService(BaseService):
36 class ReportGroupService(BaseService):
37 @classmethod
37 @classmethod
38 def get_trending(cls, request, filter_settings, limit=15,
38 def get_trending(cls, request, filter_settings, limit=15, db_session=None):
39 db_session=None):
40 """
39 """
41 Returns report groups trending for specific time interval
40 Returns report groups trending for specific time interval
42 """
41 """
43 db_session = get_db_session(db_session)
42 db_session = get_db_session(db_session)
44
43
45 tags = []
44 tags = []
46 if filter_settings.get('tags'):
45 if filter_settings.get("tags"):
47 for tag in filter_settings['tags']:
46 for tag in filter_settings["tags"]:
48 tags.append(
47 tags.append(
49 {'terms': {
48 {"terms": {"tags.{}.values".format(tag["name"]): tag["value"]}}
50 'tags.{}.values'.format(tag['name']): tag['value']}})
49 )
51
50
52 index_names = es_index_name_limiter(
51 index_names = es_index_name_limiter(
53 start_date=filter_settings['start_date'],
52 start_date=filter_settings["start_date"],
54 end_date=filter_settings['end_date'],
53 end_date=filter_settings["end_date"],
55 ixtypes=['reports'])
54 ixtypes=["reports"],
55 )
56
56
57 if not index_names or not filter_settings['resource']:
57 if not index_names or not filter_settings["resource"]:
58 return []
58 return []
59
59
60 es_query = {
60 es_query = {
61 'aggs': {'parent_agg': {'aggs': {'groups': {'aggs': {
61 "aggs": {
62 'sub_agg': {
62 "parent_agg": {
63 'value_count': {'field': 'tags.group_id.values'}}},
63 "aggs": {
64 'filter': {'exists': {'field': 'tags.group_id.values'}}}},
64 "groups": {
65 'terms': {'field': 'tags.group_id.values', 'size': limit}}},
65 "aggs": {
66 'query': {'filtered': {
66 "sub_agg": {
67 'filter': {'and': [
67 "value_count": {"field": "tags.group_id.values"}
68 {'terms': {
68 }
69 'resource_id': [filter_settings['resource'][0]]}
69 },
70 "filter": {"exists": {"field": "tags.group_id.values"}},
71 }
70 },
72 },
71 {'range': {'timestamp': {
73 "terms": {"field": "tags.group_id.values", "size": limit},
72 'gte': filter_settings['start_date'],
73 'lte': filter_settings['end_date']}}}]
74 }
74 }
75 }}
75 },
76 "query": {
77 "filtered": {
78 "filter": {
79 "and": [
80 {
81 "terms": {
82 "resource_id": [filter_settings["resource"][0]]
83 }
84 },
85 {
86 "range": {
87 "timestamp": {
88 "gte": filter_settings["start_date"],
89 "lte": filter_settings["end_date"],
90 }
91 }
92 },
93 ]
94 }
95 }
96 },
76 }
97 }
77 if tags:
98 if tags:
78 es_query['query']['filtered']['filter']['and'].extend(tags)
99 es_query["query"]["filtered"]["filter"]["and"].extend(tags)
79
100
80 result = Datastores.es.search(
101 result = Datastores.es.search(
81 body=es_query, index=index_names, doc_type='log', size=0)
102 body=es_query, index=index_names, doc_type="log", size=0
103 )
82 series = []
104 series = []
83 for bucket in result['aggregations']['parent_agg']['buckets']:
105 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
84 series.append({
106 series.append(
85 'key': bucket['key'],
107 {"key": bucket["key"], "groups": bucket["groups"]["sub_agg"]["value"]}
86 'groups': bucket['groups']['sub_agg']['value']
108 )
87 })
88
109
89 report_groups_d = {}
110 report_groups_d = {}
90 for g in series:
111 for g in series:
91 report_groups_d[int(g['key'])] = g['groups'] or 0
112 report_groups_d[int(g["key"])] = g["groups"] or 0
92
113
93 query = db_session.query(ReportGroup)
114 query = db_session.query(ReportGroup)
94 query = query.filter(ReportGroup.id.in_(list(report_groups_d.keys())))
115 query = query.filter(ReportGroup.id.in_(list(report_groups_d.keys())))
95 query = query.options(
116 query = query.options(sa.orm.joinedload(ReportGroup.last_report_ref))
96 sa.orm.joinedload(ReportGroup.last_report_ref))
117 results = [(report_groups_d[group.id], group) for group in query]
97 results = [(report_groups_d[group.id], group,) for group in query]
118 return sorted(results, reverse=True, key=lambda x: x[0])
98 return sorted(results, reverse=True, key=lambda x:x[0])
99
119
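The method above relies on a fixed response shape: a terms aggregation keyed by group id, each bucket carrying a value_count sub-aggregation. A minimal sketch of the parsing step in isolation (the response fragment is invented but follows the Elasticsearch 1.x layout the query requests):

    sample = {
        "aggregations": {
            "parent_agg": {
                "buckets": [
                    {"key": "123", "groups": {"sub_agg": {"value": 7}}},
                    {"key": "456", "groups": {"sub_agg": {"value": 2}}},
                ]
            }
        }
    }
    series = [
        {"key": b["key"], "groups": b["groups"]["sub_agg"]["value"]}
        for b in sample["aggregations"]["parent_agg"]["buckets"]
    ]
    assert series[0] == {"key": "123", "groups": 7}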
100 @classmethod
120 @classmethod
101 def get_search_iterator(cls, app_ids=None, page=1, items_per_page=50,
121 def get_search_iterator(
102 order_by=None, filter_settings=None, limit=None):
122 cls,
123 app_ids=None,
124 page=1,
125 items_per_page=50,
126 order_by=None,
127 filter_settings=None,
128 limit=None,
129 ):
103 if not app_ids:
130 if not app_ids:
104 return {}
131 return {}
105 if not filter_settings:
132 if not filter_settings:
@@ -109,38 +136,29 b' class ReportGroupService(BaseService):'
109 "size": 0,
136 "size": 0,
110 "query": {
137 "query": {
111 "filtered": {
138 "filtered": {
112 "filter": {
139 "filter": {"and": [{"terms": {"resource_id": list(app_ids)}}]}
113 "and": [{"terms": {"resource_id": list(app_ids)}}]
114 }
115 }
140 }
116 },
141 },
117
118 "aggs": {
142 "aggs": {
119 "top_groups": {
143 "top_groups": {
120 "terms": {
144 "terms": {
121 "size": 5000,
145 "size": 5000,
122 "field": "_parent",
146 "field": "_parent",
123 "order": {
147 "order": {"newest": "desc"},
124 "newest": "desc"
125 }
126 },
148 },
127 "aggs": {
149 "aggs": {
128 "top_reports_hits": {
150 "top_reports_hits": {
129 "top_hits": {"size": 1,
151 "top_hits": {"size": 1, "sort": {"start_time": "desc"}}
130 "sort": {"start_time": "desc"}
131 }
132 },
152 },
133 "newest": {
153 "newest": {"max": {"field": "start_time"}},
134 "max": {"field": "start_time"}
154 },
135 }
136 }
137 }
155 }
138 }
156 },
139 }
157 }
140
158
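The top_groups aggregation buckets child report documents by their _parent (the report group), orders the buckets by the "newest" max-aggregation, and keeps a single top_hits document per bucket. Extracting the ranked ids then looks like this (a sketch of the consumption step; the same walk appears in get_paginator_by_app_ids below):

    def ordered_group_ids(aggregations):
        # Buckets arrive sorted by the "newest" agg (desc), so the
        # resulting pg_id list is already in display order.
        ids = []
        for item in aggregations["top_groups"]["buckets"]:
            newest_hit = item["top_reports_hits"]["hits"]["hits"][0]
            ids.append(newest_hit["_source"]["pg_id"])
        return ids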
141 start_date = filter_settings.get('start_date')
159 start_date = filter_settings.get("start_date")
142 end_date = filter_settings.get('end_date')
160 end_date = filter_settings.get("end_date")
143 filter_part = query['query']['filtered']['filter']['and']
161 filter_part = query["query"]["filtered"]["filter"]["and"]
144 date_range = {"range": {"start_time": {}}}
162 date_range = {"range": {"start_time": {}}}
145 if start_date:
163 if start_date:
146 date_range["range"]["start_time"]["gte"] = start_date
164 date_range["range"]["start_time"]["gte"] = start_date
@@ -149,131 +167,160 b' class ReportGroupService(BaseService):'
149 if start_date or end_date:
167 if start_date or end_date:
150 filter_part.append(date_range)
168 filter_part.append(date_range)
151
169
152 priorities = filter_settings.get('priority')
170 priorities = filter_settings.get("priority")
153
171
154 for tag in filter_settings.get('tags', []):
172 for tag in filter_settings.get("tags", []):
155 tag_values = [v.lower() for v in tag['value']]
173 tag_values = [v.lower() for v in tag["value"]]
156 key = "tags.%s.values" % tag['name'].replace('.', '_')
174 key = "tags.%s.values" % tag["name"].replace(".", "_")
157 filter_part.append({"terms": {key: tag_values}})
175 filter_part.append({"terms": {key: tag_values}})
158
176
159 if priorities:
177 if priorities:
160 filter_part.append({"has_parent": {
178 filter_part.append(
161 "parent_type": "report_group",
179 {
162 "query": {
180 "has_parent": {
163 "terms": {'priority': priorities}
181 "parent_type": "report_group",
164 }}})
182 "query": {"terms": {"priority": priorities}},
183 }
184 }
185 )
165
186
166 min_occurences = filter_settings.get('min_occurences')
187 min_occurences = filter_settings.get("min_occurences")
167 if min_occurences:
188 if min_occurences:
168 filter_part.append({"has_parent": {
189 filter_part.append(
169 "parent_type": "report_group",
190 {
170 "query": {
191 "has_parent": {
171 "range": {'occurences': {"gte": min_occurences[0]}}
192 "parent_type": "report_group",
172 }}})
193 "query": {"range": {"occurences": {"gte": min_occurences[0]}}},
194 }
195 }
196 )
173
197
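Reports are indexed as children of report_group documents, so any group-level attribute (priority, occurences, the read/fixed flags below) is matched through a has_parent filter. The repeated shape could be captured by a helper like this (hypothetical, not in the source):

    def parent_filter(query):
        # Match child "report" docs whose parent "report_group"
        # satisfies the given query (ES 1.x parent/child filter).
        return {"has_parent": {"parent_type": "report_group", "query": query}}

    # e.g. the priority filter built above:
    # filter_part.append(parent_filter({"terms": {"priority": priorities}}))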
174 min_duration = filter_settings.get('min_duration')
198 min_duration = filter_settings.get("min_duration")
175 max_duration = filter_settings.get('max_duration')
199 max_duration = filter_settings.get("max_duration")
176
200
177 request_ids = filter_settings.get('request_id')
201 request_ids = filter_settings.get("request_id")
178 if request_ids:
202 if request_ids:
179 filter_part.append({"terms": {'request_id': request_ids}})
203 filter_part.append({"terms": {"request_id": request_ids}})
180
204
181 duration_range = {"range": {"average_duration": {}}}
205 duration_range = {"range": {"average_duration": {}}}
182 if min_duration:
206 if min_duration:
183 duration_range["range"]["average_duration"]["gte"] = \
207 duration_range["range"]["average_duration"]["gte"] = min_duration[0]
184 min_duration[0]
185 if max_duration:
208 if max_duration:
186 duration_range["range"]["average_duration"]["lte"] = \
209 duration_range["range"]["average_duration"]["lte"] = max_duration[0]
187 max_duration[0]
188 if min_duration or max_duration:
210 if min_duration or max_duration:
189 filter_part.append({"has_parent": {
211 filter_part.append(
190 "parent_type": "report_group",
212 {"has_parent": {"parent_type": "report_group", "query": duration_range}}
191 "query": duration_range}})
213 )
192
214
193 http_status = filter_settings.get('http_status')
215 http_status = filter_settings.get("http_status")
194 report_type = filter_settings.get('report_type', [ReportType.error])
216 report_type = filter_settings.get("report_type", [ReportType.error])
195 # set error report type if http status is not found
217 # set error report type if http status is not found
196 # and we are dealing with slow reports
218 # and we are dealing with slow reports
197 if not http_status or ReportType.slow in report_type:
219 if not http_status or ReportType.slow in report_type:
198 filter_part.append({"terms": {'report_type': report_type}})
220 filter_part.append({"terms": {"report_type": report_type}})
199 if http_status:
221 if http_status:
200 filter_part.append({"terms": {'http_status': http_status}})
222 filter_part.append({"terms": {"http_status": http_status}})
201
223
202 messages = filter_settings.get('message')
224 messages = filter_settings.get("message")
203 if messages:
225 if messages:
204 condition = {'match': {"message": ' '.join(messages)}}
226 condition = {"match": {"message": " ".join(messages)}}
205 query['query']['filtered']['query'] = condition
227 query["query"]["filtered"]["query"] = condition
206 errors = filter_settings.get('error')
228 errors = filter_settings.get("error")
207 if errors:
229 if errors:
208 condition = {'match': {"error": ' '.join(errors)}}
230 condition = {"match": {"error": " ".join(errors)}}
209 query['query']['filtered']['query'] = condition
231 query["query"]["filtered"]["query"] = condition
210 url_domains = filter_settings.get('url_domain')
232 url_domains = filter_settings.get("url_domain")
211 if url_domains:
233 if url_domains:
212 condition = {'terms': {"url_domain": url_domains}}
234 condition = {"terms": {"url_domain": url_domains}}
213 query['query']['filtered']['query'] = condition
235 query["query"]["filtered"]["query"] = condition
214 url_paths = filter_settings.get('url_path')
236 url_paths = filter_settings.get("url_path")
215 if url_paths:
237 if url_paths:
216 condition = {'terms': {"url_path": url_paths}}
238 condition = {"terms": {"url_path": url_paths}}
217 query['query']['filtered']['query'] = condition
239 query["query"]["filtered"]["query"] = condition
218
240
219 if filter_settings.get('report_status'):
241 if filter_settings.get("report_status"):
220 for status in filter_settings.get('report_status'):
242 for status in filter_settings.get("report_status"):
221 if status == 'never_reviewed':
243 if status == "never_reviewed":
222 filter_part.append({"has_parent": {
244 filter_part.append(
223 "parent_type": "report_group",
245 {
224 "query": {
246 "has_parent": {
225 "term": {"read": False}
247 "parent_type": "report_group",
226 }}})
248 "query": {"term": {"read": False}},
227 elif status == 'reviewed':
249 }
228 filter_part.append({"has_parent": {
250 }
229 "parent_type": "report_group",
251 )
230 "query": {
252 elif status == "reviewed":
231 "term": {"read": True}
253 filter_part.append(
232 }}})
254 {
233 elif status == 'public':
255 "has_parent": {
234 filter_part.append({"has_parent": {
256 "parent_type": "report_group",
235 "parent_type": "report_group",
257 "query": {"term": {"read": True}},
236 "query": {
258 }
237 "term": {"public": True}
259 }
238 }}})
260 )
239 elif status == 'fixed':
261 elif status == "public":
240 filter_part.append({"has_parent": {
262 filter_part.append(
241 "parent_type": "report_group",
263 {
242 "query": {
264 "has_parent": {
243 "term": {"fixed": True}
265 "parent_type": "report_group",
244 }}})
266 "query": {"term": {"public": True}},
267 }
268 }
269 )
270 elif status == "fixed":
271 filter_part.append(
272 {
273 "has_parent": {
274 "parent_type": "report_group",
275 "query": {"term": {"fixed": True}},
276 }
277 }
278 )
245
279
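The four report_status branches above differ only in which boolean flag they test on the parent group; a table-driven equivalent (a sketch, not the author's code) makes the symmetry explicit:

    STATUS_FLAGS = {
        "never_reviewed": {"read": False},
        "reviewed": {"read": True},
        "public": {"public": True},
        "fixed": {"fixed": True},
    }
    for status in filter_settings.get("report_status", []):
        flag = STATUS_FLAGS.get(status)
        if flag:
            filter_part.append(
                {
                    "has_parent": {
                        "parent_type": "report_group",
                        "query": {"term": flag},
                    }
                }
            )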
246 # logging.getLogger('pyelasticsearch').setLevel(logging.DEBUG)
280 # logging.getLogger('pyelasticsearch').setLevel(logging.DEBUG)
247 index_names = es_index_name_limiter(filter_settings.get('start_date'),
281 index_names = es_index_name_limiter(
248 filter_settings.get('end_date'),
282 filter_settings.get("start_date"),
249 ixtypes=['reports'])
283 filter_settings.get("end_date"),
284 ixtypes=["reports"],
285 )
250 if index_names:
286 if index_names:
251 results = Datastores.es.search(
287 results = Datastores.es.search(
252 body=query, index=index_names, doc_type=["report", "report_group"],
288 body=query,
253 size=0)
289 index=index_names,
290 doc_type=["report", "report_group"],
291 size=0,
292 )
254 else:
293 else:
255 return []
294 return []
256 return results['aggregations']
295 return results["aggregations"]
257
296
258 @classmethod
297 @classmethod
259 def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
298 def get_paginator_by_app_ids(
260 items_per_page=50, order_by=None,
299 cls,
261 filter_settings=None,
300 app_ids=None,
262 exclude_columns=None, db_session=None):
301 page=1,
302 item_count=None,
303 items_per_page=50,
304 order_by=None,
305 filter_settings=None,
306 exclude_columns=None,
307 db_session=None,
308 ):
263 if not filter_settings:
309 if not filter_settings:
264 filter_settings = {}
310 filter_settings = {}
265 results = cls.get_search_iterator(app_ids, page, items_per_page,
311 results = cls.get_search_iterator(
266 order_by, filter_settings)
312 app_ids, page, items_per_page, order_by, filter_settings
313 )
267
314
268 ordered_ids = []
315 ordered_ids = []
269 if results:
316 if results:
270 for item in results['top_groups']['buckets']:
317 for item in results["top_groups"]["buckets"]:
271 pg_id = item['top_reports_hits']['hits']['hits'][0]['_source'][
318 pg_id = item["top_reports_hits"]["hits"]["hits"][0]["_source"]["pg_id"]
272 'pg_id']
273 ordered_ids.append(pg_id)
319 ordered_ids.append(pg_id)
274 log.info(filter_settings)
320 log.info(filter_settings)
275 paginator = paginate.Page(ordered_ids, items_per_page=items_per_page,
321 paginator = paginate.Page(
276 **filter_settings)
322 ordered_ids, items_per_page=items_per_page, **filter_settings
323 )
277 sa_items = ()
324 sa_items = ()
278 if paginator.items:
325 if paginator.items:
279 db_session = get_db_session(db_session)
326 db_session = get_db_session(db_session)
@@ -281,22 +328,22 b' class ReportGroupService(BaseService):'
281 query = db_session.query(Report)
328 query = db_session.query(Report)
282 query = query.options(sa.orm.joinedload(Report.report_group))
329 query = query.options(sa.orm.joinedload(Report.report_group))
283 query = query.filter(Report.id.in_(paginator.items))
330 query = query.filter(Report.id.in_(paginator.items))
284 if filter_settings.get('order_col'):
331 if filter_settings.get("order_col"):
285 order_col = filter_settings.get('order_col')
332 order_col = filter_settings.get("order_col")
286 if filter_settings.get('order_dir') == 'dsc':
333 if filter_settings.get("order_dir") == "dsc":
287 sort_on = 'desc'
334 sort_on = "desc"
288 else:
335 else:
289 sort_on = 'asc'
336 sort_on = "asc"
290 if order_col == 'when':
337 if order_col == "when":
291 order_col = 'last_timestamp'
338 order_col = "last_timestamp"
292 query = query.order_by(getattr(sa, sort_on)(
339 query = query.order_by(
293 getattr(ReportGroup, order_col)))
340 getattr(sa, sort_on)(getattr(ReportGroup, order_col))
341 )
294 sa_items = query.all()
342 sa_items = query.all()
295 sorted_instance_list = []
343 sorted_instance_list = []
296 for i_id in ordered_ids:
344 for i_id in ordered_ids:
297 for report in sa_items:
345 for report in sa_items:
298 if (str(report.id) == i_id and
346 if str(report.id) == i_id and report not in sorted_instance_list:
299 report not in sorted_instance_list):
300 sorted_instance_list.append(report)
347 sorted_instance_list.append(report)
301 paginator.sa_items = sorted_instance_list
348 paginator.sa_items = sorted_instance_list
302 return paginator
349 return paginator
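The nested scan at the end re-orders the SQLAlchemy rows to match the ES ranking while dropping duplicates; with more than a handful of rows, a dict lookup does the same work in one pass (hypothetical refactor, same result for string ids):

    by_id = {str(report.id): report for report in sa_items}
    seen = set()
    sorted_instance_list = []
    for i_id in ordered_ids:
        if i_id in by_id and i_id not in seen:
            seen.add(i_id)
            sorted_instance_list.append(by_id[i_id])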
@@ -314,8 +361,7 b' class ReportGroupService(BaseService):'
314 @classmethod
361 @classmethod
315 def by_id(cls, group_id, app_ids=None, db_session=None):
362 def by_id(cls, group_id, app_ids=None, db_session=None):
316 db_session = get_db_session(db_session)
363 db_session = get_db_session(db_session)
317 q = db_session.query(ReportGroup).filter(
364 q = db_session.query(ReportGroup).filter(ReportGroup.id == int(group_id))
318 ReportGroup.id == int(group_id))
319 if app_ids:
365 if app_ids:
320 q = q.filter(ReportGroup.resource_id.in_(app_ids))
366 q = q.filter(ReportGroup.resource_id.in_(app_ids))
321 return q.first()
367 return q.first()
@@ -328,8 +374,9 b' class ReportGroupService(BaseService):'
328 return query
374 return query
329
375
330 @classmethod
376 @classmethod
331 def by_hash_and_resource(cls, resource_id, grouping_hash, since_when=None,
377 def by_hash_and_resource(
332 db_session=None):
378 cls, resource_id, grouping_hash, since_when=None, db_session=None
379 ):
333 db_session = get_db_session(db_session)
380 db_session = get_db_session(db_session)
334 q = db_session.query(ReportGroup)
381 q = db_session.query(ReportGroup)
335 q = q.filter(ReportGroup.resource_id == resource_id)
382 q = q.filter(ReportGroup.resource_id == resource_id)
@@ -340,8 +387,7 b' class ReportGroupService(BaseService):'
340 return q.first()
387 return q.first()
341
388
342 @classmethod
389 @classmethod
343 def users_commenting(cls, report_group, exclude_user_id=None,
390 def users_commenting(cls, report_group, exclude_user_id=None, db_session=None):
344 db_session=None):
345 db_session = get_db_session(None, report_group)
391 db_session = get_db_session(None, report_group)
346 query = db_session.query(User).distinct()
392 query = db_session.query(User).distinct()
347 query = query.filter(User.id == ReportComment.owner_id)
393 query = query.filter(User.id == ReportComment.owner_id)
@@ -355,7 +401,7 b' class ReportGroupService(BaseService):'
355 db_session = get_db_session(db_session)
401 db_session = get_db_session(db_session)
356 query = db_session.query(sa.func.count(Report.username))
402 query = db_session.query(sa.func.count(Report.username))
357 query = query.filter(Report.group_id == report_group.id)
403 query = query.filter(Report.group_id == report_group.id)
358 query = query.filter(Report.username != '')
404 query = query.filter(Report.username != "")
359 query = query.filter(Report.username != None)
405 query = query.filter(Report.username != None)
360 query = query.group_by(Report.username)
406 query = query.group_by(Report.username)
361 return query.count()
407 return query.count()
@@ -363,11 +409,11 b' class ReportGroupService(BaseService):'
363 @classmethod
409 @classmethod
364 def top_affected_users(cls, report_group, db_session=None):
410 def top_affected_users(cls, report_group, db_session=None):
365 db_session = get_db_session(db_session)
411 db_session = get_db_session(db_session)
366 count_label = sa.func.count(Report.username).label('count')
412 count_label = sa.func.count(Report.username).label("count")
367 query = db_session.query(Report.username, count_label)
413 query = db_session.query(Report.username, count_label)
368 query = query.filter(Report.group_id == report_group.id)
414 query = query.filter(Report.group_id == report_group.id)
369 query = query.filter(Report.username != None)
415 query = query.filter(Report.username != None)
370 query = query.filter(Report.username != '')
416 query = query.filter(Report.username != "")
371 query = query.group_by(Report.username)
417 query = query.group_by(Report.username)
372 query = query.order_by(sa.desc(count_label))
418 query = query.order_by(sa.desc(count_label))
373 query = query.limit(50)
419 query = query.limit(50)
@@ -381,71 +427,95 b' class ReportGroupService(BaseService):'
381         detailed means the version that returns time intervals - non-detailed
427         detailed means the version that returns time intervals - non-detailed
382         returns the total sum
428         returns the total sum
383 """
429 """
384 delta = filter_settings['end_date'] - filter_settings['start_date']
430 delta = filter_settings["end_date"] - filter_settings["start_date"]
385 if delta < h.time_deltas.get('12h')['delta']:
431 if delta < h.time_deltas.get("12h")["delta"]:
386 interval = '1m'
432 interval = "1m"
387 elif delta <= h.time_deltas.get('3d')['delta']:
433 elif delta <= h.time_deltas.get("3d")["delta"]:
388 interval = '5m'
434 interval = "5m"
389 elif delta >= h.time_deltas.get('2w')['delta']:
435 elif delta >= h.time_deltas.get("2w")["delta"]:
390 interval = '24h'
436 interval = "24h"
391 else:
437 else:
392 interval = '1h'
438 interval = "1h"
393
439
394 group_id = filter_settings.get('group_id')
440 group_id = filter_settings.get("group_id")
395
441
396         es_query = {
397             'aggs': {'parent_agg': {'aggs': {'types': {
398                 'aggs': {'sub_agg': {'terms': {'field': 'tags.type.values'}}},
399                 'filter': {
400                     'and': [{'exists': {'field': 'tags.type.values'}}]}
401             }},
402                 'date_histogram': {'extended_bounds': {
403                     'max': filter_settings['end_date'],
404                     'min': filter_settings['start_date']},
405                     'field': 'timestamp',
406                     'interval': interval,
407                     'min_doc_count': 0}}},
408             'query': {'filtered': {
409                 'filter': {'and': [
410                     {'terms': {
411                         'resource_id': [filter_settings['resource'][0]]}},
412                     {'range': {'timestamp': {
413                         'gte': filter_settings['start_date'],
414                         'lte': filter_settings['end_date']}}}]
415                 }
416             }}
417         }
442         es_query = {
443             "aggs": {
444                 "parent_agg": {
445                     "aggs": {
446                         "types": {
447                             "aggs": {
448                                 "sub_agg": {"terms": {"field": "tags.type.values"}}
449                             },
450                             "filter": {
451                                 "and": [{"exists": {"field": "tags.type.values"}}]
452                             },
453                         }
454                     },
455                     "date_histogram": {
456                         "extended_bounds": {
457                             "max": filter_settings["end_date"],
458                             "min": filter_settings["start_date"],
459                         },
460                         "field": "timestamp",
461                         "interval": interval,
462                         "min_doc_count": 0,
463                     },
464                 }
465             },
466             "query": {
467                 "filtered": {
468                     "filter": {
469                         "and": [
470                             {
471                                 "terms": {
472                                     "resource_id": [filter_settings["resource"][0]]
473                                 }
474                             },
475                             {
476                                 "range": {
477                                     "timestamp": {
478                                         "gte": filter_settings["start_date"],
479                                         "lte": filter_settings["end_date"],
480                                     }
481                                 }
482                             },
483                         ]
484                     }
485                 }
486             },
487         }
418 if group_id:
488 if group_id:
419 parent_agg = es_query['aggs']['parent_agg']
489 parent_agg = es_query["aggs"]["parent_agg"]
420 filters = parent_agg['aggs']['types']['filter']['and']
490 filters = parent_agg["aggs"]["types"]["filter"]["and"]
421 filters.append({'terms': {'tags.group_id.values': [group_id]}})
491 filters.append({"terms": {"tags.group_id.values": [group_id]}})
422
492
423 index_names = es_index_name_limiter(
493 index_names = es_index_name_limiter(
424 start_date=filter_settings['start_date'],
494 start_date=filter_settings["start_date"],
425 end_date=filter_settings['end_date'],
495 end_date=filter_settings["end_date"],
426 ixtypes=['reports'])
496 ixtypes=["reports"],
497 )
427
498
428 if not index_names:
499 if not index_names:
429 return []
500 return []
430
501
431 result = Datastores.es.search(body=es_query,
502 result = Datastores.es.search(
432 index=index_names,
503 body=es_query, index=index_names, doc_type="log", size=0
433 doc_type='log',
504 )
434 size=0)
435 series = []
505 series = []
436 for bucket in result['aggregations']['parent_agg']['buckets']:
506 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
437 point = {
507 point = {
438 'x': datetime.utcfromtimestamp(int(bucket['key']) / 1000),
508 "x": datetime.utcfromtimestamp(int(bucket["key"]) / 1000),
439 'report': 0,
509 "report": 0,
440 'not_found': 0,
510 "not_found": 0,
441 'slow_report': 0
511 "slow_report": 0,
442 }
512 }
443 for subbucket in bucket['types']['sub_agg']['buckets']:
513 for subbucket in bucket["types"]["sub_agg"]["buckets"]:
444 if subbucket['key'] == 'slow':
514 if subbucket["key"] == "slow":
445 point['slow_report'] = subbucket['doc_count']
515 point["slow_report"] = subbucket["doc_count"]
446 elif subbucket['key'] == 'error':
516 elif subbucket["key"] == "error":
447 point['report'] = subbucket['doc_count']
517 point["report"] = subbucket["doc_count"]
448 elif subbucket['key'] == 'not_found':
518 elif subbucket["key"] == "not_found":
449 point['not_found'] = subbucket['doc_count']
519 point["not_found"] = subbucket["doc_count"]
450 series.append(point)
520 series.append(point)
451 return series
521 return series
@@ -25,26 +25,39 b' class ReportStatService(BaseService):'
25 def count_by_type(cls, report_type, resource_id, since_when):
25 def count_by_type(cls, report_type, resource_id, since_when):
26 report_type = ReportType.key_from_value(report_type)
26 report_type = ReportType.key_from_value(report_type)
27
27
28 index_names = es_index_name_limiter(start_date=since_when,
28 index_names = es_index_name_limiter(start_date=since_when, ixtypes=["reports"])
29 ixtypes=['reports'])
30
29
31         es_query = {
32             'aggs': {'reports': {'aggs': {
33                 'sub_agg': {'value_count': {'field': 'tags.group_id.values'}}},
34                 'filter': {'and': [{'terms': {'resource_id': [resource_id]}},
35                                    {'exists': {
36                                        'field': 'tags.group_id.values'}}]}}},
37             'query': {'filtered': {'filter': {
38                 'and': [{'terms': {'resource_id': [resource_id]}},
39                         {'terms': {'tags.type.values': [report_type]}},
40                         {'range': {'timestamp': {
41                             'gte': since_when}}}]}}}}
30         es_query = {
31             "aggs": {
32                 "reports": {
33                     "aggs": {
34                         "sub_agg": {"value_count": {"field": "tags.group_id.values"}}
35                     },
36                     "filter": {
37                         "and": [
38                             {"terms": {"resource_id": [resource_id]}},
39                             {"exists": {"field": "tags.group_id.values"}},
40                         ]
41                     },
42                 }
43             },
44             "query": {
45                 "filtered": {
46                     "filter": {
47                         "and": [
48                             {"terms": {"resource_id": [resource_id]}},
49                             {"terms": {"tags.type.values": [report_type]}},
50                             {"range": {"timestamp": {"gte": since_when}}},
51                         ]
52                     }
53                 }
54             },
55         }
42
56
43 if index_names:
57 if index_names:
44 result = Datastores.es.search(body=es_query,
58 result = Datastores.es.search(
45 index=index_names,
59 body=es_query, index=index_names, doc_type="log", size=0
46 doc_type='log',
60 )
47 size=0)
61 return result["aggregations"]["reports"]["sub_agg"]["value"]
48 return result['aggregations']['reports']['sub_agg']['value']
49 else:
62 else:
50 return 0
63 return 0
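A usage sketch for the counter above (the resource id and time window are invented; the method returns 0 whenever no reports index covers the window):

    from datetime import datetime, timedelta

    since = datetime.utcnow() - timedelta(hours=1)
    # Error reports recorded for resource 42 in the last hour.
    error_count = ReportStatService.count_by_type(ReportType.error, 42, since)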
This diff has been collapsed as it changes many lines (778 lines changed).
@@ -23,264 +23,353 b' from appenlight.lib.enums import ReportType'
23 from appenlight.lib.utils import es_index_name_limiter
23 from appenlight.lib.utils import es_index_name_limiter
24
24
25 try:
25 try:
26 from ae_uptime_ce.models.services.uptime_metric import \
26 from ae_uptime_ce.models.services.uptime_metric import UptimeMetricService
27 UptimeMetricService
28 except ImportError:
27 except ImportError:
29 UptimeMetricService = None
28 UptimeMetricService = None
30
29
31
30
32 def check_key(key, stats, uptime, total_seconds):
31 def check_key(key, stats, uptime, total_seconds):
33 if key not in stats:
32 if key not in stats:
34 stats[key] = {'name': key,
33 stats[key] = {
35 'requests': 0,
34 "name": key,
36 'errors': 0,
35 "requests": 0,
37 'tolerated_requests': 0,
36 "errors": 0,
38 'frustrating_requests': 0,
37 "tolerated_requests": 0,
39 'satisfying_requests': 0,
38 "frustrating_requests": 0,
40 'total_minutes': total_seconds / 60.0,
39 "satisfying_requests": 0,
41 'uptime': uptime,
40 "total_minutes": total_seconds / 60.0,
42 'apdex': 0,
41 "uptime": uptime,
43 'rpm': 0,
42 "apdex": 0,
44 'response_time': 0,
43 "rpm": 0,
45 'avg_response_time': 0}
44 "response_time": 0,
45 "avg_response_time": 0,
46 }
46
47
47
48
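check_key acts as a defaultdict-style initializer: the first time a server name appears in either the request or the error series, it seeds a zeroed stats record. A quick illustration (values invented):

    stats = {}
    check_key("web01", stats, uptime=99.9, total_seconds=3600)
    assert stats["web01"]["total_minutes"] == 60.0
    assert stats["web01"]["requests"] == 0  # filled in later from ES buckets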
48 class RequestMetricService(BaseService):
49 class RequestMetricService(BaseService):
49 @classmethod
50 @classmethod
50 def get_metrics_stats(cls, request, filter_settings, db_session=None):
51 def get_metrics_stats(cls, request, filter_settings, db_session=None):
51 delta = filter_settings['end_date'] - filter_settings['start_date']
52 delta = filter_settings["end_date"] - filter_settings["start_date"]
52 if delta < h.time_deltas.get('12h')['delta']:
53 if delta < h.time_deltas.get("12h")["delta"]:
53 interval = '1m'
54 interval = "1m"
54 elif delta <= h.time_deltas.get('3d')['delta']:
55 elif delta <= h.time_deltas.get("3d")["delta"]:
55 interval = '5m'
56 interval = "5m"
56 elif delta >= h.time_deltas.get('2w')['delta']:
57 elif delta >= h.time_deltas.get("2w")["delta"]:
57 interval = '24h'
58 interval = "24h"
58 else:
59 else:
59 interval = '1h'
60 interval = "1h"
60
61
61 filter_settings['namespace'] = ['appenlight.request_metric']
62 filter_settings["namespace"] = ["appenlight.request_metric"]
62
63
63 es_query = {
64 es_query = {
64 'aggs': {
65 "aggs": {
65 'parent_agg': {
66 "parent_agg": {
66 'aggs': {'custom': {'aggs': {'sub_agg': {
67 "aggs": {
67 'sum': {'field': 'tags.custom.numeric_values'}}},
68 "custom": {
68 'filter': {'exists': {
69 "aggs": {
69 'field': 'tags.custom.numeric_values'}}},
70 "sub_agg": {
70 'main': {'aggs': {'sub_agg': {'sum': {
71 "sum": {"field": "tags.custom.numeric_values"}
71 'field': 'tags.main.numeric_values'}}},
72 }
72 'filter': {'exists': {
73 },
73 'field': 'tags.main.numeric_values'}}},
74 "filter": {
74 'nosql': {'aggs': {'sub_agg': {'sum': {
75 "exists": {"field": "tags.custom.numeric_values"}
75 'field': 'tags.nosql.numeric_values'}}},
76 },
76 'filter': {'exists': {
77 },
77 'field': 'tags.nosql.numeric_values'}}},
78 "main": {
78 'remote': {'aggs': {'sub_agg': {'sum': {
79 "aggs": {
79 'field': 'tags.remote.numeric_values'}}},
80 "sub_agg": {
80 'filter': {'exists': {
81 "sum": {"field": "tags.main.numeric_values"}
81 'field': 'tags.remote.numeric_values'}}},
82 }
82 'requests': {'aggs': {'sub_agg': {'sum': {
83 },
83 'field': 'tags.requests.numeric_values'}}},
84 "filter": {"exists": {"field": "tags.main.numeric_values"}},
84 'filter': {'exists': {
85 },
85 'field': 'tags.requests.numeric_values'}}},
86 "nosql": {
86 'sql': {'aggs': {'sub_agg': {
87 "aggs": {
87 'sum': {'field': 'tags.sql.numeric_values'}}},
88 "sub_agg": {
88 'filter': {'exists': {
89 "sum": {"field": "tags.nosql.numeric_values"}
89 'field': 'tags.sql.numeric_values'}}},
90 }
90 'tmpl': {'aggs': {'sub_agg': {'sum': {
91 },
91 'field': 'tags.tmpl.numeric_values'}}},
92 "filter": {
92 'filter': {'exists': {
93 "exists": {"field": "tags.nosql.numeric_values"}
93 'field': 'tags.tmpl.numeric_values'}}}},
94 },
94 'date_histogram': {'extended_bounds': {
95 },
95 'max': filter_settings['end_date'],
96 "remote": {
96 'min': filter_settings['start_date']},
97 "aggs": {
97 'field': 'timestamp',
98 "sub_agg": {
98 'interval': interval,
99 "sum": {"field": "tags.remote.numeric_values"}
99 'min_doc_count': 0}}},
100 }
100 'query': {'filtered': {
101 },
101 'filter': {'and': [{'terms': {
102 "filter": {
102 'resource_id': [filter_settings['resource'][0]]}},
103 "exists": {"field": "tags.remote.numeric_values"}
103 {'range': {'timestamp': {
104 },
104 'gte': filter_settings['start_date'],
105 },
105 'lte': filter_settings['end_date']}}},
106 "requests": {
106 {'terms': {'namespace': [
107 "aggs": {
107 'appenlight.request_metric']}}]}}}}
108 "sub_agg": {
109 "sum": {"field": "tags.requests.numeric_values"}
110 }
111 },
112 "filter": {
113 "exists": {"field": "tags.requests.numeric_values"}
114 },
115 },
116 "sql": {
117 "aggs": {
118 "sub_agg": {"sum": {"field": "tags.sql.numeric_values"}}
119 },
120 "filter": {"exists": {"field": "tags.sql.numeric_values"}},
121 },
122 "tmpl": {
123 "aggs": {
124 "sub_agg": {
125 "sum": {"field": "tags.tmpl.numeric_values"}
126 }
127 },
128 "filter": {"exists": {"field": "tags.tmpl.numeric_values"}},
129 },
130 },
131 "date_histogram": {
132 "extended_bounds": {
133 "max": filter_settings["end_date"],
134 "min": filter_settings["start_date"],
135 },
136 "field": "timestamp",
137 "interval": interval,
138 "min_doc_count": 0,
139 },
140 }
141 },
142 "query": {
143 "filtered": {
144 "filter": {
145 "and": [
146 {
147 "terms": {
148 "resource_id": [filter_settings["resource"][0]]
149 }
150 },
151 {
152 "range": {
153 "timestamp": {
154 "gte": filter_settings["start_date"],
155 "lte": filter_settings["end_date"],
156 }
157 }
158 },
159 {"terms": {"namespace": ["appenlight.request_metric"]}},
160 ]
161 }
162 }
163 },
164 }
108
165
109 index_names = es_index_name_limiter(
166 index_names = es_index_name_limiter(
110 start_date=filter_settings['start_date'],
167 start_date=filter_settings["start_date"],
111 end_date=filter_settings['end_date'],
168 end_date=filter_settings["end_date"],
112 ixtypes=['metrics'])
169 ixtypes=["metrics"],
170 )
113 if not index_names:
171 if not index_names:
114 return []
172 return []
115
173
116 result = Datastores.es.search(body=es_query,
174 result = Datastores.es.search(
117 index=index_names,
175 body=es_query, index=index_names, doc_type="log", size=0
118 doc_type='log',
176 )
119 size=0)
120
177
121 plot_data = []
178 plot_data = []
122 for item in result['aggregations']['parent_agg']['buckets']:
179 for item in result["aggregations"]["parent_agg"]["buckets"]:
123 x_time = datetime.utcfromtimestamp(int(item['key']) / 1000)
180 x_time = datetime.utcfromtimestamp(int(item["key"]) / 1000)
124 point = {"x": x_time}
181 point = {"x": x_time}
125 for key in ['custom', 'main', 'nosql', 'remote',
182 for key in ["custom", "main", "nosql", "remote", "requests", "sql", "tmpl"]:
126 'requests', 'sql', 'tmpl']:
183 value = item[key]["sub_agg"]["value"]
127 value = item[key]['sub_agg']['value']
128 point[key] = round(value, 3) if value else 0
184 point[key] = round(value, 3) if value else 0
129 plot_data.append(point)
185 plot_data.append(point)
130
186
131 return plot_data
187 return plot_data
132
188
133 @classmethod
189 @classmethod
134 def get_requests_breakdown(cls, request, filter_settings,
190 def get_requests_breakdown(cls, request, filter_settings, db_session=None):
135 db_session=None):
136 db_session = get_db_session(db_session)
191 db_session = get_db_session(db_session)
137
192
138 # fetch total time of all requests in this time range
193 # fetch total time of all requests in this time range
139 index_names = es_index_name_limiter(
194 index_names = es_index_name_limiter(
140 start_date=filter_settings['start_date'],
195 start_date=filter_settings["start_date"],
141 end_date=filter_settings['end_date'],
196 end_date=filter_settings["end_date"],
142 ixtypes=['metrics'])
197 ixtypes=["metrics"],
198 )
143
199
144 if index_names and filter_settings['resource']:
200 if index_names and filter_settings["resource"]:
145 es_query = {
201 es_query = {
146 'aggs': {'main': {'aggs': {
202 "aggs": {
147 'sub_agg': {'sum': {'field': 'tags.main.numeric_values'}}},
203 "main": {
148 'filter': {'exists': {
204 "aggs": {
149 'field': 'tags.main.numeric_values'}}}},
205 "sub_agg": {"sum": {"field": "tags.main.numeric_values"}}
150 'query': {'filtered': {
206 },
151 'filter': {'and': [
207 "filter": {"exists": {"field": "tags.main.numeric_values"}},
152 {'terms': {
208 }
153 'resource_id': [filter_settings['resource'][0]]}},
209 },
154 {'range': {'timestamp': {
210 "query": {
155 'gte': filter_settings['start_date'],
211 "filtered": {
156 'lte': filter_settings['end_date']}}},
212 "filter": {
157 {'terms': {'namespace': [
213 "and": [
158 'appenlight.request_metric']}}]}}}}
214 {
159 result = Datastores.es.search(body=es_query,
215 "terms": {
160 index=index_names,
216 "resource_id": [filter_settings["resource"][0]]
161 doc_type='log',
217 }
162 size=0)
218 },
163 total_time_spent = result['aggregations']['main']['sub_agg'][
219 {
164 'value']
220 "range": {
221 "timestamp": {
222 "gte": filter_settings["start_date"],
223 "lte": filter_settings["end_date"],
224 }
225 }
226 },
227 {"terms": {"namespace": ["appenlight.request_metric"]}},
228 ]
229 }
230 }
231 },
232 }
233 result = Datastores.es.search(
234 body=es_query, index=index_names, doc_type="log", size=0
235 )
236 total_time_spent = result["aggregations"]["main"]["sub_agg"]["value"]
165 else:
237 else:
166 total_time_spent = 0
238 total_time_spent = 0
167 script_text = "doc['tags.main.numeric_values'].value / {}".format(
239 script_text = "doc['tags.main.numeric_values'].value / {}".format(
168 total_time_spent)
240 total_time_spent
241 )
169
242
170 if index_names and filter_settings['resource']:
243 if index_names and filter_settings["resource"]:
171 es_query = {
244 es_query = {
172 'aggs': {
245 "aggs": {
173 'parent_agg': {
246 "parent_agg": {
174 'aggs': {'main': {'aggs': {
247 "aggs": {
175 'sub_agg': {
248 "main": {
176 'sum': {'field': 'tags.main.numeric_values'}}},
249 "aggs": {
177 'filter': {
250 "sub_agg": {
178 'exists': {
251 "sum": {"field": "tags.main.numeric_values"}
179 'field': 'tags.main.numeric_values'}}},
252 }
180 'percentage': {
253 },
181 'aggs': {'sub_agg': {
254 "filter": {
182 'sum': {
255 "exists": {"field": "tags.main.numeric_values"}
183 'lang': 'expression',
256 },
184 'script': script_text}}},
257 },
185 'filter': {
258 "percentage": {
186 'exists': {
259 "aggs": {
187 'field': 'tags.main.numeric_values'}}},
260 "sub_agg": {
188 'requests': {'aggs': {'sub_agg': {
261 "sum": {
189 'sum': {
262 "lang": "expression",
190 'field': 'tags.requests.numeric_values'}}},
263 "script": script_text,
191 'filter': {'exists': {
264 }
192 'field': 'tags.requests.numeric_values'}}}},
265 }
193 'terms': {'field': 'tags.view_name.values',
266 },
194 'order': {'percentage>sub_agg': 'desc'},
267 "filter": {
195 'size': 15}}},
268 "exists": {"field": "tags.main.numeric_values"}
196 'query': {'filtered': {'filter': {'and': [
269 },
197 {'terms': {
270 },
198 'resource_id': [filter_settings['resource'][0]]}},
271 "requests": {
199 {'range': {
272 "aggs": {
200 'timestamp': {'gte': filter_settings['start_date'],
273 "sub_agg": {
201 'lte': filter_settings['end_date']
274 "sum": {"field": "tags.requests.numeric_values"}
202 }
275 }
276 },
277 "filter": {
278 "exists": {"field": "tags.requests.numeric_values"}
279 },
280 },
281 },
282 "terms": {
283 "field": "tags.view_name.values",
284 "order": {"percentage>sub_agg": "desc"},
285 "size": 15,
286 },
203 }
287 }
288 },
289 "query": {
290 "filtered": {
291 "filter": {
292 "and": [
293 {
294 "terms": {
295 "resource_id": [filter_settings["resource"][0]]
296 }
297 },
298 {
299 "range": {
300 "timestamp": {
301 "gte": filter_settings["start_date"],
302 "lte": filter_settings["end_date"],
303 }
304 }
305 },
306 ]
307 }
204 }
308 }
205 ]}
309 },
206 }}
207 }
310 }
208 result = Datastores.es.search(body=es_query,
311 result = Datastores.es.search(
209 index=index_names,
312 body=es_query, index=index_names, doc_type="log", size=0
210 doc_type='log',
313 )
211 size=0)
314 series = result["aggregations"]["parent_agg"]["buckets"]
212 series = result['aggregations']['parent_agg']['buckets']
213 else:
315 else:
214 series = []
316 series = []
215
317
216 and_part = [
318 and_part = [
217 {"term": {"resource_id": filter_settings['resource'][0]}},
319 {"term": {"resource_id": filter_settings["resource"][0]}},
218 {"terms": {"tags.view_name.values": [row['key'] for
320 {"terms": {"tags.view_name.values": [row["key"] for row in series]}},
219 row in series]}},
321 {"term": {"report_type": str(ReportType.slow)}},
220 {"term": {"report_type": str(ReportType.slow)}}
221 ]
322 ]
222 query = {
323 query = {
223 "aggs": {
324 "aggs": {
224 "top_reports": {
325 "top_reports": {
225 "terms": {
326 "terms": {"field": "tags.view_name.values", "size": len(series)},
226 "field": "tags.view_name.values",
227 "size": len(series)
228 },
229 "aggs": {
327 "aggs": {
230 "top_calls_hits": {
328 "top_calls_hits": {
231 "top_hits": {
329 "top_hits": {"sort": {"start_time": "desc"}, "size": 5}
232 "sort": {"start_time": "desc"},
233 "size": 5
234 }
235 }
330 }
236 }
331 },
237 }
332 }
238 },
333 },
239
334 "query": {"filtered": {"filter": {"and": and_part}}},
240 "query": {
241 "filtered": {
242 "filter": {
243 "and": and_part
244 }
245 }
246 }
247 }
335 }
248 details = {}
336 details = {}
249 index_names = es_index_name_limiter(ixtypes=['reports'])
337 index_names = es_index_name_limiter(ixtypes=["reports"])
250 if index_names and series:
338 if index_names and series:
251 result = Datastores.es.search(
339 result = Datastores.es.search(
252 body=query, doc_type='report', size=0, index=index_names)
340 body=query, doc_type="report", size=0, index=index_names
253 for bucket in result['aggregations']['top_reports']['buckets']:
341 )
254 details[bucket['key']] = []
342 for bucket in result["aggregations"]["top_reports"]["buckets"]:
255
343 details[bucket["key"]] = []
256 for hit in bucket['top_calls_hits']['hits']['hits']:
344
257 details[bucket['key']].append(
345 for hit in bucket["top_calls_hits"]["hits"]["hits"]:
258 {'report_id': hit['_source']['pg_id'],
346 details[bucket["key"]].append(
259 'group_id': hit['_source']['group_id']}
347 {
348 "report_id": hit["_source"]["pg_id"],
349 "group_id": hit["_source"]["group_id"],
350 }
260 )
351 )
261
352
262 results = []
353 results = []
263 for row in series:
354 for row in series:
264 result = {
355 result = {
265 'key': row['key'],
356 "key": row["key"],
266 'main': row['main']['sub_agg']['value'],
357 "main": row["main"]["sub_agg"]["value"],
267 'requests': row['requests']['sub_agg']['value']
358 "requests": row["requests"]["sub_agg"]["value"],
268 }
359 }
269 # es can return 'infinity'
360 # es can return 'infinity'
270 try:
361 try:
271 result['percentage'] = float(
362 result["percentage"] = float(row["percentage"]["sub_agg"]["value"])
272 row['percentage']['sub_agg']['value'])
273 except ValueError:
363 except ValueError:
274 result['percentage'] = 0
364 result["percentage"] = 0
275
365
276 result['latest_details'] = details.get(row['key']) or []
366 result["latest_details"] = details.get(row["key"]) or []
277 results.append(result)
367 results.append(result)
278
368
279 return results
369 return results
280
370
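The percentage sub-aggregation divides each view's main time by the grand total inside an expression script, which is why a zero total can surface as 'infinity' and is caught above. The same arithmetic client-side, with the zero guard made explicit (hypothetical helper):

    def view_share(view_time, total_time):
        # Fraction of all request time spent in one view.
        if not total_time:
            return 0.0
        return view_time / total_time

    assert view_share(2.5, 10.0) == 0.25
    assert view_share(2.5, 0) == 0.0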
281 @classmethod
371 @classmethod
282 def get_apdex_stats(cls, request, filter_settings,
372 def get_apdex_stats(cls, request, filter_settings, threshold=1, db_session=None):
283 threshold=1, db_session=None):
284 """
373 """
285         Calculates an APDEX score per server and returns the stats used by the
374         Calculates an APDEX score per server and returns the stats used by the
286         dashboard server information (upper right stats boxes)
375         dashboard server information (upper right stats boxes)
@@ -288,156 +377,241 b' class RequestMetricService(BaseService):'
288 # Apdex t = (Satisfied Count + Tolerated Count / 2) / Total Samples
377 # Apdex t = (Satisfied Count + Tolerated Count / 2) / Total Samples
289 db_session = get_db_session(db_session)
378 db_session = get_db_session(db_session)
290 index_names = es_index_name_limiter(
379 index_names = es_index_name_limiter(
291 start_date=filter_settings['start_date'],
380 start_date=filter_settings["start_date"],
292 end_date=filter_settings['end_date'], ixtypes=['metrics'])
381 end_date=filter_settings["end_date"],
382 ixtypes=["metrics"],
383 )
293
384
294 requests_series = []
385 requests_series = []
295
386
296 if index_names and filter_settings['resource']:
387 if index_names and filter_settings["resource"]:
297 es_query = {
388 es_query = {
298 'aggs': {
389 "aggs": {
299 'parent_agg': {'aggs': {
390 "parent_agg": {
300 'frustrating': {'aggs': {'sub_agg': {
391 "aggs": {
301 'sum': {'field': 'tags.requests.numeric_values'}}},
392 "frustrating": {
302 'filter': {'and': [
393 "aggs": {
303 {'range': {
394 "sub_agg": {
304 'tags.main.numeric_values': {'gte': '4'}}},
395 "sum": {"field": "tags.requests.numeric_values"}
305 {'exists': {
396 }
306 'field': 'tags.requests.numeric_values'}}]
397 },
307 }
398 "filter": {
399 "and": [
400 {
401 "range": {
402 "tags.main.numeric_values": {"gte": "4"}
403 }
404 },
405 {
406 "exists": {
407 "field": "tags.requests.numeric_values"
408 }
409 },
410 ]
411 },
412 },
413 "main": {
414 "aggs": {
415 "sub_agg": {
416 "sum": {"field": "tags.main.numeric_values"}
417 }
418 },
419 "filter": {
420 "exists": {"field": "tags.main.numeric_values"}
421 },
422 },
423 "requests": {
424 "aggs": {
425 "sub_agg": {
426 "sum": {"field": "tags.requests.numeric_values"}
427 }
428 },
429 "filter": {
430 "exists": {"field": "tags.requests.numeric_values"}
431 },
432 },
433 "tolerated": {
434 "aggs": {
435 "sub_agg": {
436 "sum": {"field": "tags.requests.numeric_values"}
437 }
438 },
439 "filter": {
440 "and": [
441 {
442 "range": {
443 "tags.main.numeric_values": {"gte": "1"}
444 }
445 },
446 {
447 "range": {
448 "tags.main.numeric_values": {"lt": "4"}
449 }
450 },
451 {
452 "exists": {
453 "field": "tags.requests.numeric_values"
454 }
455 },
456 ]
457 },
458 },
308 },
459 },
309 'main': {'aggs': {'sub_agg': {'sum': {
460 "terms": {"field": "tags.server_name.values", "size": 999999},
310 'field': 'tags.main.numeric_values'}}},
461 }
311 'filter': {'exists': {
462 },
312 'field': 'tags.main.numeric_values'}}},
463 "query": {
313 'requests': {'aggs': {'sub_agg': {
464 "filtered": {
314 'sum': {
465 "filter": {
315 'field': 'tags.requests.numeric_values'}}},
466 "and": [
316 'filter': {'exists': {
467 {
317 'field': 'tags.requests.numeric_values'}}},
468 "terms": {
318 'tolerated': {'aggs': {'sub_agg': {
469 "resource_id": [filter_settings["resource"][0]]
319 'sum': {
470 }
320 'field': 'tags.requests.numeric_values'}}},
471 },
321 'filter': {'and': [
472 {
322 {'range': {
473 "range": {
323 'tags.main.numeric_values': {'gte': '1'}}},
474 "timestamp": {
324 {'range': {
475 "gte": filter_settings["start_date"],
325 'tags.main.numeric_values': {'lt': '4'}}},
476 "lte": filter_settings["end_date"],
326 {'exists': {
477 }
327 'field': 'tags.requests.numeric_values'}}]}
478 }
479 },
480 {"terms": {"namespace": ["appenlight.request_metric"]}},
481 ]
328 }
482 }
329 },
483 }
330 'terms': {'field': 'tags.server_name.values',
484 },
331 'size': 999999}}},
485 }
332 'query': {
486
333 'filtered': {
487 result = Datastores.es.search(
334 'filter': {'and': [{'terms': {
488 body=es_query, index=index_names, doc_type="log", size=0
335 'resource_id': [
489 )
336 filter_settings['resource'][0]]}},
490 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
337 {'range': {'timestamp': {
491 requests_series.append(
338 'gte': filter_settings['start_date'],
492 {
339 'lte': filter_settings['end_date']}}},
493 "frustrating": bucket["frustrating"]["sub_agg"]["value"],
340 {'terms': {'namespace': [
494 "main": bucket["main"]["sub_agg"]["value"],
341 'appenlight.request_metric']}}]}}}}
495 "requests": bucket["requests"]["sub_agg"]["value"],
342
496 "tolerated": bucket["tolerated"]["sub_agg"]["value"],
343 result = Datastores.es.search(body=es_query,
497 "key": bucket["key"],
344 index=index_names,
498 }
345 doc_type='log',
499 )
346 size=0)
500
347 for bucket in result['aggregations']['parent_agg']['buckets']:
501 since_when = filter_settings["start_date"]
348 requests_series.append({
502 until = filter_settings["end_date"]
349 'frustrating': bucket['frustrating']['sub_agg']['value'],
350 'main': bucket['main']['sub_agg']['value'],
351 'requests': bucket['requests']['sub_agg']['value'],
352 'tolerated': bucket['tolerated']['sub_agg']['value'],
353 'key': bucket['key']
354 })
355
356 since_when = filter_settings['start_date']
357 until = filter_settings['end_date']
358
503
359 # total errors
504 # total errors
360
505
361 index_names = es_index_name_limiter(
506 index_names = es_index_name_limiter(
362 start_date=filter_settings['start_date'],
507 start_date=filter_settings["start_date"],
363 end_date=filter_settings['end_date'], ixtypes=['reports'])
508 end_date=filter_settings["end_date"],
509 ixtypes=["reports"],
510 )
364
511
365 report_series = []
512 report_series = []
366 if index_names and filter_settings['resource']:
513 if index_names and filter_settings["resource"]:
367 report_type = ReportType.key_from_value(ReportType.error)
514 report_type = ReportType.key_from_value(ReportType.error)
368 es_query = {
515 es_query = {
369 'aggs': {
516 "aggs": {
370 'parent_agg': {'aggs': {'errors': {'aggs': {'sub_agg': {
517 "parent_agg": {
371 'sum': {
518 "aggs": {
372 'field': 'tags.occurences.numeric_values'}}},
519 "errors": {
373 'filter': {'and': [
520 "aggs": {
374 {'terms': {
521 "sub_agg": {
375 'tags.type.values': [report_type]}},
522 "sum": {
376 {'exists': {
523 "field": "tags.occurences.numeric_values"
377 'field': 'tags.occurences.numeric_values'}}]
524 }
378 }
525 }
379 }},
526 },
380 'terms': {'field': 'tags.server_name.values',
527 "filter": {
381 'size': 999999}}},
528 "and": [
382 'query': {'filtered': {
529 {"terms": {"tags.type.values": [report_type]}},
383 'filter': {'and': [
530 {
384 {'terms': {
531 "exists": {
385 'resource_id': [filter_settings['resource'][0]]}},
532 "field": "tags.occurences.numeric_values"
386 {'range': {
533 }
387 'timestamp': {'gte': filter_settings['start_date'],
534 },
388 'lte': filter_settings['end_date']}}
535 ]
536 },
537 }
389 },
538 },
390 {'terms': {'namespace': ['appenlight.error']}}]
539 "terms": {"field": "tags.server_name.values", "size": 999999},
540 }
541 },
542 "query": {
543 "filtered": {
544 "filter": {
545 "and": [
546 {
547 "terms": {
548 "resource_id": [filter_settings["resource"][0]]
549 }
550 },
551 {
552 "range": {
553 "timestamp": {
554 "gte": filter_settings["start_date"],
555 "lte": filter_settings["end_date"],
556 }
557 }
558 },
559 {"terms": {"namespace": ["appenlight.error"]}},
560 ]
561 }
391 }
562 }
392 }}
563 },
393 }
564 }
394 result = Datastores.es.search(body=es_query,
565 result = Datastores.es.search(
395 index=index_names,
566 body=es_query, index=index_names, doc_type="log", size=0
396 doc_type='log',
567 )
397 size=0)
568 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
398 for bucket in result['aggregations']['parent_agg']['buckets']:
399 report_series.append(
569 report_series.append(
400 {'key': bucket['key'],
570 {
401 'errors': bucket['errors']['sub_agg']['value']
571 "key": bucket["key"],
572 "errors": bucket["errors"]["sub_agg"]["value"],
402 }
573 }
403 )
574 )
404
575
405 stats = {}
576 stats = {}
406 if UptimeMetricService is not None:
577 if UptimeMetricService is not None:
407 uptime = UptimeMetricService.get_uptime_by_app(
578 uptime = UptimeMetricService.get_uptime_by_app(
408 filter_settings['resource'][0],
579 filter_settings["resource"][0], since_when=since_when, until=until
409 since_when=since_when, until=until)
580 )
410 else:
581 else:
411 uptime = 0
582 uptime = 0
412
583
413 total_seconds = (until - since_when).total_seconds()
584 total_seconds = (until - since_when).total_seconds()
414
585
415 for stat in requests_series:
586 for stat in requests_series:
416 check_key(stat['key'], stats, uptime, total_seconds)
587 check_key(stat["key"], stats, uptime, total_seconds)
417 stats[stat['key']]['requests'] = int(stat['requests'])
588 stats[stat["key"]]["requests"] = int(stat["requests"])
418 stats[stat['key']]['response_time'] = stat['main']
589 stats[stat["key"]]["response_time"] = stat["main"]
419 stats[stat['key']]['tolerated_requests'] = stat['tolerated']
590 stats[stat["key"]]["tolerated_requests"] = stat["tolerated"]
420 stats[stat['key']]['frustrating_requests'] = stat['frustrating']
591 stats[stat["key"]]["frustrating_requests"] = stat["frustrating"]
421 for server in report_series:
592 for server in report_series:
422 check_key(server['key'], stats, uptime, total_seconds)
593 check_key(server["key"], stats, uptime, total_seconds)
423 stats[server['key']]['errors'] = server['errors']
594 stats[server["key"]]["errors"] = server["errors"]
424
595
425 server_stats = list(stats.values())
596 server_stats = list(stats.values())
426 for stat in server_stats:
597 for stat in server_stats:
427 stat['satisfying_requests'] = stat['requests'] - stat['errors'] \
598 stat["satisfying_requests"] = (
428 - stat['frustrating_requests'] - \
599 stat["requests"]
429 stat['tolerated_requests']
600 - stat["errors"]
430 if stat['satisfying_requests'] < 0:
601 - stat["frustrating_requests"]
431 stat['satisfying_requests'] = 0
602 - stat["tolerated_requests"]
432
603 )
433 if stat['requests']:
604 if stat["satisfying_requests"] < 0:
434 stat['avg_response_time'] = round(stat['response_time'] /
605 stat["satisfying_requests"] = 0
435 stat['requests'], 3)
606
436 qual_requests = stat['satisfying_requests'] + \
607 if stat["requests"]:
437 stat['tolerated_requests'] / 2.0
608 stat["avg_response_time"] = round(
438 stat['apdex'] = round((qual_requests / stat['requests']) * 100,
609 stat["response_time"] / stat["requests"], 3
439 2)
610 )
440 stat['rpm'] = round(stat['requests'] / stat['total_minutes'],
611 qual_requests = (
441 2)
612 stat["satisfying_requests"] + stat["tolerated_requests"] / 2.0
442
613 )
443 return sorted(server_stats, key=lambda x: x['name'])
614 stat["apdex"] = round((qual_requests / stat["requests"]) * 100, 2)
615 stat["rpm"] = round(stat["requests"] / stat["total_minutes"], 2)
616
617 return sorted(server_stats, key=lambda x: x["name"])
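A worked example of the Apdex formula quoted in the comment above, Apdex_t = (Satisfied Count + Tolerated Count / 2) / Total Samples, using the same rounding as the code (sample counts invented):

    satisfying = 80
    tolerated = 30
    requests = 120  # total samples

    qual_requests = satisfying + tolerated / 2.0
    apdex = round((qual_requests / requests) * 100, 2)
    assert apdex == 79.17  # (80 + 15) / 120 = 0.7916...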
@@ -22,71 +22,90 b' from appenlight.lib.utils import es_index_name_limiter'
22
22
23 class SlowCallService(BaseService):
23 class SlowCallService(BaseService):
24 @classmethod
24 @classmethod
25 def get_time_consuming_calls(cls, request, filter_settings,
25 def get_time_consuming_calls(cls, request, filter_settings, db_session=None):
26 db_session=None):
27 db_session = get_db_session(db_session)
26 db_session = get_db_session(db_session)
28 # get slow calls from older partitions too
27 # get slow calls from older partitions too
29 index_names = es_index_name_limiter(
28 index_names = es_index_name_limiter(
30 start_date=filter_settings['start_date'],
29 start_date=filter_settings["start_date"],
31 end_date=filter_settings['end_date'],
30 end_date=filter_settings["end_date"],
32 ixtypes=['slow_calls'])
31 ixtypes=["slow_calls"],
33 if index_names and filter_settings['resource']:
32 )
33 if index_names and filter_settings["resource"]:
34 # get longest time taking hashes
34 # get longest time taking hashes
35 es_query = {
35 es_query = {
36 'aggs': {
36 "aggs": {
37 'parent_agg': {
37 "parent_agg": {
38 'aggs': {
38 "aggs": {
39 'duration': {
39 "duration": {
40 'aggs': {'sub_agg': {
40 "aggs": {
41 'sum': {
41 "sub_agg": {
42 'field': 'tags.duration.numeric_values'}
42 "sum": {"field": "tags.duration.numeric_values"}
43 }},
43 }
44 'filter': {'exists': {
44 },
45 'field': 'tags.duration.numeric_values'}}},
45 "filter": {
46 'total': {
46 "exists": {"field": "tags.duration.numeric_values"}
47 'aggs': {'sub_agg': {'value_count': {
47 },
48 'field': 'tags.statement_hash.values'}}},
48 },
49 'filter': {'exists': {
49 "total": {
50 'field': 'tags.statement_hash.values'}}}},
50 "aggs": {
51 'terms': {'field': 'tags.statement_hash.values',
51 "sub_agg": {
52 'order': {'duration>sub_agg': 'desc'},
52 "value_count": {
53 'size': 15}}},
53 "field": "tags.statement_hash.values"
54 'query': {'filtered': {
54 }
55 'filter': {'and': [
55 }
56 {'terms': {
56 },
57 'resource_id': [filter_settings['resource'][0]]
57 "filter": {
58 }},
58 "exists": {"field": "tags.statement_hash.values"}
59 {'range': {'timestamp': {
59 },
60 'gte': filter_settings['start_date'],
60 },
61 'lte': filter_settings['end_date']}
61 },
62 }}]
62 "terms": {
63 "field": "tags.statement_hash.values",
64 "order": {"duration>sub_agg": "desc"},
65 "size": 15,
66 },
63 }
67 }
64 }
68 },
65 }
69 "query": {
70 "filtered": {
71 "filter": {
72 "and": [
73 {
74 "terms": {
75 "resource_id": [filter_settings["resource"][0]]
76 }
77 },
78 {
79 "range": {
80 "timestamp": {
81 "gte": filter_settings["start_date"],
82 "lte": filter_settings["end_date"],
83 }
84 }
85 },
86 ]
87 }
88 }
89 },
66 }
90 }
67 result = Datastores.es.search(
91 result = Datastores.es.search(
68 body=es_query, index=index_names, doc_type='log', size=0)
92 body=es_query, index=index_names, doc_type="log", size=0
69 results = result['aggregations']['parent_agg']['buckets']
93 )
94 results = result["aggregations"]["parent_agg"]["buckets"]
70 else:
95 else:
71 return []
96 return []
72 hashes = [i['key'] for i in results]
97 hashes = [i["key"] for i in results]
73
98
74 # get queries associated with hashes
99 # get queries associated with hashes
75 calls_query = {
100 calls_query = {
76 "aggs": {
101 "aggs": {
77 "top_calls": {
102 "top_calls": {
78 "terms": {
103 "terms": {"field": "tags.statement_hash.values", "size": 15},
79 "field": "tags.statement_hash.values",
80 "size": 15
81 },
82 "aggs": {
104 "aggs": {
83 "top_calls_hits": {
105 "top_calls_hits": {
84 "top_hits": {
106 "top_hits": {"sort": {"timestamp": "desc"}, "size": 5}
85 "sort": {"timestamp": "desc"},
86 "size": 5
87 }
88 }
107 }
89 }
108 },
90 }
109 }
91 },
110 },
92 "query": {
111 "query": {
@@ -95,45 +114,38 b' class SlowCallService(BaseService):'
95 "and": [
114 "and": [
96 {
115 {
97 "terms": {
116 "terms": {
98 "resource_id": [
117 "resource_id": [filter_settings["resource"][0]]
99 filter_settings['resource'][0]
100 ]
101 }
102 },
103 {
104 "terms": {
105 "tags.statement_hash.values": hashes
106 }
118 }
107 },
119 },
120 {"terms": {"tags.statement_hash.values": hashes}},
108 {
121 {
109 "range": {
122 "range": {
110 "timestamp": {
123 "timestamp": {
111 "gte": filter_settings['start_date'],
124 "gte": filter_settings["start_date"],
112 "lte": filter_settings['end_date']
125 "lte": filter_settings["end_date"],
113 }
126 }
114 }
127 }
115 }
128 },
116 ]
129 ]
117 }
130 }
118 }
131 }
119 }
132 },
120 }
133 }
121 calls = Datastores.es.search(body=calls_query,
134 calls = Datastores.es.search(
122 index=index_names,
135 body=calls_query, index=index_names, doc_type="log", size=0
123 doc_type='log',
136 )
124 size=0)
125 call_results = {}
137 call_results = {}
126 report_ids = []
138 report_ids = []
127 for call in calls['aggregations']['top_calls']['buckets']:
139 for call in calls["aggregations"]["top_calls"]["buckets"]:
128 hits = call['top_calls_hits']['hits']['hits']
140 hits = call["top_calls_hits"]["hits"]["hits"]
129 call_results[call['key']] = [i['_source'] for i in hits]
141 call_results[call["key"]] = [i["_source"] for i in hits]
130 report_ids.extend([i['_source']['tags']['report_id']['values']
142 report_ids.extend(
131 for i in hits])
143 [i["_source"]["tags"]["report_id"]["values"] for i in hits]
144 )
132 if report_ids:
145 if report_ids:
133 r_query = db_session.query(Report.group_id, Report.id)
146 r_query = db_session.query(Report.group_id, Report.id)
134 r_query = r_query.filter(Report.id.in_(report_ids))
147 r_query = r_query.filter(Report.id.in_(report_ids))
135 r_query = r_query.filter(
148 r_query = r_query.filter(Report.start_time >= filter_settings["start_date"])
136 Report.start_time >= filter_settings['start_date'])
137 else:
149 else:
138 r_query = []
150 r_query = []
139 reports_reversed = {}
151 reports_reversed = {}
@@ -142,27 +154,32 b' class SlowCallService(BaseService):'
142
154
143 final_results = []
155 final_results = []
144 for item in results:
156 for item in results:
145 if item['key'] not in call_results:
157 if item["key"] not in call_results:
146 continue
158 continue
147 call = call_results[item['key']][0]
159 call = call_results[item["key"]][0]
148 row = {'occurences': item['total']['sub_agg']['value'],
160 row = {
149 'total_duration': round(
161 "occurences": item["total"]["sub_agg"]["value"],
150 item['duration']['sub_agg']['value']),
162 "total_duration": round(item["duration"]["sub_agg"]["value"]),
151 'statement': call['message'],
163 "statement": call["message"],
152 'statement_type': call['tags']['type']['values'],
164 "statement_type": call["tags"]["type"]["values"],
153 'statement_subtype': call['tags']['subtype']['values'],
165 "statement_subtype": call["tags"]["subtype"]["values"],
154 'statement_hash': item['key'],
166 "statement_hash": item["key"],
155 'latest_details': []}
167 "latest_details": [],
156 if row['statement_type'] in ['tmpl', ' remote']:
168 }
157 params = call['tags']['parameters']['values'] \
169 if row["statement_type"] in ["tmpl", " remote"]:
158 if 'parameters' in call['tags'] else ''
170 params = (
159 row['statement'] = '{} ({})'.format(call['message'], params)
171 call["tags"]["parameters"]["values"]
160 for call in call_results[item['key']]:
172 if "parameters" in call["tags"]
161 report_id = call['tags']['report_id']['values']
173 else ""
174 )
175 row["statement"] = "{} ({})".format(call["message"], params)
176 for call in call_results[item["key"]]:
177 report_id = call["tags"]["report_id"]["values"]
162 group_id = reports_reversed.get(report_id)
178 group_id = reports_reversed.get(report_id)
163 if group_id:
179 if group_id:
164 row['latest_details'].append(
180 row["latest_details"].append(
165 {'group_id': group_id, 'report_id': report_id})
181 {"group_id": group_id, "report_id": report_id}
182 )
166
183
167 final_results.append(row)
184 final_results.append(row)
168
185
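The hunk above mostly reflows an Elasticsearch query body, so it helps to spell out the shape black is preserving: an "and" filter of two terms clauses plus a timestamp range. A minimal sketch of those clauses (the function name is invented for illustration; filter_settings and hashes mirror the variables in the diff):

def slow_call_filters(filter_settings, hashes):
    # The three filter clauses assembled by SlowCallService above:
    # restrict to one resource, to the aggregated statement hashes,
    # and to the requested time window.
    return [
        {"terms": {"resource_id": [filter_settings["resource"][0]]}},
        {"terms": {"tags.statement_hash.values": hashes}},
        {
            "range": {
                "timestamp": {
                    "gte": filter_settings["start_date"],
                    "lte": filter_settings["end_date"],
                }
            }
        },
    ]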
@@ -34,8 +34,9 b' class TagService(BaseService):'
34 return value
34 return value
35
35
36 @classmethod
36 @classmethod
37 def by_resource_id_and_value(cls, resource_id, tag_name, value,
37 def by_resource_id_and_value(
38 db_session=None, create_missing=True):
38 cls, resource_id, tag_name, value, db_session=None, create_missing=True
39 ):
39 """
40 """
40 Fetches tag and creates a new one if missing
41 Fetches tag and creates a new one if missing
41 """
42 """
@@ -43,39 +44,42 b' class TagService(BaseService):'
43 registry = get_current_registry()
44 registry = get_current_registry()
44
45
45 @registry.cache_regions.redis_min_10.cache_on_arguments(
46 @registry.cache_regions.redis_min_10.cache_on_arguments(
46 namespace='TagService.by_resource_id_and_value')
47 namespace="TagService.by_resource_id_and_value"
48 )
47 def cached(resource_id, tag_name, value):
49 def cached(resource_id, tag_name, value):
48 reduced_name = cls.cut_name(tag_name.decode('utf8'))
50 reduced_name = cls.cut_name(tag_name.decode("utf8"))
49 reduced_value = cls.cut_value(value.decode('utf8'))
51 reduced_value = cls.cut_value(value.decode("utf8"))
50
52
51 query = db_session.query(Tag)
53 query = db_session.query(Tag)
52 query = query.filter(Tag.resource_id == resource_id)
54 query = query.filter(Tag.resource_id == resource_id)
53 query = query.filter(Tag.name == reduced_name)
55 query = query.filter(Tag.name == reduced_name)
54 query = query.filter(sa.cast(Tag.value, sa.types.TEXT) ==
56 query = query.filter(
55 sa.cast(json.dumps(reduced_value),
57 sa.cast(Tag.value, sa.types.TEXT)
56 sa.types.TEXT))
58 == sa.cast(json.dumps(reduced_value), sa.types.TEXT)
59 )
57 tag = query.first()
60 tag = query.first()
58 if tag:
61 if tag:
59 db_session.expunge(tag)
62 db_session.expunge(tag)
60 return tag
63 return tag
61
64
62 view = cached(resource_id, tag_name.encode('utf8'),
65 view = cached(resource_id, tag_name.encode("utf8"), value.encode("utf8"))
63 value.encode('utf8'))
64 if not view and create_missing:
66 if not view and create_missing:
65 view = cls.create_tag(resource_id,
67 view = cls.create_tag(
66 cls.cut_name(tag_name),
68 resource_id, cls.cut_name(tag_name), cls.cut_value(value), db_session
67 cls.cut_value(value),
69 )
68 db_session)
70 cached.invalidate(
69 cached.invalidate(resource_id, tag_name.encode('utf8'),
71 resource_id, tag_name.encode("utf8"), value.encode("utf8")
70 value.encode('utf8'))
72 )
71 return view
73 return view
72
74
73 @classmethod
75 @classmethod
74 def create_tag(cls, resource_id, tag_name, value, db_session=None):
76 def create_tag(cls, resource_id, tag_name, value, db_session=None):
75
77
76 tag = Tag(resource_id=resource_id,
78 tag = Tag(
77 name=cls.cut_name(tag_name),
79 resource_id=resource_id,
78 value=cls.cut_value(value))
80 name=cls.cut_name(tag_name),
81 value=cls.cut_value(value),
82 )
79 db_session = get_db_session(db_session)
83 db_session = get_db_session(db_session)
80 db_session.add(tag)
84 db_session.add(tag)
81 db_session.flush()
85 db_session.flush()
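The caching dance in by_resource_id_and_value above follows the usual dogpile.cache read-through pattern: the decorated closure defines the cache key-space, and invalidate() is called after a missing tag is created. A minimal sketch with an in-memory backend (this assumes dogpile.cache, which provides the cache_on_arguments/invalidate API used here; the app itself routes through a Redis-backed region):

from dogpile.cache import make_region

region = make_region().configure("dogpile.cache.memory")

@region.cache_on_arguments(namespace="TagService.demo")
def lookup(resource_id, tag_name):
    # stands in for the Tag query inside cached() above
    return (resource_id, tag_name.upper())

lookup(1, "env")              # miss: computed and stored
lookup(1, "env")              # hit: served from the region
lookup.invalidate(1, "env")   # dropped after a write, as create_tag does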
@@ -87,7 +91,8 b' class TagService(BaseService):'
87 registry = get_current_registry()
91 registry = get_current_registry()
88
92
89 @registry.cache_regions.redis_min_10.cache_on_arguments(
93 @registry.cache_regions.redis_min_10.cache_on_arguments(
90 namespace='TagService.by_tag_id')
94 namespace="TagService.by_tag_id"
95 )
91 def cached(tag_id):
96 def cached(tag_id):
92 tag = db_session.query(Tag).filter(Tag.id == tag_id).first()
97 tag = db_session.query(Tag).filter(Tag.id == tag_id).first()
93 if tag:
98 if tag:
@@ -34,7 +34,7 b' from pyramid.threadlocal import get_current_registry'
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37 GroupOccurence = namedtuple('GroupOccurence', ['occurences', 'group'])
37 GroupOccurence = namedtuple("GroupOccurence", ["occurences", "group"])
38
38
39
39
40 class UserService(UserService):
40 class UserService(UserService):
@@ -43,31 +43,40 b' class UserService(UserService):'
43 return get_db_session(db_session).query(User).order_by(User.user_name)
43 return get_db_session(db_session).query(User).order_by(User.user_name)
44
44
45 @classmethod
45 @classmethod
46 def send_email(cls, request, recipients, variables, template,
46 def send_email(
47 immediately=False, silent=False):
47 cls, request, recipients, variables, template, immediately=False, silent=False
48 ):
48 html = pyramid.renderers.render(template, variables, request)
49 html = pyramid.renderers.render(template, variables, request)
49 title = variables.get('email_title',
50 title = variables.get("email_title", variables.get("title", "No Title"))
50 variables.get('title', "No Title"))
51 title = title.replace("\r", "").replace("\n", "")
51 title = title.replace('\r', '').replace('\n', '')
52 sender = "{} <{}>".format(
52 sender = "{} <{}>".format(
53 request.registry.settings['mailing.from_name'],
53 request.registry.settings["mailing.from_name"],
54 request.registry.settings['mailing.from_email'])
54 request.registry.settings["mailing.from_email"],
55 )
55 message = pyramid_mailer.message.Message(
56 message = pyramid_mailer.message.Message(
56 subject=title, sender=sender, recipients=recipients, html=html)
57 subject=title, sender=sender, recipients=recipients, html=html
58 )
57 if immediately:
59 if immediately:
58 try:
60 try:
59 request.registry.mailer.send_immediately(message)
61 request.registry.mailer.send_immediately(message)
60 except Exception as e:
62 except Exception as e:
61 log.warning('Exception %s' % e)
63 log.warning("Exception %s" % e)
62 if not silent:
64 if not silent:
63 raise
65 raise
64 else:
66 else:
65 request.registry.mailer.send(message)
67 request.registry.mailer.send(message)
66
68
67 @classmethod
69 @classmethod
68 def get_paginator(cls, page=1, item_count=None, items_per_page=50,
70 def get_paginator(
69 order_by=None, filter_settings=None,
71 cls,
70 exclude_columns=None, db_session=None):
72 page=1,
73 item_count=None,
74 items_per_page=50,
75 order_by=None,
76 filter_settings=None,
77 exclude_columns=None,
78 db_session=None,
79 ):
71 registry = get_current_registry()
80 registry = get_current_registry()
72 if not exclude_columns:
81 if not exclude_columns:
73 exclude_columns = []
82 exclude_columns = []
@@ -75,19 +84,19 b' class UserService(UserService):'
75 filter_settings = {}
84 filter_settings = {}
76 db_session = get_db_session(db_session)
85 db_session = get_db_session(db_session)
77 q = db_session.query(User)
86 q = db_session.query(User)
78 if filter_settings.get('order_col'):
87 if filter_settings.get("order_col"):
79 order_col = filter_settings.get('order_col')
88 order_col = filter_settings.get("order_col")
80 if filter_settings.get('order_dir') == 'dsc':
89 if filter_settings.get("order_dir") == "dsc":
81 sort_on = 'desc'
90 sort_on = "desc"
82 else:
91 else:
83 sort_on = 'asc'
92 sort_on = "asc"
84 q = q.order_by(getattr(sa, sort_on)(getattr(User, order_col)))
93 q = q.order_by(getattr(sa, sort_on)(getattr(User, order_col)))
85 else:
94 else:
86 q = q.order_by(sa.desc(User.registered_date))
95 q = q.order_by(sa.desc(User.registered_date))
87 # remove the url generator from cache params, otherwise the count is never cached
96 # remove the url generator from cache params, otherwise the count is never cached
88 cache_params = dict(filter_settings)
97 cache_params = dict(filter_settings)
89 cache_params.pop('url', None)
98 cache_params.pop("url", None)
90 cache_params.pop('url_maker', None)
99 cache_params.pop("url_maker", None)
91
100
92 @registry.cache_regions.redis_min_5.cache_on_arguments()
101 @registry.cache_regions.redis_min_5.cache_on_arguments()
93 def estimate_users(cache_key):
102 def estimate_users(cache_key):
@@ -100,20 +109,23 b' class UserService(UserService):'
100 # errors just started to flow in
109 # errors just started to flow in
101 if item_count < 1000:
110 if item_count < 1000:
102 item_count = estimate_users.refresh(cache_params)
111 item_count = estimate_users.refresh(cache_params)
103 paginator = SqlalchemyOrmPage(q, page=page,
112 paginator = SqlalchemyOrmPage(
104 item_count=item_count,
113 q,
105 items_per_page=items_per_page,
114 page=page,
106 **filter_settings)
115 item_count=item_count,
116 items_per_page=items_per_page,
117 **filter_settings
118 )
107 return paginator
119 return paginator
108
120
109 @classmethod
121 @classmethod
110 def get_valid_channels(cls, user):
122 def get_valid_channels(cls, user):
111 return [channel for channel in user.alert_channels
123 return [channel for channel in user.alert_channels if channel.channel_validated]
112 if channel.channel_validated]
113
124
114 @classmethod
125 @classmethod
115 def report_notify(cls, user, request, application, report_groups,
126 def report_notify(
116 occurence_dict, db_session=None):
127 cls, user, request, application, report_groups, occurence_dict, db_session=None
128 ):
117 db_session = get_db_session(db_session)
129 db_session = get_db_session(db_session)
118 if not report_groups:
130 if not report_groups:
119 return True
131 return True
@@ -125,12 +137,12 b' class UserService(UserService):'
125 occurences = occurence_dict.get(group.id, 1)
137 occurences = occurence_dict.get(group.id, 1)
126 for action in channel.channel_actions:
138 for action in channel.channel_actions:
127 not_matched = (
139 not_matched = (
128 action.resource_id and action.resource_id !=
140 action.resource_id
129 application.resource_id)
141 and action.resource_id != application.resource_id
130 if action.type != 'report' or not_matched:
142 )
143 if action.type != "report" or not_matched:
131 continue
144 continue
132 should_notify = (action.action == 'always' or
145 should_notify = action.action == "always" or not group.notified
133 not group.notified)
134 rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
146 rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
135 report_dict = group.get_report().get_dict(request)
147 report_dict = group.get_report().get_dict(request)
136 if rule_obj.match(report_dict) and should_notify:
148 if rule_obj.match(report_dict) and should_notify:
@@ -143,10 +155,12 b' class UserService(UserService):'
143 if not total_confirmed:
155 if not total_confirmed:
144 continue
156 continue
145 try:
157 try:
146 channel.notify_reports(resource=application,
158 channel.notify_reports(
147 user=user,
159 resource=application,
148 request=request,
160 user=user,
149 since_when=since_when,
161 request=request,
150 reports=confirmed_groups)
162 since_when=since_when,
163 reports=confirmed_groups,
164 )
151 except IntegrationException as e:
165 except IntegrationException as e:
152 log.warning('%s' % e)
166 log.warning("%s" % e)
@@ -24,51 +24,53 b' from ziggurat_foundations.models.base import BaseModel'
24
24
25
25
26 class SlowCall(Base, BaseModel):
26 class SlowCall(Base, BaseModel):
27 __tablename__ = 'slow_calls'
27 __tablename__ = "slow_calls"
28 __table_args__ = {'implicit_returning': False}
28 __table_args__ = {"implicit_returning": False}
29
29
30 resource_id = sa.Column(sa.Integer(), nullable=False, index=True)
30 resource_id = sa.Column(sa.Integer(), nullable=False, index=True)
31 id = sa.Column(sa.Integer, nullable=False, primary_key=True)
31 id = sa.Column(sa.Integer, nullable=False, primary_key=True)
32 report_id = sa.Column(sa.BigInteger,
32 report_id = sa.Column(
33 sa.ForeignKey('reports.id',
33 sa.BigInteger,
34 ondelete='cascade',
34 sa.ForeignKey("reports.id", ondelete="cascade", onupdate="cascade"),
35 onupdate='cascade'),
35 primary_key=True,
36 primary_key=True)
36 )
37 duration = sa.Column(sa.Float(), default=0)
37 duration = sa.Column(sa.Float(), default=0)
38 statement = sa.Column(sa.UnicodeText(), default='')
38 statement = sa.Column(sa.UnicodeText(), default="")
39 statement_hash = sa.Column(sa.Unicode(60), default='')
39 statement_hash = sa.Column(sa.Unicode(60), default="")
40 parameters = sa.Column(JSON(), nullable=False, default=dict)
40 parameters = sa.Column(JSON(), nullable=False, default=dict)
41 type = sa.Column(sa.Unicode(16), default='')
41 type = sa.Column(sa.Unicode(16), default="")
42 subtype = sa.Column(sa.Unicode(16), default=None)
42 subtype = sa.Column(sa.Unicode(16), default=None)
43 location = sa.Column(sa.Unicode(255), default='')
43 location = sa.Column(sa.Unicode(255), default="")
44 timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
44 timestamp = sa.Column(
45 server_default=sa.func.now())
45 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
46 report_group_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
46 )
47 server_default=sa.func.now())
47 report_group_time = sa.Column(
48 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
49 )
48
50
49 def set_data(self, data, protocol_version=None, resource_id=None,
51 def set_data(
50 report_group=None):
52 self, data, protocol_version=None, resource_id=None, report_group=None
53 ):
51 self.resource_id = resource_id
54 self.resource_id = resource_id
52 if data.get('start') and data.get('end'):
55 if data.get("start") and data.get("end"):
53 self.timestamp = data.get('start')
56 self.timestamp = data.get("start")
54 d = data.get('end') - data.get('start')
57 d = data.get("end") - data.get("start")
55 self.duration = d.total_seconds()
58 self.duration = d.total_seconds()
56 self.statement = data.get('statement', '')
59 self.statement = data.get("statement", "")
57 self.type = data.get('type', 'unknown')[:16]
60 self.type = data.get("type", "unknown")[:16]
58 self.parameters = data.get('parameters', {})
61 self.parameters = data.get("parameters", {})
59 self.location = data.get('location', '')[:255]
62 self.location = data.get("location", "")[:255]
60 self.report_group_time = report_group.first_timestamp
63 self.report_group_time = report_group.first_timestamp
61 if 'subtype' in data:
64 if "subtype" in data:
62 self.subtype = data.get('subtype', 'unknown')[:16]
65 self.subtype = data.get("subtype", "unknown")[:16]
63 if self.type == 'tmpl':
66 if self.type == "tmpl":
64 self.set_hash('{} {}'.format(self.statement, self.parameters))
67 self.set_hash("{} {}".format(self.statement, self.parameters))
65 else:
68 else:
66 self.set_hash()
69 self.set_hash()
67
70
68 def set_hash(self, custom_statement=None):
71 def set_hash(self, custom_statement=None):
69 statement = custom_statement or self.statement
72 statement = custom_statement or self.statement
70 self.statement_hash = hashlib.sha1(
73 self.statement_hash = hashlib.sha1(statement.encode("utf8")).hexdigest()
71 statement.encode('utf8')).hexdigest()
72
74
73 @property
75 @property
74 def end_time(self):
76 def end_time(self):
@@ -78,38 +80,48 b' class SlowCall(Base, BaseModel):'
78
80
79 def get_dict(self):
81 def get_dict(self):
80 instance_dict = super(SlowCall, self).get_dict()
82 instance_dict = super(SlowCall, self).get_dict()
81 instance_dict['children'] = []
83 instance_dict["children"] = []
82 instance_dict['end_time'] = self.end_time
84 instance_dict["end_time"] = self.end_time
83 return instance_dict
85 return instance_dict
84
86
85 def es_doc(self):
87 def es_doc(self):
86 doc = {
88 doc = {
87 'resource_id': self.resource_id,
89 "resource_id": self.resource_id,
88 'timestamp': self.timestamp,
90 "timestamp": self.timestamp,
89 'pg_id': str(self.id),
91 "pg_id": str(self.id),
90 'permanent': False,
92 "permanent": False,
91 'request_id': None,
93 "request_id": None,
92 'log_level': 'UNKNOWN',
94 "log_level": "UNKNOWN",
93 'message': self.statement,
95 "message": self.statement,
94 'namespace': 'appenlight.slow_call',
96 "namespace": "appenlight.slow_call",
95 'tags': {
97 "tags": {
96 'report_id': {'values': self.report_id,
98 "report_id": {
97 'numeric_values': self.report_id},
99 "values": self.report_id,
98 'duration': {'values': None, 'numeric_values': self.duration},
100 "numeric_values": self.report_id,
99 'statement_hash': {'values': self.statement_hash,
101 },
100 'numeric_values': None},
102 "duration": {"values": None, "numeric_values": self.duration},
101 'type': {'values': self.type, 'numeric_values': None},
103 "statement_hash": {
102 'subtype': {'values': self.subtype, 'numeric_values': None},
104 "values": self.statement_hash,
103 'location': {'values': self.location, 'numeric_values': None},
105 "numeric_values": None,
104 'parameters': {'values': None, 'numeric_values': None}
106 },
107 "type": {"values": self.type, "numeric_values": None},
108 "subtype": {"values": self.subtype, "numeric_values": None},
109 "location": {"values": self.location, "numeric_values": None},
110 "parameters": {"values": None, "numeric_values": None},
105 },
111 },
106 'tag_list': ['report_id', 'duration', 'statement_hash', 'type',
112 "tag_list": [
107 'subtype', 'location']
113 "report_id",
114 "duration",
115 "statement_hash",
116 "type",
117 "subtype",
118 "location",
119 ],
108 }
120 }
109 if isinstance(self.parameters, str):
121 if isinstance(self.parameters, str):
110 doc['tags']['parameters']['values'] = self.parameters[:255]
122 doc["tags"]["parameters"]["values"] = self.parameters[:255]
111 return doc
123 return doc
112
124
113 @property
125 @property
114 def partition_id(self):
126 def partition_id(self):
115 return 'rcae_sc_%s' % self.report_group_time.strftime('%Y_%m')
127 return "rcae_sc_%s" % self.report_group_time.strftime("%Y_%m")
@@ -23,15 +23,16 b' from . import Base'
23
23
24
24
25 class Tag(Base, BaseModel):
25 class Tag(Base, BaseModel):
26 __tablename__ = 'tags'
26 __tablename__ = "tags"
27
27
28 id = sa.Column(sa.Integer, primary_key=True)
28 id = sa.Column(sa.Integer, primary_key=True)
29 resource_id = sa.Column(sa.Integer,
29 resource_id = sa.Column(sa.Integer, sa.ForeignKey("resources.resource_id"))
30 sa.ForeignKey('resources.resource_id'))
31 name = sa.Column(sa.Unicode(512), nullable=False)
30 name = sa.Column(sa.Unicode(512), nullable=False)
32 value = sa.Column(JSON, nullable=False)
31 value = sa.Column(JSON, nullable=False)
33 first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
32 first_timestamp = sa.Column(
34 server_default=sa.func.now())
33 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
35 last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
34 )
36 server_default=sa.func.now())
35 last_timestamp = sa.Column(
36 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
37 )
37 times_seen = sa.Column(sa.Integer, nullable=False, default=0)
38 times_seen = sa.Column(sa.Integer, nullable=False, default=0)
@@ -32,72 +32,84 b' class User(UserMixin, Base):'
32
32
33 first_name = sa.Column(sa.Unicode(25))
33 first_name = sa.Column(sa.Unicode(25))
34 last_name = sa.Column(sa.Unicode(25))
34 last_name = sa.Column(sa.Unicode(25))
35 company_name = sa.Column(sa.Unicode(255), default='')
35 company_name = sa.Column(sa.Unicode(255), default="")
36 company_address = sa.Column(sa.Unicode(255), default='')
36 company_address = sa.Column(sa.Unicode(255), default="")
37 zip_code = sa.Column(sa.Unicode(25), default='')
37 zip_code = sa.Column(sa.Unicode(25), default="")
38 city = sa.Column(sa.Unicode(50), default='')
38 city = sa.Column(sa.Unicode(50), default="")
39 default_report_sort = sa.Column(sa.Unicode(25), default='newest')
39 default_report_sort = sa.Column(sa.Unicode(25), default="newest")
40 notes = sa.Column(sa.UnicodeText, default='')
40 notes = sa.Column(sa.UnicodeText, default="")
41 notifications = sa.Column(sa.Boolean(), default=True)
41 notifications = sa.Column(sa.Boolean(), default=True)
42 registration_ip = sa.Column(sa.UnicodeText(), default='')
42 registration_ip = sa.Column(sa.UnicodeText(), default="")
43 alert_channels = sa.orm.relationship('AlertChannel',
43 alert_channels = sa.orm.relationship(
44 cascade="all,delete-orphan",
44 "AlertChannel",
45 passive_deletes=True,
45 cascade="all,delete-orphan",
46 passive_updates=True,
46 passive_deletes=True,
47 backref='owner',
47 passive_updates=True,
48 order_by='AlertChannel.channel_name, '
48 backref="owner",
49 'AlertChannel.channel_value')
49 order_by="AlertChannel.channel_name, " "AlertChannel.channel_value",
50
50 )
51 alert_actions = sa.orm.relationship('AlertChannelAction',
51
52 cascade="all,delete-orphan",
52 alert_actions = sa.orm.relationship(
53 passive_deletes=True,
53 "AlertChannelAction",
54 passive_updates=True,
54 cascade="all,delete-orphan",
55 backref='owner',
55 passive_deletes=True,
56 order_by='AlertChannelAction.pkey')
56 passive_updates=True,
57
57 backref="owner",
58 auth_tokens = sa.orm.relationship('AuthToken',
58 order_by="AlertChannelAction.pkey",
59 cascade="all,delete-orphan",
59 )
60 passive_deletes=True,
60
61 passive_updates=True,
61 auth_tokens = sa.orm.relationship(
62 backref='owner',
62 "AuthToken",
63 order_by='AuthToken.creation_date')
63 cascade="all,delete-orphan",
64
64 passive_deletes=True,
65 def get_dict(self, exclude_keys=None, include_keys=None,
65 passive_updates=True,
66 extended_info=False):
66 backref="owner",
67 order_by="AuthToken.creation_date",
68 )
69
70 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
67 result = super(User, self).get_dict(exclude_keys, include_keys)
71 result = super(User, self).get_dict(exclude_keys, include_keys)
68 if extended_info:
72 if extended_info:
69 result['groups'] = [g.group_name for g in self.groups]
73 result["groups"] = [g.group_name for g in self.groups]
70 result['permissions'] = [p.perm_name for p in UserService.permissions(self)]
74 result["permissions"] = [p.perm_name for p in UserService.permissions(self)]
71 request = get_current_request()
75 request = get_current_request()
72 apps = UserService.resources_with_perms(self,
76 apps = UserService.resources_with_perms(
73 ['view'], resource_types=['application'])
77 self, ["view"], resource_types=["application"]
74 result['applications'] = sorted(
78 )
75 [{'resource_id': a.resource_id,
79 result["applications"] = sorted(
76 'resource_name': a.resource_name}
80 [
77 for a in apps.all()],
81 {"resource_id": a.resource_id, "resource_name": a.resource_name}
78 key=lambda x: x['resource_name'].lower())
82 for a in apps.all()
79 result['assigned_reports'] = [r.get_dict(request) for r
83 ],
80 in self.assigned_report_groups]
84 key=lambda x: x["resource_name"].lower(),
81 result['latest_events'] = [ev.get_dict(request) for ev
85 )
82 in self.latest_events()]
86 result["assigned_reports"] = [
87 r.get_dict(request) for r in self.assigned_report_groups
88 ]
89 result["latest_events"] = [
90 ev.get_dict(request) for ev in self.latest_events()
91 ]
83
92
84 exclude_keys_list = exclude_keys or []
93 exclude_keys_list = exclude_keys or []
85 include_keys_list = include_keys or []
94 include_keys_list = include_keys or []
86 d = {}
95 d = {}
87 for k in result.keys():
96 for k in result.keys():
88 if (k not in exclude_keys_list and
97 if k not in exclude_keys_list and (
89 (k in include_keys_list or not include_keys)):
98 k in include_keys_list or not include_keys
99 ):
90 d[k] = result[k]
100 d[k] = result[k]
91 return d
101 return d
92
102
93 def __repr__(self):
103 def __repr__(self):
94 return '<User: %s, id: %s>' % (self.user_name, self.id)
104 return "<User: %s, id: %s>" % (self.user_name, self.id)
95
105
96 @property
106 @property
97 def assigned_report_groups(self):
107 def assigned_report_groups(self):
98 from appenlight.models.report_group import ReportGroup
108 from appenlight.models.report_group import ReportGroup
99
109
100 resources = UserService.resources_with_perms(self, ['view'], resource_types=['application'])
110 resources = UserService.resources_with_perms(
111 self, ["view"], resource_types=["application"]
112 )
101 query = self.assigned_reports_relation
113 query = self.assigned_reports_relation
102 rid_list = [r.resource_id for r in resources]
114 rid_list = [r.resource_id for r in resources]
103 query = query.filter(ReportGroup.resource_id.in_(rid_list))
115 query = query.filter(ReportGroup.resource_id.in_(rid_list))
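The include/exclude logic at the end of get_dict above is easy to misread after the rewrap, so here is the same rule in isolation (the helper name is invented for illustration): excluded keys are always dropped, and when include_keys is given only those keys survive.

def filter_keys(result, exclude_keys=None, include_keys=None):
    exclude = exclude_keys or []
    include = include_keys or []
    return {
        k: v
        for k, v in result.items()
        if k not in exclude and (k in include or not include_keys)
    }

print(filter_keys({"id": 1, "notes": "x"}, exclude_keys=["notes"]))  # {'id': 1}
print(filter_keys({"id": 1, "notes": "x"}, include_keys=["notes"]))  # {'notes': 'x'}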
@@ -106,12 +118,13 b' class User(UserMixin, Base):'
106
118
107 def feed_report(self, report):
119 def feed_report(self, report):
108 """ """
120 """ """
109 if not hasattr(self, 'current_reports'):
121 if not hasattr(self, "current_reports"):
110 self.current_reports = []
122 self.current_reports = []
111 self.current_reports.append(report)
123 self.current_reports.append(report)
112
124
113 def send_digest(self, request, application, reports, since_when=None,
125 def send_digest(
114 db_session=None):
126 self, request, application, reports, since_when=None, db_session=None
127 ):
115 db_session = get_db_session(db_session)
128 db_session = get_db_session(db_session)
116 if not reports:
129 if not reports:
117 return True
130 return True
@@ -121,13 +134,15 b' class User(UserMixin, Base):'
121 if not channel.channel_validated or not channel.daily_digest:
134 if not channel.channel_validated or not channel.daily_digest:
122 continue
135 continue
123 try:
136 try:
124 channel.send_digest(resource=application,
137 channel.send_digest(
125 user=self,
138 resource=application,
126 request=request,
139 user=self,
127 since_when=since_when,
140 request=request,
128 reports=reports)
141 since_when=since_when,
142 reports=reports,
143 )
129 except IntegrationException as e:
144 except IntegrationException as e:
130 log.warning('%s' % e)
145 log.warning("%s" % e)
131
146
132 def latest_events(self):
147 def latest_events(self):
133 return EventService.latest_for_user(self)
148 return EventService.latest_for_user(self)
@@ -14,7 +14,9 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from ziggurat_foundations.models.user_resource_permission import UserResourcePermissionMixin
17 from ziggurat_foundations.models.user_resource_permission import (
18 UserResourcePermissionMixin,
19 )
18 from appenlight.models import Base
20 from appenlight.models import Base
19
21
20
22
@@ -20,49 +20,41 b' from appenlight.forms import CSRFException'
20 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
21
21
22 from pyramid.interfaces import IDefaultCSRFOptions
22 from pyramid.interfaces import IDefaultCSRFOptions
23 from pyramid.session import (
23 from pyramid.session import check_csrf_origin, check_csrf_token
24 check_csrf_origin,
25 check_csrf_token,
26 )
27
24
28 # taken directly from pyramid 1.7
25 # taken directly from pyramid 1.7
29 # pyramid/viewderivers.py
26 # pyramid/viewderivers.py
30 # the difference is this deriver will ignore csrf_check when auth token
27 # the difference is this deriver will ignore csrf_check when auth token
31 # policy is in effect
28 # policy is in effect
32
29
30
33 def csrf_view(view, info):
31 def csrf_view(view, info):
34 explicit_val = info.options.get('require_csrf')
32 explicit_val = info.options.get("require_csrf")
35 defaults = info.registry.queryUtility(IDefaultCSRFOptions)
33 defaults = info.registry.queryUtility(IDefaultCSRFOptions)
36 if defaults is None:
34 if defaults is None:
37 default_val = False
35 default_val = False
38 token = 'csrf_token'
36 token = "csrf_token"
39 header = 'X-CSRF-Token'
37 header = "X-CSRF-Token"
40 safe_methods = frozenset(["GET", "HEAD", "OPTIONS", "TRACE"])
38 safe_methods = frozenset(["GET", "HEAD", "OPTIONS", "TRACE"])
41 else:
39 else:
42 default_val = defaults.require_csrf
40 default_val = defaults.require_csrf
43 token = defaults.token
41 token = defaults.token
44 header = defaults.header
42 header = defaults.header
45 safe_methods = defaults.safe_methods
43 safe_methods = defaults.safe_methods
46 enabled = (
44 enabled = explicit_val is True or (explicit_val is not False and default_val)
47 explicit_val is True or
48 (explicit_val is not False and default_val)
49 )
50 # disable if both header and token are disabled
45 # disable if both header and token are disabled
51 enabled = enabled and (token or header)
46 enabled = enabled and (token or header)
52 wrapped_view = view
47 wrapped_view = view
53 if enabled:
48 if enabled:
49
54 def csrf_view(context, request):
50 def csrf_view(context, request):
55 is_from_auth_token = 'auth:auth_token' in \
51 is_from_auth_token = "auth:auth_token" in request.effective_principals
56 request.effective_principals
57 if is_from_auth_token:
52 if is_from_auth_token:
58 log.debug('ignoring CSRF check, auth token used')
53 log.debug("ignoring CSRF check, auth token used")
59 elif (
54 elif request.method not in safe_methods and (
60 request.method not in safe_methods and
61 (
62 # skip exception views unless value is explicitly defined
55 # skip exception views unless value is explicitly defined
63 getattr(request, 'exception', None) is None or
56 getattr(request, "exception", None) is None
64 explicit_val is not None
57 or explicit_val is not None
65 )
66 ):
58 ):
67 check_csrf_origin(request, raises=True)
59 check_csrf_origin(request, raises=True)
68 check_csrf_token(request, token, header, raises=True)
60 check_csrf_token(request, token, header, raises=True)
@@ -71,7 +63,8 b' def csrf_view(view, info):'
71 wrapped_view = csrf_view
63 wrapped_view = csrf_view
72 return wrapped_view
64 return wrapped_view
73
65
74 csrf_view.options = ('require_csrf',)
66
67 csrf_view.options = ("require_csrf",)
75
68
76
69
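How this deriver gets registered is not part of the commit; for context, Pyramid 1.7+ exposes add_view_deriver on the configurator, so the wiring is roughly the following (a sketch of assumed startup code, not taken from this repository):

def includeme(config):
    # register the CSRF-aware deriver defined above (assumed wiring)
    config.add_view_deriver(csrf_view)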
77 class PublicReportGroup(object):
70 class PublicReportGroup(object):
@@ -79,12 +72,12 b' class PublicReportGroup(object):'
79 self.val = val
72 self.val = val
80
73
81 def text(self):
74 def text(self):
82 return 'public_report_group = %s' % (self.val,)
75 return "public_report_group = %s" % (self.val,)
83
76
84 phash = text
77 phash = text
85
78
86 def __call__(self, context, request):
79 def __call__(self, context, request):
87 report_group = getattr(context, 'report_group', None)
80 report_group = getattr(context, "report_group", None)
88 if report_group:
81 if report_group:
89 return context.report_group.public == self.val
82 return context.report_group.public == self.val
90
83
@@ -95,8 +88,7 b' class contextTypeClass(object):'
95 self.cls = context_property[1]
88 self.cls = context_property[1]
96
89
97 def text(self):
90 def text(self):
98 return 'context_type_class = %s, %s' % (
91 return "context_type_class = %s, %s" % (self.context_property, self.cls)
99 self.context_property, self.cls)
100
92
101 phash = text
93 phash = text
102
94
@@ -13,4 +13,3 b''
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
@@ -24,16 +24,21 b' log = logging.getLogger(__name__)'
24
24
25
25
26 def main():
26 def main():
27 choices = ['logs']
27 choices = ["logs"]
28
28
29 parser = argparse.ArgumentParser(description='Cleanup AppEnlight logs')
29 parser = argparse.ArgumentParser(description="Cleanup AppEnlight logs")
30 parser.add_argument('-c', '--config', required=True,
30 parser.add_argument(
31 help='Configuration ini file of the application')
31 "-c", "--config", required=True, help="Configuration ini file of the application"
32 parser.add_argument('-t', '--types', choices=choices,
32 )
33 default='logs',
33 parser.add_argument(
34 help='Which parts of the database should get cleared')
34 "-t",
35 parser.add_argument('-r', '--resource', required=True, help='Resource id')
35 "--types",
36 parser.add_argument('-n', '--namespace', help='Limit to Namespace')
36 choices=choices,
37 default="logs",
38 help="Which parts of database should get cleared",
39 )
40 parser.add_argument("-r", "--resource", required=True, help="Resource id")
41 parser.add_argument("-n", "--namespace", help="Limit to Namespace")
37 args = parser.parse_args()
42 args = parser.parse_args()
38
43
39 config_uri = args.config
44 config_uri = args.config
@@ -42,22 +47,20 b' def main():'
42 env = bootstrap(config_uri)
47 env = bootstrap(config_uri)
43
48
44 config = {
49 config = {
45 'types': args.types,
50 "types": args.types,
46 'namespace': args.namespace,
51 "namespace": args.namespace,
47 'resource': int(args.resource),
52 "resource": int(args.resource),
48 }
53 }
49
54
50 action_cleanup_logs(config)
55 action_cleanup_logs(config)
51
56
52
57
53 def action_cleanup_logs(config):
58 def action_cleanup_logs(config):
54 filter_settings = {
59 filter_settings = {"namespace": []}
55 'namespace': []
60 if config["namespace"]:
56 }
61 filter_settings["namespace"].append(config["namespace"])
57 if config['namespace']:
62 logs_cleanup(config["resource"], filter_settings)
58 filter_settings['namespace'].append(config['namespace'])
59 logs_cleanup(config['resource'], filter_settings)
60
63
61
64
62 if __name__ == '__main__':
65 if __name__ == "__main__":
63 main()
66 main()
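A usage sketch for the cleanup entry point above; the script name and values are placeholders, but the flags match the argparse definition:

import sys

sys.argv = ["appenlight-cleanup", "-c", "production.ini",
            "-r", "1", "-n", "app.requests"]
main()  # ends up calling logs_cleanup(1, {"namespace": ["app.requests"]})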
@@ -25,13 +25,7 b' from ziggurat_foundations.models.services.user import UserService'
25
25
26 from appenlight.forms import UserRegisterForm
26 from appenlight.forms import UserRegisterForm
27 from appenlight.lib.ext_json import json
27 from appenlight.lib.ext_json import json
28 from appenlight.models import (
28 from appenlight.models import DBSession, Group, GroupPermission, User, AuthToken
29 DBSession,
30 Group,
31 GroupPermission,
32 User,
33 AuthToken
34 )
35 from appenlight.models.services.group import GroupService
29 from appenlight.models.services.group import GroupService
36
30
37 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
@@ -40,101 +34,101 b' _ = str'
40
34
41
35
42 def is_yes(input_data):
36 def is_yes(input_data):
43 return input_data in ['y', 'yes']
37 return input_data in ["y", "yes"]
44
38
45
39
46 def is_no(input_data):
40 def is_no(input_data):
47 return input_data in ['n', 'no']
41 return input_data in ["n", "no"]
48
42
49
43
50 def main():
44 def main():
51 parser = argparse.ArgumentParser(
45 parser = argparse.ArgumentParser(
52 description='Populate AppEnlight database',
46 description="Populate AppEnlight database", add_help=False
53 add_help=False)
47 )
54 parser.add_argument('-c', '--config', required=True,
48 parser.add_argument(
55 help='Configuration ini file of the application')
49 "-c", "--config", required=True, help="Configuration ini file of the application"
56 parser.add_argument('--username', default=None,
50 )
57 help='User to create')
51 parser.add_argument("--username", default=None, help="User to create")
58 parser.add_argument('--password', default=None,
52 parser.add_argument("--password", default=None, help="Password for created user")
59 help='Password for created user')
53 parser.add_argument("--email", default=None, help="Email for created user")
60 parser.add_argument('--email', default=None,
54 parser.add_argument(
61 help='Email for created user')
55 "--auth-token", default=None, help="Auth token for created user"
62 parser.add_argument('--auth-token', default=None,
56 )
63 help='Auth token for created user')
64 args = parser.parse_args()
57 args = parser.parse_args()
65 config_uri = args.config
58 config_uri = args.config
66
59
67 setup_logging(config_uri)
60 setup_logging(config_uri)
68 env = bootstrap(config_uri)
61 env = bootstrap(config_uri)
69 request = env['request']
62 request = env["request"]
70 with get_current_request().tm:
63 with get_current_request().tm:
71 group = GroupService.by_id(1)
64 group = GroupService.by_id(1)
72 if not group:
65 if not group:
73 group = Group(id=1, group_name='Administrators',
66 group = Group(
74 description="Top level permission owners")
67 id=1,
68 group_name="Administrators",
69 description="Top level permission owners",
70 )
75 DBSession.add(group)
71 DBSession.add(group)
76 permission = GroupPermission(perm_name='root_administration')
72 permission = GroupPermission(perm_name="root_administration")
77 group.permissions.append(permission)
73 group.permissions.append(permission)
78
74
79 create_user = True if args.username else None
75 create_user = True if args.username else None
80 while create_user is None:
76 while create_user is None:
81 response = input(
77 response = input("Do you want to create a new admin? (n)\n").lower()
82 'Do you want to create a new admin? (n)\n').lower()
83
78
84 if is_yes(response or 'n'):
79 if is_yes(response or "n"):
85 create_user = True
80 create_user = True
86 elif is_no(response or 'n'):
81 elif is_no(response or "n"):
87 create_user = False
82 create_user = False
88
83
89 if create_user:
84 if create_user:
90 csrf_token = request.session.get_csrf_token()
85 csrf_token = request.session.get_csrf_token()
91 user_name = args.username
86 user_name = args.username
92 print('*********************************************************')
87 print("*********************************************************")
93 while user_name is None:
88 while user_name is None:
94 response = input('What is the username of new admin?\n')
89 response = input("What is the username of new admin?\n")
95 form = UserRegisterForm(
90 form = UserRegisterForm(
96 user_name=response, csrf_token=csrf_token,
91 user_name=response, csrf_token=csrf_token, csrf_context=request
97 csrf_context=request)
92 )
98 form.validate()
93 form.validate()
99 if form.user_name.errors:
94 if form.user_name.errors:
100 print(form.user_name.errors[0])
95 print(form.user_name.errors[0])
101 else:
96 else:
102 user_name = response
97 user_name = response
103 print('The admin username is "{}"\n'.format(user_name))
98 print('The admin username is "{}"\n'.format(user_name))
104 print('*********************************************************')
99 print("*********************************************************")
105 email = args.email
100 email = args.email
106 while email is None:
101 while email is None:
107 response = input('What is the email of admin account?\n')
102 response = input("What is the email of admin account?\n")
108 form = UserRegisterForm(
103 form = UserRegisterForm(
109 email=response, csrf_token=csrf_token,
104 email=response, csrf_token=csrf_token, csrf_context=request
110 csrf_context=request)
105 )
111 form.validate()
106 form.validate()
112 if form.email.errors:
107 if form.email.errors:
113 print(form.email.errors[0])
108 print(form.email.errors[0])
114 else:
109 else:
115 email = response
110 email = response
116 print('The admin email is "{}"\n'.format(email))
111 print('The admin email is "{}"\n'.format(email))
117 print('*********************************************************')
112 print("*********************************************************")
118 user_password = args.password
113 user_password = args.password
119 confirmed_password = args.password
114 confirmed_password = args.password
120 while user_password is None or confirmed_password is None:
115 while user_password is None or confirmed_password is None:
121 response = getpass.getpass(
116 response = getpass.getpass("What is the password for admin account?\n")
122 'What is the password for admin account?\n')
123 form = UserRegisterForm(
117 form = UserRegisterForm(
124 user_password=response, csrf_token=csrf_token,
118 user_password=response, csrf_token=csrf_token, csrf_context=request
125 csrf_context=request)
119 )
126 form.validate()
120 form.validate()
127 if form.user_password.errors:
121 if form.user_password.errors:
128 print(form.user_password.errors[0])
122 print(form.user_password.errors[0])
129 else:
123 else:
130 user_password = response
124 user_password = response
131
125
132 response = getpass.getpass('Please confirm the password.\n')
126 response = getpass.getpass("Please confirm the password.\n")
133 if user_password == response:
127 if user_password == response:
134 confirmed_password = response
128 confirmed_password = response
135 else:
129 else:
136 print('Passwords do not match. Please try again')
130 print("Passwords do not match. Please try again")
137 print('*********************************************************')
131 print("*********************************************************")
138
132
139 with get_current_request().tm:
133 with get_current_request().tm:
140 if create_user:
134 if create_user:
@@ -148,8 +142,8 b' def main():'
148 token.token = args.auth_token
142 token.token = args.auth_token
149 user.auth_tokens.append(token)
143 user.auth_tokens.append(token)
150 group.users.append(user)
144 group.users.append(user)
151 print('USER CREATED')
145 print("USER CREATED")
152 print(json.dumps(user.get_dict()))
146 print(json.dumps(user.get_dict()))
153 print('*********************************************************')
147 print("*********************************************************")
154 print('AUTH TOKEN')
148 print("AUTH TOKEN")
155 print(json.dumps(user.auth_tokens[0].get_dict()))
149 print(json.dumps(user.auth_tokens[0].get_dict()))
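When every flag is supplied, the interactive prompts above are skipped entirely; a non-interactive sketch (the script name and all values are placeholders):

import sys

sys.argv = ["appenlight-initializedb", "-c", "production.ini",
            "--username", "admin", "--password", "s3cret",
            "--email", "admin@example.com", "--auth-token", "TOKEN"]
main()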
@@ -26,32 +26,35 b' log = logging.getLogger(__name__)'
26
26
27
27
28 def gen_secret():
28 def gen_secret():
29 return Fernet.generate_key().decode('utf8')
29 return Fernet.generate_key().decode("utf8")
30
30
31
31
32 def main():
32 def main():
33 parser = argparse.ArgumentParser(
33 parser = argparse.ArgumentParser(
34 description='Generate AppEnlight static resources',
34 description="Generate AppEnlight static resources", add_help=False
35 add_help=False)
35 )
36 parser.add_argument('config', help='Name of generated file')
36 parser.add_argument("config", help="Name of generated file")
37 parser.add_argument(
37 parser.add_argument(
38 '--domain',
38 "--domain",
39 default='appenlight-rhodecode.local',
39 default="appenlight-rhodecode.local",
40 help='Domain which will be used to serve the application')
40 help="Domain which will be used to serve the application",
41 )
41 parser.add_argument(
42 parser.add_argument(
42 '--dbstring',
43 "--dbstring",
43 default='postgresql://appenlight:test@127.0.0.1:5432/appenlight',
44 default="postgresql://appenlight:test@127.0.0.1:5432/appenlight",
44 help='Database connection string used by the application')
45 help="Database connection string used by the application",
46 )
45 args = parser.parse_args()
47 args = parser.parse_args()
46 ini_path = os.path.join('templates', 'ini', 'production.ini.jinja2')
48 ini_path = os.path.join("templates", "ini", "production.ini.jinja2")
47 template_str = pkg_resources.resource_string('appenlight', ini_path)
49 template_str = pkg_resources.resource_string("appenlight", ini_path)
48 template = jinja2.Template(template_str.decode('utf8'))
50 template = jinja2.Template(template_str.decode("utf8"))
49 template_vars = {'appenlight_encryption_secret': gen_secret(),
51 template_vars = {
50 'appenlight_authtkt_secret': gen_secret(),
52 "appenlight_encryption_secret": gen_secret(),
51 'appenlight_redis_session_secret': gen_secret(),
53 "appenlight_authtkt_secret": gen_secret(),
52 'appenlight_domain': args.domain,
54 "appenlight_redis_session_secret": gen_secret(),
53 'appenlight_dbstring': args.dbstring,
55 "appenlight_domain": args.domain,
54 }
56 "appenlight_dbstring": args.dbstring,
57 }
55 compiled = template.render(**template_vars)
58 compiled = template.render(**template_vars)
56 with open(args.config, 'w') as f:
59 with open(args.config, "w") as f:
57 f.write(compiled)
60 f.write(compiled)
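gen_secret() above leans on Fernet key generation, presumably from the cryptography package given the API, so every rendered ini gets fresh, independent secrets:

from cryptography.fernet import Fernet  # assumed provider of the API above

secret = Fernet.generate_key().decode("utf8")
print(len(secret))  # 44: a 32-byte urlsafe-base64 key as text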
@@ -31,10 +31,11 b' log = logging.getLogger(__name__)'
31
31
32 def main(argv=sys.argv):
32 def main(argv=sys.argv):
33 parser = argparse.ArgumentParser(
33 parser = argparse.ArgumentParser(
34 description='Migrate AppEnlight database to latest version',
34 description="Migrate AppEnlight database to latest version", add_help=False
35 add_help=False)
35 )
36 parser.add_argument('-c', '--config', required=True,
36 parser.add_argument(
37 help='Configuration ini file of the application')
37 "-c", "--config", required=True, help="Configuration ini file of the application"
38 )
38 args = parser.parse_args()
39 args = parser.parse_args()
39 config_uri = args.config
40 config_uri = args.config
40
41
@@ -42,32 +43,31 b' def main(argv=sys.argv):'
42 bootstrap(config_uri)
43 bootstrap(config_uri)
43 registry = get_current_registry()
44 registry = get_current_registry()
44 alembic_cfg = Config()
45 alembic_cfg = Config()
45 alembic_cfg.set_main_option("sqlalchemy.echo", 'true')
46 alembic_cfg.set_main_option("sqlalchemy.echo", "true")
46 alembic_cfg.set_main_option("script_location",
47 alembic_cfg.set_main_option("script_location", "ziggurat_foundations:migrations")
47 "ziggurat_foundations:migrations")
48 alembic_cfg.set_main_option("sqlalchemy.url", registry.settings["sqlalchemy.url"])
48 alembic_cfg.set_main_option("sqlalchemy.url",
49 registry.settings["sqlalchemy.url"])
50 command.upgrade(alembic_cfg, "head")
49 command.upgrade(alembic_cfg, "head")
51 alembic_cfg = Config()
50 alembic_cfg = Config()
52 alembic_cfg.set_main_option("sqlalchemy.echo", 'true')
51 alembic_cfg.set_main_option("sqlalchemy.echo", "true")
53 alembic_cfg.set_main_option("script_location", "appenlight:migrations")
52 alembic_cfg.set_main_option("script_location", "appenlight:migrations")
54 alembic_cfg.set_main_option("sqlalchemy.url",
53 alembic_cfg.set_main_option("sqlalchemy.url", registry.settings["sqlalchemy.url"])
55 registry.settings["sqlalchemy.url"])
56 command.upgrade(alembic_cfg, "head")
54 command.upgrade(alembic_cfg, "head")
57
55
58 for plugin_name, config in registry.appenlight_plugins.items():
56 for plugin_name, config in registry.appenlight_plugins.items():
59 if config['sqlalchemy_migrations']:
57 if config["sqlalchemy_migrations"]:
60 alembic_cfg = Config()
58 alembic_cfg = Config()
61 alembic_cfg.set_main_option("script_location",
59 alembic_cfg.set_main_option(
62 config['sqlalchemy_migrations'])
60 "script_location", config["sqlalchemy_migrations"]
63 alembic_cfg.set_main_option("sqlalchemy.url",
61 )
64 registry.settings["sqlalchemy.url"])
62 alembic_cfg.set_main_option(
65 alembic_cfg.set_main_option("sqlalchemy.echo", 'true')
63 "sqlalchemy.url", registry.settings["sqlalchemy.url"]
64 )
65 alembic_cfg.set_main_option("sqlalchemy.echo", "true")
66 command.upgrade(alembic_cfg, "head")
66 command.upgrade(alembic_cfg, "head")
67
67
68 with get_current_request().tm:
68 with get_current_request().tm:
69 ConfigService.setup_default_values()
69 ConfigService.setup_default_values()
70
70
71 for plugin_name, config in registry.appenlight_plugins.items():
71 for plugin_name, config in registry.appenlight_plugins.items():
72 if config['default_values_setter']:
72 if config["default_values_setter"]:
73 get_callable(config['default_values_setter'])()
73 get_callable(config["default_values_setter"])()
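The migration flow above repeats one idiom three times (ziggurat_foundations, core, then each plugin): build an in-memory alembic Config and upgrade to head. Distilled, with a placeholder database URL:

from alembic import command
from alembic.config import Config

cfg = Config()
cfg.set_main_option("script_location", "appenlight:migrations")
cfg.set_main_option("sqlalchemy.url", "postgresql://user:pass@localhost/appenlight")
command.upgrade(cfg, "head")  # same call the script issues per config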
@@ -25,11 +25,7 b' import elasticsearch.helpers'
25 from collections import defaultdict
25 from collections import defaultdict
26 from pyramid.paster import setup_logging
26 from pyramid.paster import setup_logging
27 from pyramid.paster import bootstrap
27 from pyramid.paster import bootstrap
28 from appenlight.models import (
28 from appenlight.models import DBSession, Datastores, metadata
29 DBSession,
30 Datastores,
31 metadata
32 )
33 from appenlight.lib import get_callable
29 from appenlight.lib import get_callable
34 from appenlight.models.report_group import ReportGroup
30 from appenlight.models.report_group import ReportGroup
35 from appenlight.models.report import Report
31 from appenlight.models.report import Report
@@ -42,25 +38,27 b' from appenlight.models.metric import Metric'
42 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
43
39
44 tables = {
40 tables = {
45 'slow_calls_p_': [],
41 "slow_calls_p_": [],
46 'reports_stats_p_': [],
42 "reports_stats_p_": [],
47 'reports_p_': [],
43 "reports_p_": [],
48 'reports_groups_p_': [],
44 "reports_groups_p_": [],
49 'logs_p_': [],
45 "logs_p_": [],
50 'metrics_p_': [],
46 "metrics_p_": [],
51 }
47 }
52
48
49
53 def detect_tables(table_prefix):
50 def detect_tables(table_prefix):
54 found_tables = []
51 found_tables = []
55 db_tables_query = '''
52 db_tables_query = """
56 SELECT tablename FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
53 SELECT tablename FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
57 tablename NOT LIKE 'sql_%' ORDER BY tablename ASC;'''
54 tablename NOT LIKE 'sql_%' ORDER BY tablename ASC;"""
58
55
59 for table in DBSession.execute(db_tables_query).fetchall():
56 for table in DBSession.execute(db_tables_query).fetchall():
60 tablename = table.tablename
57 tablename = table.tablename
61 if tablename.startswith(table_prefix):
58 if tablename.startswith(table_prefix):
62 t = sa.Table(tablename, metadata, autoload=True,
59 t = sa.Table(
63 autoload_with=DBSession.bind.engine)
60 tablename, metadata, autoload=True, autoload_with=DBSession.bind.engine
61 )
64 found_tables.append(t)
62 found_tables.append(t)
65 return found_tables
63 return found_tables
66
64
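detect_tables above reflects every partition sharing one of the prefixes in the tables dict into live SQLAlchemy Table objects. A usage sketch (it must run inside a bootstrapped app so DBSession is bound; a date-style suffix such as logs_p_2018_03 is an assumption based on the monthly partitioning seen elsewhere in this commit):

for table in detect_tables("logs_p_"):
    print(table.name, [c.name for c in table.columns])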
@@ -75,69 +73,78 b' def main():'
75 # need parser twice because we first need to load ini file
73 # need parser twice because we first need to load ini file
76 # bootstrap pyramid and then load plugins
74 # bootstrap pyramid and then load plugins
77 pre_parser = argparse.ArgumentParser(
75 pre_parser = argparse.ArgumentParser(
78 description='Reindex AppEnlight data',
76 description="Reindex AppEnlight data", add_help=False
79 add_help=False)
77 )
80 pre_parser.add_argument('-c', '--config', required=True,
78 pre_parser.add_argument(
81 help='Configuration ini file of the application')
79 "-c", "--config", required=True, help="Configuration ini file of the application"
82 pre_parser.add_argument('-h', '--help', help='Show help', nargs='?')
80 )
83 pre_parser.add_argument('-t', '--types', nargs='+',
81 pre_parser.add_argument("-h", "--help", help="Show help", nargs="?")
84 help='Which parts of the database should get reindexed')
82 pre_parser.add_argument(
83 "-t", "--types", nargs="+", help="Which parts of database should get reindexed"
84 )
85 args = pre_parser.parse_args()
85 args = pre_parser.parse_args()
86
86
87 config_uri = args.config
87 config_uri = args.config
88 setup_logging(config_uri)
88 setup_logging(config_uri)
89 log.setLevel(logging.INFO)
89 log.setLevel(logging.INFO)
90 env = bootstrap(config_uri)
90 env = bootstrap(config_uri)
91 parser = argparse.ArgumentParser(description='Reindex AppEnlight data')
91 parser = argparse.ArgumentParser(description="Reindex AppEnlight data")
92 choices = {
92 choices = {
93 'reports': 'appenlight.scripts.reindex_elasticsearch:reindex_reports',
93 "reports": "appenlight.scripts.reindex_elasticsearch:reindex_reports",
94 'logs': 'appenlight.scripts.reindex_elasticsearch:reindex_logs',
94 "logs": "appenlight.scripts.reindex_elasticsearch:reindex_logs",
95 'metrics': 'appenlight.scripts.reindex_elasticsearch:reindex_metrics',
95 "metrics": "appenlight.scripts.reindex_elasticsearch:reindex_metrics",
96 'slow_calls': 'appenlight.scripts.reindex_elasticsearch:reindex_slow_calls',
96 "slow_calls": "appenlight.scripts.reindex_elasticsearch:reindex_slow_calls",
97 'template': 'appenlight.scripts.reindex_elasticsearch:update_template'
97 "template": "appenlight.scripts.reindex_elasticsearch:update_template",
98 }
98 }
99 for k, v in env['registry'].appenlight_plugins.items():
99 for k, v in env["registry"].appenlight_plugins.items():
100 if v.get('fulltext_indexer'):
100 if v.get("fulltext_indexer"):
101 choices[k] = v['fulltext_indexer']
101 choices[k] = v["fulltext_indexer"]
102 parser.add_argument('-t', '--types', nargs='*',
102 parser.add_argument(
103 choices=['all'] + list(choices.keys()), default=[],
103 "-t",
104 help='Which parts of the database should get reindexed')
104 "--types",
105 parser.add_argument('-c', '--config', required=True,
105 nargs="*",
106 help='Configuration ini file of the application')
106 choices=["all"] + list(choices.keys()),
107 default=[],
108 help="Which parts of database should get reindexed",
109 )
110 parser.add_argument(
111 "-c", "--config", required=True, help="Configuration ini file of application"
112 )
107 args = parser.parse_args()
113 args = parser.parse_args()
108
114
109
115 if "all" in args.types:
110 if 'all' in args.types:
111 args.types = list(choices.keys())
116 args.types = list(choices.keys())
112
117
113 print("Selected types to reindex: {}".format(args.types))
118 print("Selected types to reindex: {}".format(args.types))
114
119
115 log.info('settings {}'.format(args.types))
120 log.info("settings {}".format(args.types))
116
121
117 if 'template' in args.types:
122 if "template" in args.types:
118 get_callable(choices['template'])()
123 get_callable(choices["template"])()
119 args.types.remove('template')
124 args.types.remove("template")
120 for selected in args.types:
125 for selected in args.types:
121 get_callable(choices[selected])()
126 get_callable(choices[selected])()
122
127
123
128
124 def update_template():
129 def update_template():
125 try:
130 try:
126 Datastores.es.indices.delete_template('rcae')
131 Datastores.es.indices.delete_template("rcae")
127 except elasticsearch.exceptions.NotFoundError as e:
132 except elasticsearch.exceptions.NotFoundError as e:
128 log.error(e)
133 log.error(e)
129 log.info('updating elasticsearch template')
134 log.info("updating elasticsearch template")
130 tag_templates = [
135 tag_templates = [
131 {"values": {
136 {
132 "path_match": "tags.*",
137 "values": {
133 "mapping": {
138 "path_match": "tags.*",
134 "type": "object",
139 "mapping": {
135 "properties": {
140 "type": "object",
136 "values": {"type": "string", "analyzer": "tag_value"},
141 "properties": {
137 "numeric_values": {"type": "float"}
142 "values": {"type": "string", "analyzer": "tag_value"},
138 }
143 "numeric_values": {"type": "float"},
144 },
145 },
139 }
146 }
140 }}
147 }
141 ]
148 ]
142
149
143 template_schema = {
150 template_schema = {
@@ -145,8 +152,7 b' def update_template():'
145 "settings": {
152 "settings": {
146 "index": {
153 "index": {
147 "refresh_interval": "5s",
154 "refresh_interval": "5s",
148 "translog": {"sync_interval": "5s",
155 "translog": {"sync_interval": "5s", "durability": "async"},
149 "durability": "async"}
150 },
156 },
151 "number_of_shards": 5,
157 "number_of_shards": 5,
152 "analysis": {
158 "analysis": {
@@ -155,13 +161,13 b' def update_template():'
155 "type": "custom",
161 "type": "custom",
156 "char_filter": [],
162 "char_filter": [],
157 "tokenizer": "path_hierarchy",
163 "tokenizer": "path_hierarchy",
158 "filter": []
164 "filter": [],
159 },
165 },
160 "tag_value": {
166 "tag_value": {
161 "type": "custom",
167 "type": "custom",
162 "char_filter": [],
168 "char_filter": [],
163 "tokenizer": "keyword",
169 "tokenizer": "keyword",
164 "filter": ["lowercase"]
170 "filter": ["lowercase"],
165 },
171 },
166 }
172 }
167 },
173 },
@@ -182,8 +188,8 b' def update_template():'
182 "last_timestamp": {"type": "date"},
188 "last_timestamp": {"type": "date"},
183 "average_duration": {"type": "float"},
189 "average_duration": {"type": "float"},
184 "summed_duration": {"type": "float"},
190 "summed_duration": {"type": "float"},
185 "public": {"type": "boolean"}
191 "public": {"type": "boolean"},
186 }
192 },
187 },
193 },
188 "report": {
194 "report": {
189 "_all": {"enabled": False},
195 "_all": {"enabled": False},
@@ -202,15 +208,11 b' def update_template():'
202 "request_id": {"type": "string", "index": "not_analyzed"},
208 "request_id": {"type": "string", "index": "not_analyzed"},
203 "end_time": {"type": "date"},
209 "end_time": {"type": "date"},
204 "duration": {"type": "float"},
210 "duration": {"type": "float"},
205 "tags": {
211 "tags": {"type": "object"},
206 "type": "object"
207 },
208 "tag_list": {"type": "string", "analyzer": "tag_value"},
212 "tag_list": {"type": "string", "analyzer": "tag_value"},
209 "extra": {
213 "extra": {"type": "object"},
210 "type": "object"
211 },
212 },
214 },
213 "_parent": {"type": "report_group"}
215 "_parent": {"type": "report_group"},
214 },
216 },
215 "log": {
217 "log": {
216 "_all": {"enabled": False},
218 "_all": {"enabled": False},
@@ -225,26 +227,24 b' def update_template():'
225 "log_level": {"type": "string", "analyzer": "simple"},
227 "log_level": {"type": "string", "analyzer": "simple"},
226 "message": {"type": "string", "analyzer": "simple"},
228 "message": {"type": "string", "analyzer": "simple"},
227 "namespace": {"type": "string", "index": "not_analyzed"},
229 "namespace": {"type": "string", "index": "not_analyzed"},
228 "tags": {
230 "tags": {"type": "object"},
229 "type": "object"
231 "tag_list": {"type": "string", "analyzer": "tag_value"},
230 },
232 },
231 "tag_list": {"type": "string", "analyzer": "tag_value"}
233 },
232 }
234 },
233 }
234 }
235 }
235 }
236
236
237 Datastores.es.indices.put_template('rcae', body=template_schema)
237 Datastores.es.indices.put_template("rcae", body=template_schema)
238
238
239
239
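A note on the dynamic template installed above: every field arriving under tags.* is mapped to an object whose "values" string runs through the custom "tag_value" analyzer (keyword tokenizer plus lowercase filter, per the analysis section) while "numeric_values" is stored as a float. A minimal sketch of a conforming document, assuming the template's index pattern covers the index and the ES 1.x/2.x-era Python client this code targets (index name hypothetical):

import elasticsearch

es = elasticsearch.Elasticsearch()
# Once the template applies, the "tags.*" dynamic mapping kicks in:
# string tag values are analyzed with "tag_value", numbers go to the float field.
es.index(
    index="rcae_l_2017_01",  # hypothetical partition index
    doc_type="log",
    body={
        "message": "checkout failed",
        "tags": {"price": {"values": "99.90", "numeric_values": 99.9}},
    },
)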
240 def reindex_reports():
240 def reindex_reports():
241 reports_groups_tables = detect_tables('reports_groups_p_')
241 reports_groups_tables = detect_tables("reports_groups_p_")
242 try:
242 try:
243 Datastores.es.indices.delete('rcae_r*')
243 Datastores.es.indices.delete("rcae_r*")
244 except elasticsearch.exceptions.NotFoundError as e:
244 except elasticsearch.exceptions.NotFoundError as e:
245 log.error(e)
245 log.error(e)
246
246
247 log.info('reindexing report groups')
247 log.info("reindexing report groups")
248 i = 0
248 i = 0
249 task_start = datetime.datetime.now()
249 task_start = datetime.datetime.now()
250 for partition_table in reports_groups_tables:
250 for partition_table in reports_groups_tables:
@@ -262,19 +262,18 b' def reindex_reports():'
262 es_docs[d_range].append(item.es_doc())
262 es_docs[d_range].append(item.es_doc())
263 if es_docs:
263 if es_docs:
264 name = partition_table.name
264 name = partition_table.name
265 log.info('round {}, {}'.format(i, name))
265 log.info("round {}, {}".format(i, name))
266 for k, v in es_docs.items():
266 for k, v in es_docs.items():
267 to_update = {'_index': k, '_type': 'report_group'}
267 to_update = {"_index": k, "_type": "report_group"}
268 [i.update(to_update) for i in v]
268 [i.update(to_update) for i in v]
269 elasticsearch.helpers.bulk(Datastores.es, v)
269 elasticsearch.helpers.bulk(Datastores.es, v)
270
270
271 log.info(
271 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
272 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
273
272
274 i = 0
273 i = 0
275 log.info('reindexing reports')
274 log.info("reindexing reports")
276 task_start = datetime.datetime.now()
275 task_start = datetime.datetime.now()
277 reports_tables = detect_tables('reports_p_')
276 reports_tables = detect_tables("reports_p_")
278 for partition_table in reports_tables:
277 for partition_table in reports_tables:
279 conn = DBSession.connection().execution_options(stream_results=True)
278 conn = DBSession.connection().execution_options(stream_results=True)
280 result = conn.execute(partition_table.select())
279 result = conn.execute(partition_table.select())
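The stream_results=True execution option is what keeps these reindex loops from pulling a whole partition table into memory: rows come off a server-side cursor in chunks instead of one big fetch. A standalone sketch of the same pattern, with a hypothetical DSN and partition table:

from sqlalchemy import create_engine
from sqlalchemy.sql import column, table

engine = create_engine("postgresql:///appenlight")   # hypothetical DSN
reports = table("reports_p_2017_01", column("id"))   # hypothetical partition

conn = engine.connect().execution_options(stream_results=True)
result = conn.execute(reports.select())
while True:
    chunk = result.fetchmany(2000)  # rows arrive in batches from the cursor
    if not chunk:
        break
    for row in chunk:
        print(row.id)  # stand-in for building the ES doc per row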
@@ -290,19 +289,18 b' def reindex_reports():'
290 es_docs[d_range].append(item.es_doc())
289 es_docs[d_range].append(item.es_doc())
291 if es_docs:
290 if es_docs:
292 name = partition_table.name
291 name = partition_table.name
293 log.info('round {}, {}'.format(i, name))
292 log.info("round {}, {}".format(i, name))
294 for k, v in es_docs.items():
293 for k, v in es_docs.items():
295 to_update = {'_index': k, '_type': 'report'}
294 to_update = {"_index": k, "_type": "report"}
296 [i.update(to_update) for i in v]
295 [i.update(to_update) for i in v]
297 elasticsearch.helpers.bulk(Datastores.es, v)
296 elasticsearch.helpers.bulk(Datastores.es, v)
298
297
299 log.info(
298 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
300 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
301
299
302 log.info('reindexing reports stats')
300 log.info("reindexing reports stats")
303 i = 0
301 i = 0
304 task_start = datetime.datetime.now()
302 task_start = datetime.datetime.now()
305 reports_stats_tables = detect_tables('reports_stats_p_')
303 reports_stats_tables = detect_tables("reports_stats_p_")
306 for partition_table in reports_stats_tables:
304 for partition_table in reports_stats_tables:
307 conn = DBSession.connection().execution_options(stream_results=True)
305 conn = DBSession.connection().execution_options(stream_results=True)
308 result = conn.execute(partition_table.select())
306 result = conn.execute(partition_table.select())
@@ -315,34 +313,33 b' def reindex_reports():'
315 rd = dict(list(row.items()))
313 rd = dict(list(row.items()))
316 # remove legacy columns
314 # remove legacy columns
317 # TODO: remove the column later
315 # TODO: remove the column later
318 rd.pop('size', None)
316 rd.pop("size", None)
319 item = ReportStat(**rd)
317 item = ReportStat(**rd)
320 i += 1
318 i += 1
321 d_range = item.partition_id
319 d_range = item.partition_id
322 es_docs[d_range].append(item.es_doc())
320 es_docs[d_range].append(item.es_doc())
323 if es_docs:
321 if es_docs:
324 name = partition_table.name
322 name = partition_table.name
325 log.info('round {}, {}'.format(i, name))
323 log.info("round {}, {}".format(i, name))
326 for k, v in es_docs.items():
324 for k, v in es_docs.items():
327 to_update = {'_index': k, '_type': 'log'}
325 to_update = {"_index": k, "_type": "log"}
328 [i.update(to_update) for i in v]
326 [i.update(to_update) for i in v]
329 elasticsearch.helpers.bulk(Datastores.es, v)
327 elasticsearch.helpers.bulk(Datastores.es, v)
330
328
331 log.info(
329 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
332 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
333
330
334
331
335 def reindex_logs():
332 def reindex_logs():
336 try:
333 try:
337 Datastores.es.indices.delete('rcae_l*')
334 Datastores.es.indices.delete("rcae_l*")
338 except elasticsearch.exceptions.NotFoundError as e:
335 except elasticsearch.exceptions.NotFoundError as e:
339 log.error(e)
336 log.error(e)
340
337
341 # logs
338 # logs
342 log.info('reindexing logs')
339 log.info("reindexing logs")
343 i = 0
340 i = 0
344 task_start = datetime.datetime.now()
341 task_start = datetime.datetime.now()
345 log_tables = detect_tables('logs_p_')
342 log_tables = detect_tables("logs_p_")
346 for partition_table in log_tables:
343 for partition_table in log_tables:
347 conn = DBSession.connection().execution_options(stream_results=True)
344 conn = DBSession.connection().execution_options(stream_results=True)
348 result = conn.execute(partition_table.select())
345 result = conn.execute(partition_table.select())
@@ -359,26 +356,25 b' def reindex_logs():'
359 es_docs[d_range].append(item.es_doc())
356 es_docs[d_range].append(item.es_doc())
360 if es_docs:
357 if es_docs:
361 name = partition_table.name
358 name = partition_table.name
362 log.info('round {}, {}'.format(i, name))
359 log.info("round {}, {}".format(i, name))
363 for k, v in es_docs.items():
360 for k, v in es_docs.items():
364 to_update = {'_index': k, '_type': 'log'}
361 to_update = {"_index": k, "_type": "log"}
365 [i.update(to_update) for i in v]
362 [i.update(to_update) for i in v]
366 elasticsearch.helpers.bulk(Datastores.es, v)
363 elasticsearch.helpers.bulk(Datastores.es, v)
367
364
368 log.info(
365 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
369 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
370
366
371
367
372 def reindex_metrics():
368 def reindex_metrics():
373 try:
369 try:
374 Datastores.es.indices.delete('rcae_m*')
370 Datastores.es.indices.delete("rcae_m*")
375 except elasticsearch.exceptions.NotFoundError as e:
371 except elasticsearch.exceptions.NotFoundError as e:
376 log.error(e)
372 log.error(e)
377
373
378 log.info('reindexing applications metrics')
374 log.info("reindexing applications metrics")
379 i = 0
375 i = 0
380 task_start = datetime.datetime.now()
376 task_start = datetime.datetime.now()
381 metric_tables = detect_tables('metrics_p_')
377 metric_tables = detect_tables("metrics_p_")
382 for partition_table in metric_tables:
378 for partition_table in metric_tables:
383 conn = DBSession.connection().execution_options(stream_results=True)
379 conn = DBSession.connection().execution_options(stream_results=True)
384 result = conn.execute(partition_table.select())
380 result = conn.execute(partition_table.select())
@@ -394,26 +390,25 b' def reindex_metrics():'
394 es_docs[d_range].append(item.es_doc())
390 es_docs[d_range].append(item.es_doc())
395 if es_docs:
391 if es_docs:
396 name = partition_table.name
392 name = partition_table.name
397 log.info('round {}, {}'.format(i, name))
393 log.info("round {}, {}".format(i, name))
398 for k, v in es_docs.items():
394 for k, v in es_docs.items():
399 to_update = {'_index': k, '_type': 'log'}
395 to_update = {"_index": k, "_type": "log"}
400 [i.update(to_update) for i in v]
396 [i.update(to_update) for i in v]
401 elasticsearch.helpers.bulk(Datastores.es, v)
397 elasticsearch.helpers.bulk(Datastores.es, v)
402
398
403 log.info(
399 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
404 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
405
400
406
401
407 def reindex_slow_calls():
402 def reindex_slow_calls():
408 try:
403 try:
409 Datastores.es.indices.delete('rcae_sc*')
404 Datastores.es.indices.delete("rcae_sc*")
410 except elasticsearch.exceptions.NotFoundError as e:
405 except elasticsearch.exceptions.NotFoundError as e:
411 log.error(e)
406 log.error(e)
412
407
413 log.info('reindexing slow calls')
408 log.info("reindexing slow calls")
414 i = 0
409 i = 0
415 task_start = datetime.datetime.now()
410 task_start = datetime.datetime.now()
416 slow_calls_tables = detect_tables('slow_calls_p_')
411 slow_calls_tables = detect_tables("slow_calls_p_")
417 for partition_table in slow_calls_tables:
412 for partition_table in slow_calls_tables:
418 conn = DBSession.connection().execution_options(stream_results=True)
413 conn = DBSession.connection().execution_options(stream_results=True)
419 result = conn.execute(partition_table.select())
414 result = conn.execute(partition_table.select())
@@ -429,15 +424,14 b' def reindex_slow_calls():'
429 es_docs[d_range].append(item.es_doc())
424 es_docs[d_range].append(item.es_doc())
430 if es_docs:
425 if es_docs:
431 name = partition_table.name
426 name = partition_table.name
432 log.info('round {}, {}'.format(i, name))
427 log.info("round {}, {}".format(i, name))
433 for k, v in es_docs.items():
428 for k, v in es_docs.items():
434 to_update = {'_index': k, '_type': 'log'}
429 to_update = {"_index": k, "_type": "log"}
435 [i.update(to_update) for i in v]
430 [i.update(to_update) for i in v]
436 elasticsearch.helpers.bulk(Datastores.es, v)
431 elasticsearch.helpers.bulk(Datastores.es, v)
437
432
438 log.info(
433 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
439 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
440
434
441
435
442 if __name__ == '__main__':
436 if __name__ == "__main__":
443 main()
437 main()
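Every reindex loop above ends with the same idiom: documents grouped per target index, an _index/_type header merged into each dict, then one elasticsearch.helpers.bulk() call per group. The helper accepts plain dicts as actions and extracts the underscore-prefixed meta keys itself. A standalone sketch with hypothetical names:

import elasticsearch
import elasticsearch.helpers

es = elasticsearch.Elasticsearch()
docs = [{"_id": 1, "message": "boom"}, {"_id": 2, "message": "bang"}]
for doc in docs:
    # meta keys ride along inside each action dict
    doc.update({"_index": "rcae_l_2017_01", "_type": "log"})
success, errors = elasticsearch.helpers.bulk(es, docs)
print(success, errors)  # count of indexed docs, list of per-doc failures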
@@ -28,30 +28,30 b' log = logging.getLogger(__name__)'
28
28
29 def main():
29 def main():
30 parser = argparse.ArgumentParser(
30 parser = argparse.ArgumentParser(
31 description='Generate AppEnlight static resources',
31 description="Generate AppEnlight static resources", add_help=False
32 add_help=False)
32 )
33 parser.add_argument('-c', '--config', required=True,
33 parser.add_argument(
34 help='Configuration ini file of application')
34 "-c", "--config", required=True, help="Configuration ini file of application"
35 )
35 args = parser.parse_args()
36 args = parser.parse_args()
36 config_uri = args.config
37 config_uri = args.config
37 setup_logging(config_uri)
38 setup_logging(config_uri)
38 env = bootstrap(config_uri)
39 env = bootstrap(config_uri)
39 registry = env['registry']
40 registry = env["registry"]
40 settings = registry.settings
41 settings = registry.settings
41 if os.path.exists(settings['webassets.dir']):
42 if os.path.exists(settings["webassets.dir"]):
42 shutil.rmtree(settings['webassets.dir'])
43 shutil.rmtree(settings["webassets.dir"])
43 os.mkdir(settings['webassets.dir'])
44 os.mkdir(settings["webassets.dir"])
44 ae_basedir = pkg_resources.resource_filename('appenlight', 'static')
45 ae_basedir = pkg_resources.resource_filename("appenlight", "static")
45 shutil.copytree(ae_basedir,
46 shutil.copytree(ae_basedir, os.path.join(settings["webassets.dir"], "appenlight"))
46 os.path.join(settings['webassets.dir'], 'appenlight'))
47
47
48 for plugin_name, config in registry.appenlight_plugins.items():
48 for plugin_name, config in registry.appenlight_plugins.items():
49 if config['static']:
49 if config["static"]:
50 shutil.copytree(config['static'],
50 shutil.copytree(
51 os.path.join(settings['webassets.dir'],
51 config["static"], os.path.join(settings["webassets.dir"], plugin_name)
52 plugin_name))
52 )
53
53
54 for root, dirs, files in os.walk(settings['webassets.dir']):
54 for root, dirs, files in os.walk(settings["webassets.dir"]):
55 for item in dirs:
55 for item in dirs:
56 os.chmod(os.path.join(root, item), 0o775)
56 os.chmod(os.path.join(root, item), 0o775)
57 for item in files:
57 for item in files:
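The static-resources script above follows the standard Pyramid console-script recipe: parse the ini path, call setup_logging(), then bootstrap() to obtain a fully configured registry outside any web request. The skeleton, reduced to its moving parts (the script's purpose here is hypothetical):

import argparse

from pyramid.paster import bootstrap, setup_logging

parser = argparse.ArgumentParser(description="hypothetical maintenance script")
parser.add_argument("-c", "--config", required=True)
args = parser.parse_args()

setup_logging(args.config)    # wire up logging from the ini [loggers] sections
env = bootstrap(args.config)  # full application setup, no HTTP server involved
settings = env["registry"].settings
env["closer"]()               # release bootstrap resources when done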
@@ -36,8 +36,8 b' log = logging.getLogger(__name__)'
36
36
37
37
38 def groupfinder(userid, request):
38 def groupfinder(userid, request):
39 if userid and hasattr(request, 'user') and request.user:
39 if userid and hasattr(request, "user") and request.user:
40 groups = ['group:%s' % g.id for g in request.user.groups]
40 groups = ["group:%s" % g.id for g in request.user.groups]
41 return groups
41 return groups
42 return []
42 return []
43
43
@@ -53,16 +53,16 b' class AuthTokenAuthenticationPolicy(CallbackAuthenticationPolicy):'
53 return []
53 return []
54
54
55 def unauthenticated_userid(self, request):
55 def unauthenticated_userid(self, request):
56 token = request.headers.get('x-appenlight-auth-token')
56 token = request.headers.get("x-appenlight-auth-token")
57 if token:
57 if token:
58 auth_token = AuthTokenService.by_token(token)
58 auth_token = AuthTokenService.by_token(token)
59 if auth_token and not auth_token.is_expired:
59 if auth_token and not auth_token.is_expired:
60 log.info('%s is valid' % auth_token)
60 log.info("%s is valid" % auth_token)
61 return auth_token.owner_id
61 return auth_token.owner_id
62 elif auth_token:
62 elif auth_token:
63 log.warning('%s is expired' % auth_token)
63 log.warning("%s is expired" % auth_token)
64 else:
64 else:
65 log.warning('token: %s not found' % token)
65 log.warning("token: %s not found" % token)
66
66
67 def authenticated_userid(self, request):
67 def authenticated_userid(self, request):
68 return self.unauthenticated_userid(request)
68 return self.unauthenticated_userid(request)
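Since the policy above authenticates purely off the x-appenlight-auth-token header, any HTTP client can exercise it; for example (host, endpoint, and token are placeholders):

import requests

resp = requests.get(
    "https://appenlight.example.com/api/logs",  # hypothetical endpoint
    headers={"x-appenlight-auth-token": "SOME-TOKEN"},
)
print(resp.status_code)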
@@ -72,10 +72,10 b' def rewrite_root_perm(perm_user, perm_name):'
72 """
72 """
73 Translates root_administration into ALL_PERMISSIONS object
73 Translates root_administration into ALL_PERMISSIONS object
74 """
74 """
75 if perm_name == 'root_administration':
75 if perm_name == "root_administration":
76 return (Allow, perm_user, ALL_PERMISSIONS,)
76 return (Allow, perm_user, ALL_PERMISSIONS)
77 else:
77 else:
78 return (Allow, perm_user, perm_name,)
78 return (Allow, perm_user, perm_name)
79
79
80
80
81 def add_root_superperm(request, context):
81 def add_root_superperm(request, context):
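rewrite_root_perm is easiest to read from its outputs: root_administration becomes a grant of Pyramid's ALL_PERMISSIONS sentinel, anything else passes through as an ordinary ACE.

from appenlight.security import rewrite_root_perm  # module path assumed

rewrite_root_perm("user:1", "root_administration")
# -> (Allow, "user:1", ALL_PERMISSIONS)
rewrite_root_perm("user:1", "view")
# -> (Allow, "user:1", "view")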
@@ -83,10 +83,10 b' def add_root_superperm(request, context):'
83 Adds ALL_PERMISSIONS to every resource if the user has the
83 Adds ALL_PERMISSIONS to every resource if the user has the
84 'root_administration' non-resource permission
84 'root_administration' non-resource permission
85 """
85 """
86 if hasattr(request, 'user') and request.user:
86 if hasattr(request, "user") and request.user:
87 acls = permission_to_04_acls(UserService.permissions(request.user))
87 acls = permission_to_04_acls(UserService.permissions(request.user))
88 for perm_user, perm_name in acls:
88 for perm_user, perm_name in acls:
89 if perm_name == 'root_administration':
89 if perm_name == "root_administration":
90 context.__acl__.append(rewrite_root_perm(perm_user, perm_name))
90 context.__acl__.append(rewrite_root_perm(perm_user, perm_name))
91
91
92
92
@@ -96,14 +96,17 b' class RootFactory(object):'
96 """
96 """
97
97
98 def __init__(self, request):
98 def __init__(self, request):
99 self.__acl__ = [(Allow, Authenticated, 'authenticated'),
99 self.__acl__ = [
100 (Allow, Authenticated, 'create_resources')]
100 (Allow, Authenticated, "authenticated"),
101 (Allow, Authenticated, "create_resources"),
102 ]
101 # general page factory - append custom non resource permissions
103 # general page factory - append custom non resource permissions
102 if hasattr(request, 'user') and request.user:
104 if hasattr(request, "user") and request.user:
103 acls = permission_to_04_acls(UserService.permissions(request.user))
105 acls = permission_to_04_acls(UserService.permissions(request.user))
104 for perm_user, perm_name in acls:
106 for perm_user, perm_name in acls:
105 self.__acl__.append(rewrite_root_perm(perm_user, perm_name))
107 self.__acl__.append(rewrite_root_perm(perm_user, perm_name))
106
108
109
107 class ResourceFactory(object):
110 class ResourceFactory(object):
108 """
111 """
109 Checks permissions to specific resource based on user permissions or
112 Checks permissions to specific resource based on user permissions or
@@ -114,11 +117,13 b' class ResourceFactory(object):'
114 Resource = appenlight.models.resource.Resource
117 Resource = appenlight.models.resource.Resource
115
118
116 self.__acl__ = []
119 self.__acl__ = []
117 resource_id = request.matchdict.get("resource_id",
120 resource_id = request.matchdict.get(
118 request.GET.get("resource_id"))
121 "resource_id", request.GET.get("resource_id")
122 )
119 resource_id = to_integer_safe(resource_id)
123 resource_id = to_integer_safe(resource_id)
120 self.resource = ResourceService.by_resource_id(resource_id) \
124 self.resource = (
121 if resource_id else None
125 ResourceService.by_resource_id(resource_id) if resource_id else None
126 )
122 if self.resource and request.user:
127 if self.resource and request.user:
123 self.__acl__ = self.resource.__acl__
128 self.__acl__ = self.resource.__acl__
124 permissions = ResourceService.perms_for_user(self.resource, request.user)
129 permissions = ResourceService.perms_for_user(self.resource, request.user)
@@ -138,17 +143,18 b' class ResourceReportFactory(object):'
138 Resource = appenlight.models.resource.Resource
143 Resource = appenlight.models.resource.Resource
139
144
140 self.__acl__ = []
145 self.__acl__ = []
141 group_id = request.matchdict.get("group_id",
146 group_id = request.matchdict.get("group_id", request.params.get("group_id"))
142 request.params.get("group_id"))
143 group_id = to_integer_safe(group_id)
147 group_id = to_integer_safe(group_id)
144 self.report_group = ReportGroupService.by_id(
148 self.report_group = ReportGroupService.by_id(group_id) if group_id else None
145 group_id) if group_id else None
146 if not self.report_group:
149 if not self.report_group:
147 raise HTTPNotFound()
150 raise HTTPNotFound()
148
151
149 self.public = self.report_group.public
152 self.public = self.report_group.public
150 self.resource = ResourceService.by_resource_id(self.report_group.resource_id) \
153 self.resource = (
151 if self.report_group else None
154 ResourceService.by_resource_id(self.report_group.resource_id)
155 if self.report_group
156 else None
157 )
152
158
153 if self.resource:
159 if self.resource:
154 self.__acl__ = self.resource.__acl__
160 self.__acl__ = self.resource.__acl__
@@ -157,11 +163,12 b' class ResourceReportFactory(object):'
157 for perm_user, perm_name in permission_to_04_acls(permissions):
163 for perm_user, perm_name in permission_to_04_acls(permissions):
158 self.__acl__.append(rewrite_root_perm(perm_user, perm_name))
164 self.__acl__.append(rewrite_root_perm(perm_user, perm_name))
159 if self.public:
165 if self.public:
160 self.__acl__.append((Allow, Everyone, 'view',))
166 self.__acl__.append((Allow, Everyone, "view"))
161 if not request.user:
167 if not request.user:
162 # unauthenticated users must supply both the group id and the report id
168 # unauthenticated users must supply both the group id and the report id
163 report_id = request.params.get('reportId',
169 report_id = request.params.get(
164 request.params.get('report_id', -1))
170 "reportId", request.params.get("report_id", -1)
171 )
165 report = self.report_group.get_report(report_id, public=True)
172 report = self.report_group.get_report(report_id, public=True)
166 if not report:
173 if not report:
167 raise HTTPNotFound()
174 raise HTTPNotFound()
@@ -177,24 +184,23 b' class APIFactory(object):'
177 self.__acl__ = []
184 self.__acl__ = []
178 self.possibly_public = False
185 self.possibly_public = False
179 private_api_key = request.headers.get(
186 private_api_key = request.headers.get(
180 'x-appenlight-api-key',
187 "x-appenlight-api-key", request.params.get("api_key")
181 request.params.get('api_key')
182 )
188 )
183 log.debug("private key: %s" % private_api_key)
189 log.debug("private key: %s" % private_api_key)
184 if private_api_key:
190 if private_api_key:
185 self.resource = ApplicationService.by_api_key_cached()(
191 self.resource = ApplicationService.by_api_key_cached()(private_api_key)
186 private_api_key)
187 # then try public key
192 # then try public key
188 else:
193 else:
189 public_api_key = request.headers.get(
194 public_api_key = request.headers.get(
190 'x-appenlight-public-api-key',
195 "x-appenlight-public-api-key", request.GET.get("public_api_key")
191 request.GET.get('public_api_key'))
196 )
192 log.debug("public key: %s" % public_api_key)
197 log.debug("public key: %s" % public_api_key)
193 self.resource = ApplicationService.by_public_api_key(
198 self.resource = ApplicationService.by_public_api_key(
194 public_api_key, from_cache=True, request=request)
199 public_api_key, from_cache=True, request=request
200 )
195 self.possibly_public = True
201 self.possibly_public = True
196 if self.resource:
202 if self.resource:
197 self.__acl__.append((Allow, Everyone, 'create',))
203 self.__acl__.append((Allow, Everyone, "create"))
198
204
199
205
200 class AirbrakeV2APIFactory(object):
206 class AirbrakeV2APIFactory(object):
@@ -205,14 +211,13 b' class AirbrakeV2APIFactory(object):'
205 def __init__(self, request):
211 def __init__(self, request):
206 self.__acl__ = []
212 self.__acl__ = []
207 self.possibly_public = False
213 self.possibly_public = False
208 fixed_xml_data = ''
214 fixed_xml_data = ""
209 try:
215 try:
210 data = request.GET.get('data')
216 data = request.GET.get("data")
211 if data:
217 if data:
212 self.possibly_public = True
218 self.possibly_public = True
213 except (UnicodeDecodeError, UnicodeEncodeError) as exc:
219 except (UnicodeDecodeError, UnicodeEncodeError) as exc:
214 log.warning(
220 log.warning("Problem parsing Airbrake data: %s, failed decoding" % exc)
215 'Problem parsing Airbrake data: %s, failed decoding' % exc)
216 raise HTTPBadRequest()
221 raise HTTPBadRequest()
217 try:
222 try:
218 if not data:
223 if not data:
@@ -220,39 +225,38 b' class AirbrakeV2APIFactory(object):'
220 # fix the airbrake js client not escaping the line method attribute
225 # fix the airbrake js client not escaping the line method attribute
221
226
222 def repl(input):
227 def repl(input):
223 return 'line method=%s file' % quoteattr(input.group(1))
228 return "line method=%s file" % quoteattr(input.group(1))
224
229
225 fixed_xml_data = re.sub('line method="(.*?)" file', repl, data)
230 fixed_xml_data = re.sub('line method="(.*?)" file', repl, data)
226 root = ElementTree.fromstring(fixed_xml_data)
231 root = ElementTree.fromstring(fixed_xml_data)
227 except Exception as exc:
232 except Exception as exc:
228 log.info(
233 log.info("Problem parsing Airbrake " "data: %s, trying unquoting" % exc)
229 'Problem parsing Airbrake '
230 'data: %s, trying unquoting' % exc)
231 self.possibly_public = True
234 self.possibly_public = True
232 try:
235 try:
233 root = ElementTree.fromstring(urllib.parse.unquote(fixed_xml_data))
236 root = ElementTree.fromstring(urllib.parse.unquote(fixed_xml_data))
234 except Exception as exc:
237 except Exception as exc:
235 log.warning('Problem parsing Airbrake '
238 log.warning(
236 'data: %s, failed completely' % exc)
239 "Problem parsing Airbrake " "data: %s, failed completly" % exc
240 )
237 raise HTTPBadRequest()
241 raise HTTPBadRequest()
238 self.airbrake_xml_etree = root
242 self.airbrake_xml_etree = root
239 api_key = root.findtext('api-key', '')
243 api_key = root.findtext("api-key", "")
240
244
241 self.resource = ApplicationService.by_api_key_cached()(api_key)
245 self.resource = ApplicationService.by_api_key_cached()(api_key)
242 if not self.resource:
246 if not self.resource:
243 self.resource = ApplicationService.by_public_api_key(api_key,
247 self.resource = ApplicationService.by_public_api_key(
244 from_cache=True,
248 api_key, from_cache=True, request=request
245 request=request)
249 )
246 if self.resource:
250 if self.resource:
247 self.possibly_public = True
251 self.possibly_public = True
248
252
249 if self.resource:
253 if self.resource:
250 self.__acl__.append((Allow, Everyone, 'create',))
254 self.__acl__.append((Allow, Everyone, "create"))
251
255
252
256
253 def parse_sentry_header(header):
257 def parse_sentry_header(header):
254 parsed = header.split(' ', 1)[1].split(',') or []
258 parsed = header.split(" ", 1)[1].split(",") or []
255 return dict([x.strip().split('=') for x in parsed])
259 return dict([x.strip().split("=") for x in parsed])
256
260
257
261
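parse_sentry_header simply splits the value part of a Sentry auth header into a dict, for example:

header = "Sentry sentry_version=7,sentry_key=PUBLIC,sentry_client=raven-js/3.8"
parse_sentry_header(header)
# -> {"sentry_version": "7", "sentry_key": "PUBLIC", "sentry_client": "raven-js/3.8"}

Note that the split on "=" assumes the values themselves contain no "=" sign.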
258 class SentryAPIFactory(object):
262 class SentryAPIFactory(object):
@@ -263,34 +267,34 b' class SentryAPIFactory(object):'
263 def __init__(self, request):
267 def __init__(self, request):
264 self.__acl__ = []
268 self.__acl__ = []
265 self.possibly_public = False
269 self.possibly_public = False
266 if request.headers.get('X-Sentry-Auth', '').startswith('Sentry'):
270 if request.headers.get("X-Sentry-Auth", "").startswith("Sentry"):
267 header_string = request.headers['X-Sentry-Auth']
271 header_string = request.headers["X-Sentry-Auth"]
268 result = parse_sentry_header(header_string)
272 result = parse_sentry_header(header_string)
269 elif request.headers.get('Authorization', '').startswith('Sentry'):
273 elif request.headers.get("Authorization", "").startswith("Sentry"):
270 header_string = request.headers['Authorization']
274 header_string = request.headers["Authorization"]
271 result = parse_sentry_header(header_string)
275 result = parse_sentry_header(header_string)
272 else:
276 else:
273 result = dict((k, v) for k, v in list(request.GET.items())
277 result = dict(
274 if k.startswith('sentry_'))
278 (k, v) for k, v in list(request.GET.items()) if k.startswith("sentry_")
275 key = result.get('sentry_key')
279 )
276 log.info('sentry request {}'.format(result))
280 key = result.get("sentry_key")
281 log.info("sentry request {}".format(result))
277
282
278 self.resource = ApplicationService.by_api_key_cached()(key)
283 self.resource = ApplicationService.by_api_key_cached()(key)
279 if not self.resource or \
284 if not self.resource or result.get("sentry_client", "").startswith("raven-js"):
280 result.get('sentry_client', '').startswith('raven-js'):
281 self.resource = ApplicationService.by_public_api_key(
285 self.resource = ApplicationService.by_public_api_key(
282 key, from_cache=True, request=request)
286 key, from_cache=True, request=request
287 )
283 if self.resource:
288 if self.resource:
284 self.__acl__.append((Allow, Everyone, 'create',))
289 self.__acl__.append((Allow, Everyone, "create"))
285
290
286
291
287 class ResourcePluginConfigFactory(object):
292 class ResourcePluginConfigFactory(object):
288
289 def __init__(self, request):
293 def __init__(self, request):
290 Resource = appenlight.models.resource.Resource
294 Resource = appenlight.models.resource.Resource
291 self.__acl__ = []
295 self.__acl__ = []
292 self.resource = None
296 self.resource = None
293 plugin_id = to_integer_safe(request.matchdict.get('id'))
297 plugin_id = to_integer_safe(request.matchdict.get("id"))
294 self.plugin = PluginConfigService.by_id(plugin_id)
298 self.plugin = PluginConfigService.by_id(plugin_id)
295 if not self.plugin:
299 if not self.plugin:
296 raise HTTPNotFound()
300 raise HTTPNotFound()
@@ -316,7 +320,7 b' class ResourceJSONBodyFactory(object):'
316 Resource = appenlight.models.resource.Resource
320 Resource = appenlight.models.resource.Resource
317
321
318 self.__acl__ = []
322 self.__acl__ = []
319 resource_id = request.unsafe_json_body().get('resource_id')
323 resource_id = request.unsafe_json_body().get("resource_id")
320 resource_id = to_integer_safe(resource_id)
324 resource_id = to_integer_safe(resource_id)
321 self.resource = ResourceService.by_resource_id(resource_id)
325 self.resource = ResourceService.by_resource_id(resource_id)
322 if self.resource and request.user:
326 if self.resource and request.user:
@@ -334,9 +338,9 b' class ResourcePluginMixedFactory(object):'
334 json_body = request.safe_json_body
338 json_body = request.safe_json_body
335 self.resource = None
339 self.resource = None
336 if json_body:
340 if json_body:
337 resource_id = json_body.get('resource_id')
341 resource_id = json_body.get("resource_id")
338 else:
342 else:
339 resource_id = request.GET.get('resource_id')
343 resource_id = request.GET.get("resource_id")
340 if resource_id:
344 if resource_id:
341 resource_id = to_integer_safe(resource_id)
345 resource_id = to_integer_safe(resource_id)
342 self.resource = ResourceService.by_resource_id(resource_id)
346 self.resource = ResourceService.by_resource_id(resource_id)
@@ -20,7 +20,7 b' import os'
20 from pyramid.i18n import TranslationStringFactory
20 from pyramid.i18n import TranslationStringFactory
21 from pyramid import threadlocal
21 from pyramid import threadlocal
22
22
23 _ = TranslationStringFactory('pyramid')
23 _ = TranslationStringFactory("pyramid")
24
24
25 from appenlight import security
25 from appenlight import security
26 from appenlight.lib import helpers, generate_random_string
26 from appenlight.lib import helpers, generate_random_string
@@ -29,70 +29,133 b' from appenlight.models.services.config import ConfigService'
29
29
30 def gen_urls(request):
30 def gen_urls(request):
31 urls = {
31 urls = {
32 'baseUrl': request.route_url('/'),
32 "baseUrl": request.route_url("/"),
33 'applicationsNoId': request.route_url('applications_no_id'),
33 "applicationsNoId": request.route_url("applications_no_id"),
34 'applications': request.route_url('applications', resource_id='REPLACE_ID').replace('REPLACE_ID',':resourceId'),
34 "applications": request.route_url(
35 'applicationsProperty': request.route_url('applications_property',key='REPLACE_KEY', resource_id='REPLACE_ID').replace('REPLACE_ID',':resourceId').replace('REPLACE_KEY',':key'),
35 "applications", resource_id="REPLACE_ID"
36 'configsNoId': request.route_url('admin_configs'),
36 ).replace("REPLACE_ID", ":resourceId"),
37 'configs': request.route_url('admin_config', key='REPLACE_KEY', section='REPLACE_SECTION').replace('REPLACE_SECTION',':section').replace('REPLACE_KEY',':key'),
37 "applicationsProperty": request.route_url(
38 'docs': 'http://getappenlight.com/page/api/main.html',
38 "applications_property", key="REPLACE_KEY", resource_id="REPLACE_ID"
39 'eventsNoId': request.route_url('events_no_id'),
39 )
40 'events': request.route_url('events', event_id='REPLACE_ID').replace('REPLACE_ID',':eventId'),
40 .replace("REPLACE_ID", ":resourceId")
41 'eventsProperty': request.route_url('events_property',key='REPLACE_KEY', event_id='REPLACE_ID').replace('REPLACE_ID',':eventId').replace('REPLACE_KEY',':key'),
41 .replace("REPLACE_KEY", ":key"),
42 'groupsNoId': request.route_url('groups_no_id'),
42 "configsNoId": request.route_url("admin_configs"),
43 'groups': request.route_url('groups', group_id='REPLACE_ID').replace('REPLACE_ID',':groupId'),
43 "configs": request.route_url(
44 'groupsProperty': request.route_url('groups_property',key='REPLACE_KEY', group_id='REPLACE_ID').replace('REPLACE_ID',':groupId').replace('REPLACE_KEY',':key'),
44 "admin_config", key="REPLACE_KEY", section="REPLACE_SECTION"
45 'logsNoId': request.route_url('logs_no_id'),
45 )
46 'integrationAction': request.route_url('integrations_id',action='REPLACE_ACT', resource_id='REPLACE_RID', integration='REPLACE_IID').replace('REPLACE_RID',':resourceId').replace('REPLACE_ACT',':action').replace('REPLACE_IID',':integration'),
46 .replace("REPLACE_SECTION", ":section")
47 'usersNoId': request.route_url('users_no_id'),
47 .replace("REPLACE_KEY", ":key"),
48 'users': request.route_url('users', user_id='REPLACE_ID').replace('REPLACE_ID',':userId'),
48 "docs": "http://getappenlight.com/page/api/main.html",
49 'usersProperty': request.route_url('users_property',key='REPLACE_KEY', user_id='REPLACE_ID').replace('REPLACE_ID',':userId').replace('REPLACE_KEY',':key'),
49 "eventsNoId": request.route_url("events_no_id"),
50 'userSelf': request.route_url('users_self'),
50 "events": request.route_url("events", event_id="REPLACE_ID").replace(
51 'userSelfProperty': request.route_url('users_self_property',key='REPLACE_KEY').replace('REPLACE_KEY',':key'),
51 "REPLACE_ID", ":eventId"
52 'reports': request.route_url('reports'),
52 ),
53 'reportGroup': request.route_url('report_groups', group_id='REPLACE_RID').replace('REPLACE_RID',':groupId'),
53 "eventsProperty": request.route_url(
54 'reportGroupProperty': request.route_url('report_groups_property', key='REPLACE_KEY', group_id='REPLACE_GID').replace('REPLACE_KEY',':key').replace('REPLACE_GID',':groupId'),
54 "events_property", key="REPLACE_KEY", event_id="REPLACE_ID"
55 'pluginConfigsNoId': request.route_url('plugin_configs', plugin_name='REPLACE_TYPE').replace('REPLACE_TYPE',':plugin_name'),
55 )
56 'pluginConfigs': request.route_url('plugin_config', id='REPLACE_ID', plugin_name='REPLACE_TYPE').replace('REPLACE_ID',':id').replace('REPLACE_TYPE',':plugin_name'),
56 .replace("REPLACE_ID", ":eventId")
57 'resourceProperty': request.route_url('resources_property',key='REPLACE_KEY', resource_id='REPLACE_ID').replace('REPLACE_ID',':resourceId').replace('REPLACE_KEY',':key'),
57 .replace("REPLACE_KEY", ":key"),
58 'slowReports': request.route_url('slow_reports'),
58 "groupsNoId": request.route_url("groups_no_id"),
59 'sectionView': request.route_url('section_view', section='REPLACE_S', view='REPLACE_V').replace('REPLACE_S',':section').replace('REPLACE_V',':view'),
59 "groups": request.route_url("groups", group_id="REPLACE_ID").replace(
60 'otherRoutes': {
60 "REPLACE_ID", ":groupId"
61 'register': request.route_url('register'),
61 ),
62 'lostPassword': request.route_url('lost_password'),
62 "groupsProperty": request.route_url(
63 'lostPasswordGenerate': request.route_url('lost_password_generate'),
63 "groups_property", key="REPLACE_KEY", group_id="REPLACE_ID"
64 'signOut': request.route_url('ziggurat.routes.sign_out')
64 )
65 .replace("REPLACE_ID", ":groupId")
66 .replace("REPLACE_KEY", ":key"),
67 "logsNoId": request.route_url("logs_no_id"),
68 "integrationAction": request.route_url(
69 "integrations_id",
70 action="REPLACE_ACT",
71 resource_id="REPLACE_RID",
72 integration="REPLACE_IID",
73 )
74 .replace("REPLACE_RID", ":resourceId")
75 .replace("REPLACE_ACT", ":action")
76 .replace("REPLACE_IID", ":integration"),
77 "usersNoId": request.route_url("users_no_id"),
78 "users": request.route_url("users", user_id="REPLACE_ID").replace(
79 "REPLACE_ID", ":userId"
80 ),
81 "usersProperty": request.route_url(
82 "users_property", key="REPLACE_KEY", user_id="REPLACE_ID"
83 )
84 .replace("REPLACE_ID", ":userId")
85 .replace("REPLACE_KEY", ":key"),
86 "userSelf": request.route_url("users_self"),
87 "userSelfProperty": request.route_url(
88 "users_self_property", key="REPLACE_KEY"
89 ).replace("REPLACE_KEY", ":key"),
90 "reports": request.route_url("reports"),
91 "reportGroup": request.route_url(
92 "report_groups", group_id="REPLACE_RID"
93 ).replace("REPLACE_RID", ":groupId"),
94 "reportGroupProperty": request.route_url(
95 "report_groups_property", key="REPLACE_KEY", group_id="REPLACE_GID"
96 )
97 .replace("REPLACE_KEY", ":key")
98 .replace("REPLACE_GID", ":groupId"),
99 "pluginConfigsNoId": request.route_url(
100 "plugin_configs", plugin_name="REPLACE_TYPE"
101 ).replace("REPLACE_TYPE", ":plugin_name"),
102 "pluginConfigs": request.route_url(
103 "plugin_config", id="REPLACE_ID", plugin_name="REPLACE_TYPE"
104 )
105 .replace("REPLACE_ID", ":id")
106 .replace("REPLACE_TYPE", ":plugin_name"),
107 "resourceProperty": request.route_url(
108 "resources_property", key="REPLACE_KEY", resource_id="REPLACE_ID"
109 )
110 .replace("REPLACE_ID", ":resourceId")
111 .replace("REPLACE_KEY", ":key"),
112 "slowReports": request.route_url("slow_reports"),
113 "sectionView": request.route_url(
114 "section_view", section="REPLACE_S", view="REPLACE_V"
115 )
116 .replace("REPLACE_S", ":section")
117 .replace("REPLACE_V", ":view"),
118 "otherRoutes": {
119 "register": request.route_url("register"),
120 "lostPassword": request.route_url("lost_password"),
121 "lostPasswordGenerate": request.route_url("lost_password_generate"),
122 "signOut": request.route_url("ziggurat.routes.sign_out"),
65 },
123 },
66 'social_auth': {
124 "social_auth": {
67 'google': request.route_url('social_auth', provider='google'),
125 "google": request.route_url("social_auth", provider="google"),
68 'twitter': request.route_url('social_auth', provider='twitter'),
126 "twitter": request.route_url("social_auth", provider="twitter"),
69 'bitbucket': request.route_url('social_auth', provider='bitbucket'),
127 "bitbucket": request.route_url("social_auth", provider="bitbucket"),
70 'github': request.route_url('social_auth', provider='github'),
128 "github": request.route_url("social_auth", provider="github"),
71 },
129 },
72 "plugins": {},
130 "plugins": {},
73 "adminAction": request.route_url('admin', action="REPLACE_ACT").replace('REPLACE_ACT',':action')
131 "adminAction": request.route_url("admin", action="REPLACE_ACT").replace(
132 "REPLACE_ACT", ":action"
133 ),
74 }
134 }
75 return urls
135 return urls
76
136
137
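The REPLACE_* round-trips above exist because route_url validates its placeholder values: each route is generated with a dummy token, which is then swapped for the Angular-style :param marker to produce a client-side URL pattern. In isolation (route definition assumed):

# assuming a route registered with pattern /applications/{resource_id}
url = request.route_url("applications", resource_id="REPLACE_ID")
# e.g. "http://server/applications/REPLACE_ID"
pattern = url.replace("REPLACE_ID", ":resourceId")
# -> "http://server/applications/:resourceId", ready for the JS router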
77 def new_request(event):
138 def new_request(event):
78 environ = event.request.environ
139 environ = event.request.environ
79 event.request.response.headers['X-Frame-Options'] = 'SAMEORIGIN'
140 event.request.response.headers["X-Frame-Options"] = "SAMEORIGIN"
80 event.request.response.headers['X-XSS-Protection'] = '1; mode=block'
141 event.request.response.headers["X-XSS-Protection"] = "1; mode=block"
81 # can this be enabled on non https deployments?
142 # can this be enabled on non https deployments?
82 # event.request.response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubdomains;'
143 # event.request.response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubdomains;'
83
144
84 # do not send XSRF token with /api calls
145 # do not send XSRF token with /api calls
85 if not event.request.path.startswith('/api'):
146 if not event.request.path.startswith("/api"):
86 if environ['wsgi.url_scheme'] == 'https':
147 if environ["wsgi.url_scheme"] == "https":
87 event.request.response.set_cookie(
148 event.request.response.set_cookie(
88 'XSRF-TOKEN', event.request.session.get_csrf_token(),
149 "XSRF-TOKEN", event.request.session.get_csrf_token(), secure=True
89 secure=True)
150 )
90 else:
151 else:
91 event.request.response.set_cookie(
152 event.request.response.set_cookie(
92 'XSRF-TOKEN', event.request.session.get_csrf_token())
153 "XSRF-TOKEN", event.request.session.get_csrf_token()
154 )
93 if event.request.user:
155 if event.request.user:
94 event.request.response.headers[
156 event.request.response.headers["x-appenlight-uid"] = (
95 'x-appenlight-uid'] = '%s' % event.request.user.id
157 "%s" % event.request.user.id
158 )
96
159
97
160
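new_request and add_renderer_globals only fire if they are registered as event subscribers; that wiring is not part of this diff, but the conventional Pyramid form is:

from pyramid.events import BeforeRender, NewRequest

# `config` is the application's Configurator, set up elsewhere (not shown here)
config.add_subscriber(new_request, NewRequest)
config.add_subscriber(add_renderer_globals, BeforeRender)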
98 def add_renderer_globals(event):
161 def add_renderer_globals(event):
@@ -102,49 +165,55 b' def add_renderer_globals(event):'
102 renderer_globals["h"] = helpers
165 renderer_globals["h"] = helpers
103 renderer_globals["js_hash"] = request.registry.js_hash
166 renderer_globals["js_hash"] = request.registry.js_hash
104 renderer_globals["css_hash"] = request.registry.css_hash
167 renderer_globals["css_hash"] = request.registry.css_hash
105 renderer_globals['_'] = _
168 renderer_globals["_"] = _
106 renderer_globals['security'] = security
169 renderer_globals["security"] = security
107 renderer_globals['flash_msgs'] = []
170 renderer_globals["flash_msgs"] = []
108 renderer_globals['appenlight_plugins'] = []
171 renderer_globals["appenlight_plugins"] = []
109
172
110 if 'jinja' in event['renderer_info'].type:
173 if "jinja" in event["renderer_info"].type:
111 renderer_globals['url_list'] = gen_urls(request)
174 renderer_globals["url_list"] = gen_urls(request)
112 # add footer html and some other global vars to renderer
175 # add footer html and some other global vars to renderer
113 for module, config in request.registry.appenlight_plugins.items():
176 for module, config in request.registry.appenlight_plugins.items():
114 if config['url_gen']:
177 if config["url_gen"]:
115 urls = config['url_gen'](request)
178 urls = config["url_gen"](request)
116 renderer_globals['url_list']['plugins'][module] = urls
179 renderer_globals["url_list"]["plugins"][module] = urls
117
180
118 renderer_globals['appenlight_plugins'].append(
181 renderer_globals["appenlight_plugins"].append(
119 {'name': module,
182 {
120 'config': {
183 "name": module,
121 'javascript':config['javascript'],
184 "config": {
122 'header_html':config['header_html']
185 "javascript": config["javascript"],
123 }})
186 "header_html": config["header_html"],
187 },
188 }
189 )
124
190
125 footer_config = ConfigService.by_key_and_section(
191 footer_config = ConfigService.by_key_and_section(
126 'template_footer_html', 'global', default_value='')
192 "template_footer_html", "global", default_value=""
193 )
127
194
128 renderer_globals['template_footer_html'] = footer_config.value
195 renderer_globals["template_footer_html"] = footer_config.value
129 try:
196 try:
130 renderer_globals['root_administrator'] = request.has_permission(
197 renderer_globals["root_administrator"] = request.has_permission(
131 'root_administration', security.RootFactory(request))
198 "root_administration", security.RootFactory(request)
199 )
132 except AttributeError:
200 except AttributeError:
133 renderer_globals['root_administrator'] = False
201 renderer_globals["root_administrator"] = False
134
202
135 renderer_globals['_mail_url'] = request.registry.settings['_mail_url']
203 renderer_globals["_mail_url"] = request.registry.settings["_mail_url"]
136
204
137 if not request:
205 if not request:
138 return
206 return
139
207
140 # do not send flash headers with /api calls
208 # do not send flash headers with /api calls
141 if not request.path.startswith('/api'):
209 if not request.path.startswith("/api"):
142 flash_msgs = helpers.get_type_formatted_flash(request)
210 flash_msgs = helpers.get_type_formatted_flash(request)
143 renderer_globals['flash_msgs'] = flash_msgs
211 renderer_globals["flash_msgs"] = flash_msgs
144 request.add_flash_to_headers()
212 request.add_flash_to_headers()
145
213
214
146 def application_created(app):
215 def application_created(app):
147 webassets_dir = app.app.registry.settings.get('webassets.dir')
216 webassets_dir = app.app.registry.settings.get("webassets.dir")
148 js_hash = generate_random_string()
217 js_hash = generate_random_string()
149 css_hash = generate_random_string()
218 css_hash = generate_random_string()
150 if webassets_dir:
219 if webassets_dir:
@@ -153,16 +222,14 b' def application_created(app):'
153 for root, dirs, files in os.walk(webassets_dir):
222 for root, dirs, files in os.walk(webassets_dir):
154 for name in files:
223 for name in files:
155 filename = os.path.join(root, name)
224 filename = os.path.join(root, name)
156 if name.endswith('css'):
225 if name.endswith("css"):
157 with open(filename, 'r', encoding='utf8',
226 with open(filename, "r", encoding="utf8", errors="replace") as f:
158 errors='replace') as f:
159 for line in f:
227 for line in f:
160 css_hasher.update(line.encode('utf8'))
228 css_hasher.update(line.encode("utf8"))
161 elif name.endswith('js'):
229 elif name.endswith("js"):
162 with open(filename, 'r', encoding='utf8',
230 with open(filename, "r", encoding="utf8", errors="replace") as f:
163 errors='replace') as f:
164 for line in f:
231 for line in f:
165 js_hasher.update(line.encode('utf8'))
232 js_hasher.update(line.encode("utf8"))
166 js_hash = js_hasher.hexdigest()
233 js_hash = js_hasher.hexdigest()
167 css_hash = css_hasher.hexdigest()
234 css_hash = css_hasher.hexdigest()
168 app.app.registry.js_hash = js_hash
235 app.app.registry.js_hash = js_hash
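The hashes computed here act as cache busters: asset contents are hashed once at application startup, the digest lands on the registry, and templates can append it to asset URLs so browsers refetch after a deploy. A simplified, self-contained sketch of the hashing step (the digest algorithm is an assumption; the hasher construction sits outside this hunk):

import hashlib
import os

hasher = hashlib.md5()  # assumed digest algorithm
for root, dirs, files in os.walk("static"):  # hypothetical assets dir
    for name in sorted(files):  # stable ordering gives a stable digest
        if name.endswith((".js", ".css")):
            with open(os.path.join(root, name), "rb") as f:
                hasher.update(f.read())
print(hasher.hexdigest())  # e.g. appended as ?hash=<digest> to asset URLs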
@@ -13,4 +13,3 b''
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
@@ -31,17 +31,19 b' from pyramid import testing'
31 @pytest.fixture
31 @pytest.fixture
32 def base_app(request, mocker):
32 def base_app(request, mocker):
33 # disable email sending
33 # disable email sending
34 mocker.patch('pyramid_mailer.mailer_factory_from_settings', mocker.Mock())
34 mocker.patch("pyramid_mailer.mailer_factory_from_settings", mocker.Mock())
35
35
36 from appenlight import main
36 from appenlight import main
37 import transaction
37 import transaction
38
38 current_dir = os.path.dirname(os.path.abspath(__file__))
39 current_dir = os.path.dirname(os.path.abspath(__file__))
39 path = os.path.join(current_dir, '../../../../',
40 path = os.path.join(
40 os.environ.get("APPENLIGHT_INI", 'testing.ini'))
41 current_dir, "../../../../", os.environ.get("APPENLIGHT_INI", "testing.ini")
42 )
41 # appsettings from ini
43 # appsettings from ini
42 app_settings = get_appsettings(path, name="appenlight")
44 app_settings = get_appsettings(path, name="appenlight")
43 app = main({}, **app_settings)
45 app = main({}, **app_settings)
44 app_request = testing.DummyRequest(base_url='https://appenlight.com')
46 app_request = testing.DummyRequest(base_url="https://appenlight.com")
45 app_request.tm = transaction.manager
47 app_request.tm = transaction.manager
46 app_request.add_flash_to_headers = mock.Mock()
48 app_request.add_flash_to_headers = mock.Mock()
47 testing.setUp(registry=app.registry, request=app_request)
49 testing.setUp(registry=app.registry, request=app_request)
@@ -58,8 +60,7 b' def base_app(request, mocker):'
58 def with_migrations(request, base_app):
60 def with_migrations(request, base_app):
59 settings = base_app.registry.settings
61 settings = base_app.registry.settings
60 alembic_cfg = Config()
62 alembic_cfg = Config()
61 alembic_cfg.set_main_option("script_location",
63 alembic_cfg.set_main_option("script_location", "ziggurat_foundations:migrations")
62 "ziggurat_foundations:migrations")
63 alembic_cfg.set_main_option("sqlalchemy.url", settings["sqlalchemy.url"])
64 alembic_cfg.set_main_option("sqlalchemy.url", settings["sqlalchemy.url"])
64 command.upgrade(alembic_cfg, "head")
65 command.upgrade(alembic_cfg, "head")
65 alembic_cfg = Config()
66 alembic_cfg = Config()
@@ -68,13 +69,14 b' def with_migrations(request, base_app):'
68 command.upgrade(alembic_cfg, "head")
69 command.upgrade(alembic_cfg, "head")
69
70
70 for plugin_name, config in base_app.registry.appenlight_plugins.items():
71 for plugin_name, config in base_app.registry.appenlight_plugins.items():
71 if config['sqlalchemy_migrations']:
72 if config["sqlalchemy_migrations"]:
72 alembic_cfg = Config()
73 alembic_cfg = Config()
73 alembic_cfg.set_main_option("script_location",
74 config['sqlalchemy_migrations'])
75 alembic_cfg.set_main_option(
74 alembic_cfg.set_main_option(
76 "sqlalchemy.url",
75 "script_location", config["sqlalchemy_migrations"]
77 base_app.registry.settings["sqlalchemy.url"])
76 )
77 alembic_cfg.set_main_option(
78 "sqlalchemy.url", base_app.registry.settings["sqlalchemy.url"]
79 )
78 command.upgrade(alembic_cfg, "head")
80 command.upgrade(alembic_cfg, "head")
79
81
80
82
@@ -82,11 +84,12 b' def with_migrations(request, base_app):'
82 def default_data(base_app):
84 def default_data(base_app):
83 from appenlight.models.services.config import ConfigService
85 from appenlight.models.services.config import ConfigService
84 from appenlight.lib import get_callable
86 from appenlight.lib import get_callable
87
85 transaction.begin()
88 transaction.begin()
86 ConfigService.setup_default_values()
89 ConfigService.setup_default_values()
87 for plugin_name, config in base_app.registry.appenlight_plugins.items():
90 for plugin_name, config in base_app.registry.appenlight_plugins.items():
88 if config['default_values_setter']:
91 if config["default_values_setter"]:
89 get_callable(config['default_values_setter'])()
92 get_callable(config["default_values_setter"])()
90 transaction.commit()
93 transaction.commit()
91
94
92
95
@@ -98,8 +101,8 b' def clean_tables(request):'
98 tables = Base.metadata.tables.keys()
101 tables = Base.metadata.tables.keys()
99 transaction.begin()
102 transaction.begin()
100 for t in tables:
103 for t in tables:
101 if not t.startswith('alembic_'):
104 if not t.startswith("alembic_"):
102 DBSession.execute('truncate %s cascade' % t)
105 DBSession.execute("truncate %s cascade" % t)
103 session = DBSession()
106 session = DBSession()
104 mark_changed(session)
107 mark_changed(session)
105 transaction.commit()
108 transaction.commit()
@@ -112,14 +115,12 b' def default_user():'
112 from appenlight.models import DBSession
115 from appenlight.models import DBSession
113 from appenlight.models.user import User
116 from appenlight.models.user import User
114 from appenlight.models.auth_token import AuthToken
117 from appenlight.models.auth_token import AuthToken
118
115 transaction.begin()
119 transaction.begin()
116 session = DBSession()
120 session = DBSession()
117 user = User(id=1,
121 user = User(id=1, user_name="testuser", status=1, email="foo@barbaz99.com")
118 user_name='testuser',
119 status=1,
120 email='foo@barbaz99.com')
121 session.add(user)
122 session.add(user)
122 token = AuthToken(token='1234')
123 token = AuthToken(token="1234")
123 user.auth_tokens.append(token)
124 user.auth_tokens.append(token)
124 session.execute("SELECT nextval('users_id_seq')")
125 session.execute("SELECT nextval('users_id_seq')")
125 transaction.commit()
126 transaction.commit()
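These fixtures compose in the usual pytest way: a test that needs a migrated database, seeded config values, and the canned user simply lists them as arguments. A hypothetical test, assuming the default_user fixture returns the seeded User object:

def test_default_user(with_migrations, default_data, default_user):
    # values match the seed data above: user id 1, auth token "1234"
    assert default_user.user_name == "testuser"
    assert default_user.auth_tokens[0].token == "1234"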
@@ -133,8 +134,7 b' def default_application(default_user):'
133
134
134 transaction.begin()
135 transaction.begin()
135 session = DBSession()
136 session = DBSession()
136 application = Application(
137 application = Application(resource_id=1, resource_name="testapp", api_key="xxxx")
137 resource_id=1, resource_name='testapp', api_key='xxxx')
138 session.add(application)
138 session.add(application)
139 default_user.resources.append(application)
139 default_user.resources.append(application)
140 session.execute("SELECT nextval('resources_resource_id_seq')")
140 session.execute("SELECT nextval('resources_resource_id_seq')")
@@ -145,6 +145,7 b' def default_application(default_user):'
145 @pytest.fixture
145 @pytest.fixture
146 def report_type_matrix():
146 def report_type_matrix():
147 from appenlight.models.report import REPORT_TYPE_MATRIX
147 from appenlight.models.report import REPORT_TYPE_MATRIX
148
148 return REPORT_TYPE_MATRIX
149 return REPORT_TYPE_MATRIX
149
150
150
151
@@ -153,38 +154,43 b' def chart_series():'
153 series = []
154 series = []
154
155
155 for x in range(1, 7):
156 for x in range(1, 7):
156 tmp_list = [('key', 'X'), ('0_1', x)]
157 tmp_list = [("key", "X"), ("0_1", x)]
157 if x % 2 == 0:
158 if x % 2 == 0:
158 tmp_list.append(('0_2', x))
159 tmp_list.append(("0_2", x))
159 if x % 3 == 0:
160 if x % 3 == 0:
160 tmp_list.append(('0_3', x))
161 tmp_list.append(("0_3", x))
161
162
162 series.append(
163 series.append(OrderedDict(tmp_list))
163 OrderedDict(tmp_list)
164 )
165 return series
164 return series
166
165
167
166
168 @pytest.fixture
167 @pytest.fixture
169 def log_schema():
168 def log_schema():
170 from appenlight.validators import LogListSchema
169 from appenlight.validators import LogListSchema
170
171 schema = LogListSchema().bind(utcnow=datetime.utcnow())
171 schema = LogListSchema().bind(utcnow=datetime.utcnow())
172 return schema
172 return schema
173
173
174
174 @pytest.fixture
175 @pytest.fixture
175 def general_metrics_schema():
176 def general_metrics_schema():
176 from appenlight.validators import GeneralMetricsListSchema
177 from appenlight.validators import GeneralMetricsListSchema
178
177 schema = GeneralMetricsListSchema().bind(utcnow=datetime.utcnow())
179 schema = GeneralMetricsListSchema().bind(utcnow=datetime.utcnow())
178 return schema
180 return schema
179
181
182
180 @pytest.fixture
183 @pytest.fixture
181 def request_metrics_schema():
184 def request_metrics_schema():
182 from appenlight.validators import MetricsListSchema
185 from appenlight.validators import MetricsListSchema
186
183 schema = MetricsListSchema().bind(utcnow=datetime.utcnow())
187 schema = MetricsListSchema().bind(utcnow=datetime.utcnow())
184 return schema
188 return schema
185
189
190
186 @pytest.fixture
191 @pytest.fixture
187 def report_05_schema():
192 def report_05_schema():
188 from appenlight.validators import ReportListSchema_0_5
193 from appenlight.validators import ReportListSchema_0_5
194
189 schema = ReportListSchema_0_5().bind(utcnow=datetime.utcnow())
195 schema = ReportListSchema_0_5().bind(utcnow=datetime.utcnow())
190 return schema
196 return schema
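Each schema fixture is bound to utcnow and then used to deserialize client payloads such as the examples in the next file. A hypothetical usage, assuming the schema mirrors the payload keys:

def test_log_payload(log_schema):
    # LOG_EXAMPLES is the payload list defined in the examples module below
    deserialized = log_schema.deserialize(LOG_EXAMPLES)
    assert deserialized[0]["namespace"] == "some.namespace.indicator"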
@@ -20,37 +20,48 b' from datetime import datetime'
20
20
21 now = datetime.utcnow().date()
21 now = datetime.utcnow().date()
22
22
23 REQUEST_METRICS_EXAMPLES = [{
23 REQUEST_METRICS_EXAMPLES = [
24 "server": "some.server.hostname",
24 {
25 "timestamp": now.strftime('%Y-%m-%dT%H:%M:%S.0'),
25 "server": "some.server.hostname",
26 "metrics": [
26 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
27 ["dir/module:func",
27 "metrics": [
28 {"custom": 0.0,
28 [
29 "custom_calls": 0,
29 "dir/module:func",
30 "main": 0.01664,
30 {
31 "nosql": 0.00061,
31 "custom": 0.0,
32 "nosql_calls": 23,
32 "custom_calls": 0,
33 "remote": 0.0,
33 "main": 0.01664,
34 "remote_calls": 0,
34 "nosql": 0.00061,
35 "requests": 1,
35 "nosql_calls": 23,
36 "sql": 0.00105,
36 "remote": 0.0,
37 "sql_calls": 2,
37 "remote_calls": 0,
38 "tmpl": 0.0,
38 "requests": 1,
39 "tmpl_calls": 0}],
39 "sql": 0.00105,
40 ["SomeView.function",
40 "sql_calls": 2,
41 {"custom": 0.0,
41 "tmpl": 0.0,
42 "custom_calls": 0,
42 "tmpl_calls": 0,
43 "main": 0.647261,
43 },
44 "nosql": 0.306554,
44 ],
45 "nosql_calls": 140,
45 [
46 "remote": 0.0,
46 "SomeView.function",
47 "remote_calls": 0,
47 {
48 "requests": 28,
48 "custom": 0.0,
49 "sql": 0.0,
49 "custom_calls": 0,
50 "sql_calls": 0,
50 "main": 0.647261,
51 "tmpl": 0.0,
51 "nosql": 0.306554,
52 "tmpl_calls": 0}]]
52 "nosql_calls": 140,
53 }]
53 "remote": 0.0,
54 "remote_calls": 0,
55 "requests": 28,
56 "sql": 0.0,
57 "sql_calls": 0,
58 "tmpl": 0.0,
59 "tmpl_calls": 0,
60 },
61 ],
62 ],
63 }
64 ]
54
65
55 LOG_EXAMPLES = [
66 LOG_EXAMPLES = [
56 {
67 {
@@ -59,11 +70,8 b' LOG_EXAMPLES = ['
59 "namespace": "some.namespace.indicator",
70 "namespace": "some.namespace.indicator",
60 "request_id": "SOME_UUID",
71 "request_id": "SOME_UUID",
61 "server": "some server",
72 "server": "some server",
62 "tags": [["tag_name", "tag_value"],
73 "tags": [["tag_name", "tag_value"], ["tag_name2", 2]],
63 ["tag_name2", 2]
74 "date": now.strftime("%Y-%m-%dT%H:%M:%S.%f"),
64
65 ],
66 "date": now.strftime('%Y-%m-%dT%H:%M:%S.%f')
67 },
75 },
68 {
76 {
69 "log_level": "ERROR",
77 "log_level": "ERROR",
@@ -71,114 +79,119 b' LOG_EXAMPLES = ['
71 "namespace": "some.namespace.indicator",
79 "namespace": "some.namespace.indicator",
72 "request_id": "SOME_UUID",
80 "request_id": "SOME_UUID",
73 "server": "some server",
81 "server": "some server",
74 "date": now.strftime('%Y-%m-%dT%H:%M:%S.%f')
82 "date": now.strftime("%Y-%m-%dT%H:%M:%S.%f"),
75 }
83 },
76 ]
84 ]
77
85
78 PARSED_REPORT_404 = {
86 PARSED_REPORT_404 = {
79 'report_details': [{
87 "report_details": [
80 'username': 'foo',
88 {
81 'url': 'http://localhost:6543/test/error?aaa=1&bbb=2',
89 "username": "foo",
82 'ip': '127.0.0.1',
90 "url": "http://localhost:6543/test/error?aaa=1&bbb=2",
83 'start_time': now.strftime(
91 "ip": "127.0.0.1",
84 '%Y-%m-%dT%H:%M:%S.0'),
92 "start_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
85 'slow_calls': [],
93 "slow_calls": [],
86 'request': {'COOKIES': {
94 "request": {
87 'country': 'US',
95 "COOKIES": {
88 'sessionId': '***',
96 "country": "US",
89 'test_group_id': '5',
97 "sessionId": "***",
90 'http_referer': 'http://localhost:5000/'},
98 "test_group_id": "5",
91 'POST': {},
99 "http_referer": "http://localhost:5000/",
92 'GET': {'aaa': ['1'], 'bbb': ['2']},
100 },
93 'HTTP_METHOD': 'GET',
101 "POST": {},
94 },
102 "GET": {"aaa": ["1"], "bbb": ["2"]},
95 'user_agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0.1) Gecko/20100101 Firefox/10.0.1',
103 "HTTP_METHOD": "GET",
96 'message': '',
104 },
97 'end_time': now.strftime(
105 "user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:10.0.1) Gecko/20100101 Firefox/10.0.1",
98 '%Y-%m-%dT%H:%M:%S.0'),
106 "message": "",
99 'request_stats': {}
107 "end_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
100 }],
108 "request_stats": {},
101 'error': '404 Not Found',
109 }
102 'server': "servername/instancename",
110 ],
103 'priority': 5,
111 "error": "404 Not Found",
104 'client': 'appenlight-python',
112 "server": "servername/instancename",
105 'language': 'python',
113 "priority": 5,
106 'http_status': 404}
114 "client": "appenlight-python",
115 "language": "python",
116 "http_status": 404,
117 }
107
118
108 PYTHON_PAYLOAD_0_4 = {
119 PYTHON_PAYLOAD_0_4 = {
109 "client": "your-client-name-python",
120 "client": "your-client-name-python",
110 "language": "python",
121 "language": "python",
111 'view_name': 'views/foo:bar',
122 "view_name": "views/foo:bar",
112 'server': "servername/instancename",
123 "server": "servername/instancename",
113 "priority": 5,
124 "priority": 5,
114 "error": "OMG ValueError happened test",
125 "error": "OMG ValueError happened test",
115 "occurences": 1,
126 "occurences": 1,
116 "http_status": 500,
127 "http_status": 500,
117 "report_details": [
128 "report_details": [
118 {"username": "USER",
129 {
119 "url": "HTTP://SOMEURL",
130 "username": "USER",
120 "ip": "127.0.0.1",
131 "url": "HTTP://SOMEURL",
121 "start_time": now.strftime(
132 "ip": "127.0.0.1",
122 '%Y-%m-%dT%H:%M:%S.0'),
133 "start_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
123 "end_time": now.strftime(
134 "end_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
124 '%Y-%m-%dT%H:%M:%S.0'),
135 "user_agent": "BROWSER_AGENT",
125 "user_agent": "BROWSER_AGENT",
136 "message": "arbitrary text that will get attached to your report",
126 "message": "arbitrary text that will get attached to your report",
137 "request_id": "SOME_UUID",
127 "request_id": "SOME_UUID",
138 "request": {
128 "request": {"REQUEST_METHOD": "GET",
139 "REQUEST_METHOD": "GET",
129 "PATH_INFO": "/FOO/BAR",
140 "PATH_INFO": "/FOO/BAR",
130 "POST": {"FOO": "BAZ", "XXX": "YYY"}
141 "POST": {"FOO": "BAZ", "XXX": "YYY"},
131 },
142 },
132 "slow_calls": [
143 "slow_calls": [
133 {
144 {
134 "type": "sql",
145 "type": "sql",
135 "start": now.strftime(
146 "start": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
136 '%Y-%m-%dT%H:%M:%S.0'),
147 "end": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
137 "end": now.strftime(
148 "subtype": "postgresql",
138 '%Y-%m-%dT%H:%M:%S.0'),
149 "parameters": ["QPARAM1", "QPARAM2", "QPARAMX"],
139 "subtype": "postgresql",
150 "statement": "QUERY",
140 "parameters": ["QPARAM1", "QPARAM2",
151 }
141 "QPARAMX"],
152 ],
142 "statement": "QUERY"}],
153 "request_stats": {
143 "request_stats": {"main": 0.50779,
154 "main": 0.50779,
144 "nosql": 0.01008,
155 "nosql": 0.01008,
145 "nosql_calls": 17.0,
156 "nosql_calls": 17.0,
146 "remote": 0.0,
157 "remote": 0.0,
147 "remote_calls": 0.0,
158 "remote_calls": 0.0,
148 "custom": 0.0,
159 "custom": 0.0,
149 "custom_calls": 0.0,
160 "custom_calls": 0.0,
150 "sql": 0.42423,
161 "sql": 0.42423,
151 "sql_calls": 1.0,
162 "sql_calls": 1.0,
152 "tmpl": 0.0,
163 "tmpl": 0.0,
153 "tmpl_calls": 0.0},
164 "tmpl_calls": 0.0,
154 "traceback": [
165 },
155 {"cline": "return foo_bar_baz(1,2,3)",
166 "traceback": [
156 "file": "somedir/somefile.py",
167 {
157 "fn": "somefunction",
168 "cline": "return foo_bar_baz(1,2,3)",
158 "line": 454,
169 "file": "somedir/somefile.py",
159 "vars": [["a_list",
170 "fn": "somefunction",
160 ["1",
171 "line": 454,
161 "2",
172 "vars": [
162 "4",
173 ["a_list", ["1", "2", "4", "5", "6"]],
163 "5",
174 ["b", {1: "2", "ccc": "ddd", "1": "a"}],
164 "6"]],
175 ["obj", "<object object at 0x7f0030853dc0>"],
165 ["b",
176 ],
166 {1: "2", "ccc": "ddd", "1": "a"}],
177 },
167 ["obj",
178 {
168 "<object object at 0x7f0030853dc0>"]]},
179 "cline": "OMG ValueError happened",
169 {"cline": "OMG ValueError happened",
180 "file": "",
170 "file": "",
181 "fn": "",
171 "fn": "",
182 "line": "",
172 "line": "",
183 "vars": [],
173 "vars": []}]
184 },
174 }]
185 ],
186 }
187 ],
175 }
188 }
176
189
177 PYTHON_PAYLOAD_0_5 = {
190 PYTHON_PAYLOAD_0_5 = {
178 "client": "your-client-name-python",
191 "client": "your-client-name-python",
179 "language": "python",
192 "language": "python",
180 'view_name': 'views/foo:bar',
193 "view_name": "views/foo:bar",
181 'server': "servername/instancename",
194 "server": "servername/instancename",
182 "priority": 5,
195 "priority": 5,
183 "error": "OMG ValueError happened test",
196 "error": "OMG ValueError happened test",
184 "occurences": 1,
197 "occurences": 1,
@@ -186,186 +199,219 b' PYTHON_PAYLOAD_0_5 = {'
186 "username": "USER",
199 "username": "USER",
187 "url": "HTTP://SOMEURL",
200 "url": "HTTP://SOMEURL",
188 "ip": "127.0.0.1",
201 "ip": "127.0.0.1",
189 "start_time": now.strftime(
202 "start_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
190 '%Y-%m-%dT%H:%M:%S.0'),
203 "end_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
191 "end_time": now.strftime(
192 '%Y-%m-%dT%H:%M:%S.0'),
193 "user_agent": "BROWSER_AGENT",
204 "user_agent": "BROWSER_AGENT",
194 "message": "arbitrary text that will get attached to your report",
205 "message": "arbitrary text that will get attached to your report",
195 "request_id": "SOME_UUID",
206 "request_id": "SOME_UUID",
196 "request": {"REQUEST_METHOD": "GET",
207 "request": {
197 "PATH_INFO": "/FOO/BAR",
208 "REQUEST_METHOD": "GET",
198 "POST": {"FOO": "BAZ", "XXX": "YYY"}},
209 "PATH_INFO": "/FOO/BAR",
210 "POST": {"FOO": "BAZ", "XXX": "YYY"},
211 },
199 "slow_calls": [
212 "slow_calls": [
200 {
213 {
201 "type": "sql",
214 "type": "sql",
202 "start": now.strftime(
215 "start": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
203 '%Y-%m-%dT%H:%M:%S.0'),
216 "end": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
204 "end": now.strftime(
205 '%Y-%m-%dT%H:%M:%S.0'),
206 "subtype": "postgresql",
217 "subtype": "postgresql",
207 "parameters": ["QPARAM1", "QPARAM2", "QPARAMX"],
218 "parameters": ["QPARAM1", "QPARAM2", "QPARAMX"],
208 "statement": "QUERY"}],
219 "statement": "QUERY",
209 "request_stats": {"main": 0.50779,
220 }
210 "nosql": 0.01008,
221 ],
211 "nosql_calls": 17.0,
222 "request_stats": {
212 "remote": 0.0,
223 "main": 0.50779,
213 "remote_calls": 0.0,
224 "nosql": 0.01008,
214 "custom": 0.0,
225 "nosql_calls": 17.0,
215 "custom_calls": 0.0,
226 "remote": 0.0,
216 "sql": 0.42423,
227 "remote_calls": 0.0,
217 "sql_calls": 1.0,
228 "custom": 0.0,
218 "tmpl": 0.0,
229 "custom_calls": 0.0,
219 "tmpl_calls": 0.0},
230 "sql": 0.42423,
231 "sql_calls": 1.0,
232 "tmpl": 0.0,
233 "tmpl_calls": 0.0,
234 },
220 "traceback": [
235 "traceback": [
221 {"cline": "return foo_bar_baz(1,2,3)",
236 {
222 "file": "somedir/somefile.py",
237 "cline": "return foo_bar_baz(1,2,3)",
223 "fn": "somefunction",
238 "file": "somedir/somefile.py",
224 "line": 454,
239 "fn": "somefunction",
225 "vars": [["a_list",
240 "line": 454,
226 ["1",
241 "vars": [
227 "2",
242 ["a_list", ["1", "2", "4", "5", "6"]],
228 "4",
243 ["b", {1: "2", "ccc": "ddd", "1": "a"}],
229 "5",
244 ["obj", "<object object at 0x7f0030853dc0>"],
230 "6"]],
245 ],
231 ["b", {1: "2", "ccc": "ddd", "1": "a"}],
246 },
232 ["obj",
247 {
233 "<object object at 0x7f0030853dc0>"]]},
248 "cline": "OMG ValueError happened",
234 {"cline": "OMG ValueError happened",
249 "file": "",
235 "file": "",
250 "fn": "",
236 "fn": "",
251 "line": "",
237 "line": "",
252 "vars": [],
238 "vars": []}]
253 },
254 ],
239 }
255 }
240
256
241 PHP_PAYLOAD = {
257 PHP_PAYLOAD = {
242 'client': 'php',
258 "client": "php",
243 'error': 'Nie mo\u017cna ustali\u0107 \u017c\u0105dania "feed.xml".',
259 "error": 'Nie mo\u017cna ustali\u0107 \u017c\u0105dania "feed.xml".',
244 'error_type': '',
260 "error_type": "",
245 'http_status': 404,
261 "http_status": 404,
246 'language': 'unknown',
262 "language": "unknown",
247 'priority': 1,
263 "priority": 1,
248 'report_details': [{'end_time': None,
264 "report_details": [
249 'group_string': None,
265 {
250 'ip': None,
266 "end_time": None,
251 'message': 'exception \'CHttpException\' with message \'Nie mo\u017cna ustali\u0107 \u017c\u0105dania "feed.xml".\' in /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php:286\nStack trace:\n#0 /home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php(34): CWebApplication->runController(\'feed.xml\')\n#1 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php(141): WebApplication->runController(\'feed.xml\')\n#2 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php(180): CWebApplication->processRequest()\n#3 /home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php(23): CApplication->run()\n#4 {main}',
267 "group_string": None,
252 'occurences': 1,
268 "ip": None,
253 'request': {
269 "message": "exception 'CHttpException' with message 'Nie mo\u017cna ustali\u0107 \u017c\u0105dania \"feed.xml\".' in /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php:286\nStack trace:\n#0 /home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php(34): CWebApplication->runController('feed.xml')\n#1 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php(141): WebApplication->runController('feed.xml')\n#2 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php(180): CWebApplication->processRequest()\n#3 /home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php(23): CApplication->run()\n#4 {main}",
254 'COOKIES': [],
270 "occurences": 1,
255 'FILES': [],
271 "request": {
256 'GET': [],
272 "COOKIES": [],
257 'POST': [],
273 "FILES": [],
258 'REQUEST_METHOD': None,
274 "GET": [],
259 'SERVER': {
275 "POST": [],
260 'DOCUMENT_ROOT': '/home/dobryslownik/www/sites/dobryslownik/frontend/www/',
276 "REQUEST_METHOD": None,
261 'GATEWAY_INTERFACE': 'CGI/1.1',
277 "SERVER": {
262 'HTTPS': 'on',
278 "DOCUMENT_ROOT": "/home/dobryslownik/www/sites/dobryslownik/frontend/www/",
263 'HTTP_ACCEPT': '*/*',
279 "GATEWAY_INTERFACE": "CGI/1.1",
264 'HTTP_ACCEPT_ENCODING': 'gzip, deflate',
280 "HTTPS": "on",
265 'HTTP_ACCEPT_LANGUAGE': 'pl-PL',
281 "HTTP_ACCEPT": "*/*",
266 'HTTP_CONNECTION': 'close',
282 "HTTP_ACCEPT_ENCODING": "gzip, deflate",
267 'HTTP_HOST': 'dobryslownik.pl',
283 "HTTP_ACCEPT_LANGUAGE": "pl-PL",
268 'HTTP_IF_MODIFIED_SINCE': 'Wed, 30 Jul 2014 18:26:32 GMT',
284 "HTTP_CONNECTION": "close",
269 'HTTP_IF_NONE_MATCH': '"45de3-2a3-4ff6d4b9fbe7f"',
285 "HTTP_HOST": "dobryslownik.pl",
270 'HTTP_USER_AGENT': 'Apple-PubSub/28',
286 "HTTP_IF_MODIFIED_SINCE": "Wed, 30 Jul 2014 18:26:32 GMT",
271 'HTTP_X_FORWARDED_FOR': '195.150.190.186',
287 "HTTP_IF_NONE_MATCH": '"45de3-2a3-4ff6d4b9fbe7f"',
272 'HTTP_X_FORWARDED_PROTO': 'https',
288 "HTTP_USER_AGENT": "Apple-PubSub/28",
273 'PATH': '/bin:/usr/bin:/usr/ucb:/usr/bsd:/usr/local/bin',
289 "HTTP_X_FORWARDED_FOR": "195.150.190.186",
274 'PHP_SELF': '/index.php',
290 "HTTP_X_FORWARDED_PROTO": "https",
275 'QUERY_STRING': '',
291 "PATH": "/bin:/usr/bin:/usr/ucb:/usr/bsd:/usr/local/bin",
276 'REDIRECT_HTTPS': 'on',
292 "PHP_SELF": "/index.php",
277 'REDIRECT_STATUS': '200',
293 "QUERY_STRING": "",
278 'REDIRECT_UNIQUE_ID': 'VFAhZQoCaXIAAAkd414AAAAC',
294 "REDIRECT_HTTPS": "on",
279 'REDIRECT_URL': '/feed.xml',
295 "REDIRECT_STATUS": "200",
280 'REMOTE_ADDR': '195.150.190.186',
296 "REDIRECT_UNIQUE_ID": "VFAhZQoCaXIAAAkd414AAAAC",
281 'REMOTE_PORT': '41728',
297 "REDIRECT_URL": "/feed.xml",
282 'REQUEST_METHOD': 'GET',
298 "REMOTE_ADDR": "195.150.190.186",
283 'REQUEST_TIME': 1414537573,
299 "REMOTE_PORT": "41728",
284 'REQUEST_TIME_FLOAT': 1414537573.32,
300 "REQUEST_METHOD": "GET",
285 'REQUEST_URI': '/feed.xml',
301 "REQUEST_TIME": 1414537573,
286 'SCRIPT_FILENAME': '/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php',
302 "REQUEST_TIME_FLOAT": 1414537573.32,
287 'SCRIPT_NAME': '/index.php',
303 "REQUEST_URI": "/feed.xml",
288 'SERVER_ADDR': '10.2.105.114',
304 "SCRIPT_FILENAME": "/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php",
289 'SERVER_ADMIN': '[no address given]',
305 "SCRIPT_NAME": "/index.php",
290 'SERVER_NAME': 'dobryslownik.pl',
306 "SERVER_ADDR": "10.2.105.114",
291 'SERVER_SIGNATURE': '',
307 "SERVER_ADMIN": "[no address given]",
292 'SERVER_SOFTWARE': 'Apache/2.2.22 (Ubuntu) PHP/5.4.17',
308 "SERVER_NAME": "dobryslownik.pl",
293 'UNIQUE_ID': 'VFAg4AoCaXIAAAkd40UAAAAC'},
309 "SERVER_SIGNATURE": "",
294 'SESSION': []},
310 "SERVER_SOFTWARE": "Apache/2.2.22 (Ubuntu) PHP/5.4.17",
295 'request_id': 'VFAg4AoCaXIAAAkd40UAAAAC',
311 "UNIQUE_ID": "VFAg4AoCaXIAAAkd40UAAAAC",
296 'request_stats': {'custom': 0,
312 },
297 'custom_calls': 0,
313 "SESSION": [],
298 'main': 0,
314 },
299 'nosql': 0.0,
315 "request_id": "VFAg4AoCaXIAAAkd40UAAAAC",
300 'nosql_calls': 0.0,
316 "request_stats": {
301 'remote': 0.0,
317 "custom": 0,
302 'remote_calls': 0.0,
318 "custom_calls": 0,
303 'sql': 0.0,
319 "main": 0,
304 'sql_calls': 0.0,
320 "nosql": 0.0,
305 'tmpl': 0.0,
321 "nosql_calls": 0.0,
306 'tmpl_calls': 0.0,
322 "remote": 0.0,
307 'unknown': 0.0},
323 "remote_calls": 0.0,
308 'slow_calls': [],
324 "sql": 0.0,
309 'start_time': None,
325 "sql_calls": 0.0,
310 'frameinfo': [
326 "tmpl": 0.0,
311 {'cline': None,
327 "tmpl_calls": 0.0,
312 'file': '/home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php',
328 "unknown": 0.0,
313 'fn': 'CWebApplication->runController',
329 },
314 'line': 34,
330 "slow_calls": [],
315 'vars': ['feed.xml']},
331 "start_time": None,
316 {'cline': None,
332 "frameinfo": [
317 'file': '/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php',
333 {
318 'fn': 'WebApplication->runController',
334 "cline": None,
319 'line': 141,
335 "file": "/home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php",
320 'vars': ['feed.xml']},
336 "fn": "CWebApplication->runController",
321 {'cline': None,
337 "line": 34,
322 'file': '/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php',
338 "vars": ["feed.xml"],
323 'fn': 'CWebApplication->processRequest',
339 },
324 'line': 180,
340 {
325 'vars': []},
341 "cline": None,
326 {'cline': None,
342 "file": "/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php",
327 'file': '/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php',
343 "fn": "WebApplication->runController",
328 'fn': 'CApplication->run',
344 "line": 141,
329 'line': 23,
345 "vars": ["feed.xml"],
330 'vars': []}],
346 },
331 'url': 'https://dobryslownik.pl/feed.xml',
347 {
332 'user_agent': 'magpie-crawler/1.1 (U; Linux amd64; en-GB; +http://www.brandwatch.net)',
348 "cline": None,
333 'username': 'guest'}],
349 "file": "/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php",
334 'server': 'unknown',
350 "fn": "CWebApplication->processRequest",
335 'traceback': '',
351 "line": 180,
336 'view_name': ''}
352 "vars": [],
353 },
354 {
355 "cline": None,
356 "file": "/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php",
357 "fn": "CApplication->run",
358 "line": 23,
359 "vars": [],
360 },
361 ],
362 "url": "https://dobryslownik.pl/feed.xml",
363 "user_agent": "magpie-crawler/1.1 (U; Linux amd64; en-GB; +http://www.brandwatch.net)",
364 "username": "guest",
365 }
366 ],
367 "server": "unknown",
368 "traceback": "",
369 "view_name": "",
370 }
337
371
338 JS_PAYLOAD = {
372 JS_PAYLOAD = {
339 "client": "javascript", "language": "javascript",
373 "client": "javascript",
374 "language": "javascript",
340 "error_type": "ReferenceError: non_existant_var is not defined",
375 "error_type": "ReferenceError: non_existant_var is not defined",
341 "occurences": 1, "priority": 5, "server": "jstest.appenlight",
376 "occurences": 1,
342 "http_status": 500, "report_details": [{
377 "priority": 5,
343 "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
378 "server": "jstest.appenlight",
344 "start_time": "2014-10-29T19:59:30.589Z",
379 "http_status": 500,
345 "frameinfo": [
380 "report_details": [
346 {
381 {
347 "cline": "",
382 "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
348 "file": "http://demo.appenlight.com/#",
383 "start_time": "2014-10-29T19:59:30.589Z",
349 "fn": "HTMLAnchorElement.onclick",
384 "frameinfo": [
350 "line": 79,
385 {
351 "vars": []},
386 "cline": "",
352 {
387 "file": "http://demo.appenlight.com/#",
353 "cline": "",
388 "fn": "HTMLAnchorElement.onclick",
354 "file": "http://demo.appenlight.com/static/js/demo.js",
389 "line": 79,
355 "fn": "test_error",
390 "vars": [],
356 "line": 7,
391 },
357 "vars": []},
392 {
358 {
393 "cline": "",
359 "cline": "ReferenceError: non_existant_var is not defined",
394 "file": "http://demo.appenlight.com/static/js/demo.js",
360 "file": "http://demo.appenlight.com/static/js/demo.js",
395 "fn": "test_error",
361 "fn": "something",
396 "line": 7,
362 "line": 2,
397 "vars": [],
363 "vars": []}],
398 },
364 "url": "http://demo.appenlight.com/#",
399 {
365 "server": "jstest.appenlight",
400 "cline": "ReferenceError: non_existant_var is not defined",
366 "username": "i_am_mario",
401 "file": "http://demo.appenlight.com/static/js/demo.js",
367 "ip": "127.0.0.1",
402 "fn": "something",
368 "request_id": "0.01984176435507834"}]}
403 "line": 2,
404 "vars": [],
405 },
406 ],
407 "url": "http://demo.appenlight.com/#",
408 "server": "jstest.appenlight",
409 "username": "i_am_mario",
410 "ip": "127.0.0.1",
411 "request_id": "0.01984176435507834",
412 }
413 ],
414 }
369
415
370 AIRBRAKE_RUBY_EXAMPLE = """
416 AIRBRAKE_RUBY_EXAMPLE = """
371 <?xml version="1.0" ?>
417 <?xml version="1.0" ?>
@@ -590,9 +636,13 b' AIRBRAKE_RUBY_EXAMPLE = """'
590 </server-environment>
636 </server-environment>
591 <framework>Rails: 3.2.11</framework>
637 <framework>Rails: 3.2.11</framework>
592 </notice>
638 </notice>
593 """.replace('\n', '').replace(' ', '')
639 """.replace(
640 "\n", ""
641 ).replace(
642 " ", ""
643 )
594
644
595 AIRBRAKE_EXAMPLE_SHORT = '''
645 AIRBRAKE_EXAMPLE_SHORT = """
596 <?xml version="1.0" encoding="UTF-8"?>
646 <?xml version="1.0" encoding="UTF-8"?>
597 <notice version="2.3">
647 <notice version="2.3">
598 <api-key>76fdb93ab2cf276ec080671a8b3d3866</api-key>
648 <api-key>76fdb93ab2cf276ec080671a8b3d3866</api-key>
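Black breaks the trailing .replace() chains above into the multi-line form because the one-liners exceed its default line length. A hypothetical helper (compact_xml is our name, not in the source) would do the same whitespace stripping while keeping each fixture definition on a single line:

def compact_xml(xml):
    # Drop the layout-only newlines and spaces, mirroring the
    # .replace() chains on the XML fixtures above.
    return xml.replace("\n", "").replace(" ", "")

With it, the definitions read AIRBRAKE_RUBY_EXAMPLE = compact_xml("""...""") and Black has nothing left to split.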
@@ -624,232 +674,304 b" AIRBRAKE_EXAMPLE_SHORT = '''"
624 <app-version>1.0.0</app-version>
674 <app-version>1.0.0</app-version>
625 </server-environment>
675 </server-environment>
626 </notice>
676 </notice>
627 '''.replace('\n', '').replace(' ', '')
677 """.replace(
678 "\n", ""
679 ).replace(
680 " ", ""
681 )
628
682
629 SENTRY_PYTHON_PAYLOAD_7 = {
683 SENTRY_PYTHON_PAYLOAD_7 = {
630 'culprit': 'djangoapp.views in error',
684 "culprit": "djangoapp.views in error",
631 'event_id': '9fae652c8c1c4d6a8eee09260f613a98',
685 "event_id": "9fae652c8c1c4d6a8eee09260f613a98",
632 'exception': {
686 "exception": {
633 'values': [
687 "values": [
634 {'module': 'exceptions',
688 {
635 'stacktrace': {'frames': [{
689 "module": "exceptions",
636 'abs_path': '/home/ergo/venvs/appenlight/local/lib/python2.7/site-packages/django/core/handlers/base.py',
690 "stacktrace": {
637 'context_line': 'response = wrapped_callback(request, *callback_args, **callback_kwargs)',
691 "frames": [
638 'filename': 'django/core/handlers/base.py',
692 {
639 'function': 'get_response',
693 "abs_path": "/home/ergo/venvs/appenlight/local/lib/python2.7/site-packages/django/core/handlers/base.py",
640 'in_app': False,
694 "context_line": "response = wrapped_callback(request, *callback_args, **callback_kwargs)",
641 'lineno': 111,
695 "filename": "django/core/handlers/base.py",
642 'module': 'django.core.handlers.base',
696 "function": "get_response",
643 'post_context': [
697 "in_app": False,
644 ' except Exception as e:',
698 "lineno": 111,
645 ' # If the view raised an exception, run it through exception',
699 "module": "django.core.handlers.base",
646 ' # middleware, and if the exception middleware returns a',
700 "post_context": [
647 ' # response, use that. Otherwise, reraise the exception.',
701 " except Exception as e:",
648 ' for middleware_method in self._exception_middleware:'],
702 " # If the view raised an exception, run it through exception",
649 'pre_context': [
703 " # middleware, and if the exception middleware returns a",
650 ' break',
704 " # response, use that. Otherwise, reraise the exception.",
651 '',
705 " for middleware_method in self._exception_middleware:",
652 ' if response is None:',
706 ],
653 ' wrapped_callback = self.make_view_atomic(callback)',
707 "pre_context": [
654 ' try:'],
708 " break",
655 'vars': {
709 "",
656 'callback': '<function error from djangoapp.views at 0x7fe7c9f2cb90>',
710 " if response is None:",
657 'callback_args': [],
711 " wrapped_callback = self.make_view_atomic(callback)",
658 'callback_kwargs': {},
712 " try:",
659 'e': "Exception(u'test 500 \\u0142\\xf3\\u201c\\u0107\\u201c\\u0107\\u017c\\u0105',)",
713 ],
660 'middleware_method': '<bound method MessageMiddleware.process_request of <django.contrib.messages.middleware.MessageMiddleware object at 0x7fe7c8b0c950>>',
714 "vars": {
661 'request': '<WSGIRequest at 0x140633490316304>',
715 "callback": "<function error from djangoapp.views at 0x7fe7c9f2cb90>",
662 'resolver': "<RegexURLResolver 'djangoapp.urls' (None:None) ^/>",
716 "callback_args": [],
663 'resolver_match': "ResolverMatch(func=<function error at 0x7fe7c9f2cb90>, args=(), kwargs={}, url_name='error', app_name='None', namespace='')",
717 "callback_kwargs": {},
664 'response': None,
718 "e": "Exception(u'test 500 \\u0142\\xf3\\u201c\\u0107\\u201c\\u0107\\u017c\\u0105',)",
665 'self': '<django.core.handlers.wsgi.WSGIHandler object at 0x7fe7cf75a790>',
719 "middleware_method": "<bound method MessageMiddleware.process_request of <django.contrib.messages.middleware.MessageMiddleware object at 0x7fe7c8b0c950>>",
666 'urlconf': "'djangoapp.urls'",
720 "request": "<WSGIRequest at 0x140633490316304>",
667 'wrapped_callback': '<function error from djangoapp.views at 0x7fe7c9f2cb90>'}},
721 "resolver": "<RegexURLResolver 'djangoapp.urls' (None:None) ^/>",
668 {
722 "resolver_match": "ResolverMatch(func=<function error at 0x7fe7c9f2cb90>, args=(), kwargs={}, url_name='error', app_name='None', namespace='')",
669 'abs_path': '/home/ergo/IdeaProjects/django_raven/djangoapp/views.py',
723 "response": None,
670 'context_line': "raise Exception(u'test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105')",
724 "self": "<django.core.handlers.wsgi.WSGIHandler object at 0x7fe7cf75a790>",
671 'filename': 'djangoapp/views.py',
725 "urlconf": "'djangoapp.urls'",
672 'function': 'error',
726 "wrapped_callback": "<function error from djangoapp.views at 0x7fe7c9f2cb90>",
673 'in_app': False,
727 },
674 'lineno': 84,
728 },
675 'module': 'djangoapp.views',
729 {
676 'post_context': [
730 "abs_path": "/home/ergo/IdeaProjects/django_raven/djangoapp/views.py",
677 '',
731 "context_line": "raise Exception(u'test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105')",
678 '',
732 "filename": "djangoapp/views.py",
679 'def notfound(request):',
733 "function": "error",
680 " raise Http404('404 appenlight exception test')",
734 "in_app": False,
681 ''],
735 "lineno": 84,
682 'pre_context': [
736 "module": "djangoapp.views",
683 ' c.execute("INSERT INTO stocks VALUES (\'2006-01-05\',\'BUY\',\'RHAT\',100,35.14)")',
737 "post_context": [
684 ' c.execute("INSERT INTO stocks VALUES (\'2006-01-05\',\'BUY\',\'RHAT\',100,35.14)")',
738 "",
685 ' conn.commit()',
739 "",
686 ' c.close()',
740 "def notfound(request):",
687 " request.POST.get('DUPA')"],
741 " raise Http404('404 appenlight exception test')",
688 'vars': {
742 "",
689 'c': '<sqlite3.Cursor object at 0x7fe7c82af8f0>',
743 ],
690 'conn': '<sqlite3.Connection object at 0x7fe7c8b23bf8>',
744 "pre_context": [
691 'request': '<WSGIRequest at 0x140633490316304>'}}]},
745 " c.execute(\"INSERT INTO stocks VALUES ('2006-01-05','BUY','RHAT',100,35.14)\")",
692 'type': 'Exception',
746 " c.execute(\"INSERT INTO stocks VALUES ('2006-01-05','BUY','RHAT',100,35.14)\")",
693 'value': 'test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105'}]},
747 " conn.commit()",
694 'extra': {
748 " c.close()",
695 'sys.argv': ["'manage.py'", "'runserver'"]},
749 " request.POST.get('DUPA')",
696 'level': 40,
750 ],
697 'message': 'Exception: test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105',
751 "vars": {
698 'modules': {'django': '1.7.1',
752 "c": "<sqlite3.Cursor object at 0x7fe7c82af8f0>",
699 'python': '2.7.6',
753 "conn": "<sqlite3.Connection object at 0x7fe7c8b23bf8>",
700 'raven': '5.9.2'},
754 "request": "<WSGIRequest at 0x140633490316304>",
701 'platform': 'python',
755 },
702 'project': 'sentry',
756 },
703 'release': 'test',
757 ]
704 'request': {'cookies': {
758 },
705 'appenlight': 'X'},
759 "type": "Exception",
706 'data': None,
760 "value": "test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105",
707 'env': {'REMOTE_ADDR': '127.0.0.1',
761 }
708 'SERVER_NAME': 'localhost',
762 ]
709 'SERVER_PORT': '8000'},
763 },
710 'headers': {
764 "extra": {"sys.argv": ["'manage.py'", "'runserver'"]},
711 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
765 "level": 40,
712 'Accept-Encoding': 'gzip, deflate, sdch',
766 "message": "Exception: test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105",
713 'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6',
767 "modules": {"django": "1.7.1", "python": "2.7.6", "raven": "5.9.2"},
714 'Connection': 'keep-alive',
768 "platform": "python",
715 'Content-Length': '',
769 "project": "sentry",
716 'Content-Type': 'text/plain',
770 "release": "test",
717 'Cookie': 'appenlight=X',
771 "request": {
718 'Dnt': '1',
772 "cookies": {"appenlight": "X"},
719 'Host': '127.0.0.1:8000',
773 "data": None,
720 'Upgrade-Insecure-Requests': '1',
774 "env": {
721 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36'},
775 "REMOTE_ADDR": "127.0.0.1",
722 'method': 'GET',
776 "SERVER_NAME": "localhost",
723 'query_string': '',
777 "SERVER_PORT": "8000",
724 'url': 'http://127.0.0.1:8000/error'},
778 },
725 'server_name': 'ergo-virtual-machine',
779 "headers": {
726 'tags': {'site': 'example.com'},
780 "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
727 'time_spent': None,
781 "Accept-Encoding": "gzip, deflate, sdch",
728 'timestamp': now.strftime('%Y-%m-%dT%H:%M:%SZ')}
782 "Accept-Language": "en-US,en;q=0.8,pl;q=0.6",
783 "Connection": "keep-alive",
784 "Content-Length": "",
785 "Content-Type": "text/plain",
786 "Cookie": "appenlight=X",
787 "Dnt": "1",
788 "Host": "127.0.0.1:8000",
789 "Upgrade-Insecure-Requests": "1",
790 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36",
791 },
792 "method": "GET",
793 "query_string": "",
794 "url": "http://127.0.0.1:8000/error",
795 },
796 "server_name": "ergo-virtual-machine",
797 "tags": {"site": "example.com"},
798 "time_spent": None,
799 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%SZ"),
800 }
729
801
730
802
731 SENTRY_JS_PAYLOAD_7 = {
803 SENTRY_JS_PAYLOAD_7 = {
732 "project": "sentry", "logger": "javascript",
804 "project": "sentry",
733 "platform": "javascript", "request": {"headers": {
805 "logger": "javascript",
734 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"},
806 "platform": "javascript",
735 "url": "http://localhost:6543/test/js_error#/"},
807 "request": {
736 "exception": {"values": [{"type": "ReferenceError",
808 "headers": {
737 "value": "fateqtwetew is not defined",
809 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"
738 "stacktrace": {"frames": [{
810 },
739 "filename": "https://cdn.ravenjs.com/2.0.0/angular/raven.min.js",
811 "url": "http://localhost:6543/test/js_error#/",
740 "lineno": 1,
812 },
741 "colno": 4466,
813 "exception": {
742 "function": "c",
814 "values": [
743 "in_app": False},
815 {
744 {
816 "type": "ReferenceError",
745 "filename": "http://localhost:6543/test/js_error",
817 "value": "fateqtwetew is not defined",
746 "lineno": 47,
818 "stacktrace": {
747 "colno": 19,
819 "frames": [
748 "function": "?",
820 {
749 "in_app": True}]}}]},
821 "filename": "https://cdn.ravenjs.com/2.0.0/angular/raven.min.js",
822 "lineno": 1,
823 "colno": 4466,
824 "function": "c",
825 "in_app": False,
826 },
827 {
828 "filename": "http://localhost:6543/test/js_error",
829 "lineno": 47,
830 "colno": 19,
831 "function": "?",
832 "in_app": True,
833 },
834 ]
835 },
836 }
837 ]
838 },
750 "culprit": "http://localhost:6543/test/js_error",
839 "culprit": "http://localhost:6543/test/js_error",
751 "message": "ReferenceError: fateqtwetew is not defined",
840 "message": "ReferenceError: fateqtwetew is not defined",
752 "extra": {"session:duration": 5009},
841 "extra": {"session:duration": 5009},
753 "event_id": "2bf514aaf0e94f35a8f435a0d29a888b"}
842 "event_id": "2bf514aaf0e94f35a8f435a0d29a888b",
843 }
754
844
755 SENTRY_JS_PAYLOAD_7_2 = {
845 SENTRY_JS_PAYLOAD_7_2 = {
756 "project": "sentry", "logger": "javascript",
846 "project": "sentry",
757 "platform": "javascript", "request": {"headers": {
847 "logger": "javascript",
758 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"},
848 "platform": "javascript",
759 "url": "http://localhost:6543/#/report/927/9558"},
849 "request": {
760 "exception": {"values": [{"type": "Error",
850 "headers": {
761 "value": "[$injector:modulerr] http://errors.angularjs.org/1.5.0-rc.0/$injector/modulerr?p0=appenlight&p1=Erro…",
851 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"
762 "stacktrace": {"frames": [{
852 },
763 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
853 "url": "http://localhost:6543/#/report/927/9558",
764 "lineno": 1647,
854 },
765 "colno": 112,
855 "exception": {
766 "function": "?",
856 "values": [
767 "in_app": True},
857 {
768 {
858 "type": "Error",
769 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
859 "value": "[$injector:modulerr] http://errors.angularjs.org/1.5.0-rc.0/$injector/modulerr?p0=appenlight&p1=Erro…",
770 "lineno": 1363,
860 "stacktrace": {
771 "colno": 41,
861 "frames": [
772 "function": "be",
862 {
773 "in_app": True},
863 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
774 {
864 "lineno": 1647,
775 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
865 "colno": 112,
776 "lineno": 1364,
866 "function": "?",
777 "colno": 225,
867 "in_app": True,
778 "function": "zc",
868 },
779 "in_app": True},
869 {
780 {
870 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
781 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
871 "lineno": 1363,
782 "lineno": 1363,
872 "colno": 41,
783 "colno": 421,
873 "function": "be",
784 "function": "c",
874 "in_app": True,
785 "in_app": True},
875 },
786 {
876 {
787 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
877 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
788 "lineno": 1386,
878 "lineno": 1364,
789 "colno": 360,
879 "colno": 225,
790 "function": "fb",
880 "function": "zc",
791 "in_app": True},
881 "in_app": True,
792 {
882 },
793 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
883 {
794 "lineno": 1383,
884 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
795 "colno": 49,
885 "lineno": 1363,
796 "function": "g",
886 "colno": 421,
797 "in_app": True},
887 "function": "c",
798 {
888 "in_app": True,
799 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
889 },
800 "lineno": 1351,
890 {
801 "colno": 344,
891 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
802 "function": "n",
892 "lineno": 1386,
803 "in_app": True},
893 "colno": 360,
804 {
894 "function": "fb",
805 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
895 "in_app": True,
806 "lineno": 1383,
896 },
807 "colno": 475,
897 {
808 "function": "?",
898 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
809 "in_app": True},
899 "lineno": 1383,
810 {
900 "colno": 49,
811 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
901 "function": "g",
812 "lineno": 1350,
902 "in_app": True,
813 "colno": 421,
903 },
814 "function": "?",
904 {
815 "in_app": True}]}}]},
905 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
906 "lineno": 1351,
907 "colno": 344,
908 "function": "n",
909 "in_app": True,
910 },
911 {
912 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
913 "lineno": 1383,
914 "colno": 475,
915 "function": "?",
916 "in_app": True,
917 },
918 {
919 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
920 "lineno": 1350,
921 "colno": 421,
922 "function": "?",
923 "in_app": True,
924 },
925 ]
926 },
927 }
928 ]
929 },
816 "culprit": "http://localhost:6543/static/js/appenlight.js?rev=752",
930 "culprit": "http://localhost:6543/static/js/appenlight.js?rev=752",
817 "message": "Error: [$injector:modulerr] http://errors.angularjs.org/1.5.0-rc.0/$injector/modulerr?p0=appenlight&…",
931 "message": "Error: [$injector:modulerr] http://errors.angularjs.org/1.5.0-rc.0/$injector/modulerr?p0=appenlight&…",
818 "extra": {"session:duration": 330},
932 "extra": {"session:duration": 330},
819 "event_id": "c50b5b6a13994f54b1d8da0c2e0e767a"}
933 "event_id": "c50b5b6a13994f54b1d8da0c2e0e767a",
934 }
820
935
821 SENTRY_LOG_PAYLOAD_7 = {
936 SENTRY_LOG_PAYLOAD_7 = {
822 "project": "sentry", "sentry.interfaces.Message": {
937 "project": "sentry",
823 "message": "TEST from django logging", "params": []},
938 "sentry.interfaces.Message": {"message": "TEST from django logging", "params": []},
824 "server_name": "ergo-virtual-machine",
939 "server_name": "ergo-virtual-machine",
825 "culprit": "testlogger in index",
940 "culprit": "testlogger in index",
826 "extra": {"thread": 139723601139456, "process": 24645,
941 "extra": {
827 "sys.argv": ["'manage.py'", "'runserver'"],
942 "thread": 139723601139456,
828 "price": 6, "threadName": "'Thread-1'",
943 "process": 24645,
829 "filename": "'views.py'",
944 "sys.argv": ["'manage.py'", "'runserver'"],
830 "processName": "'MainProcess'",
945 "price": 6,
831 "tag": "'extra'", "dupa": True, "lineno": 22,
946 "threadName": "'Thread-1'",
832 "asctime": "'2016-01-18 05:24:29,001'",
947 "filename": "'views.py'",
833 "pathname": "'/home/ergo/IdeaProjects/django_raven/djangoapp/views.py'"},
948 "processName": "'MainProcess'",
949 "tag": "'extra'",
950 "dupa": True,
951 "lineno": 22,
952 "asctime": "'2016-01-18 05:24:29,001'",
953 "pathname": "'/home/ergo/IdeaProjects/django_raven/djangoapp/views.py'",
954 },
834 "event_id": "9a6172f2e6d2444582f83a6c333d9cfb",
955 "event_id": "9a6172f2e6d2444582f83a6c333d9cfb",
835 "timestamp": now.strftime('%Y-%m-%dT%H:%M:%SZ'),
956 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%SZ"),
836 "tags": {"site": "example.com"},
957 "tags": {"site": "example.com"},
837 "modules": {"python": "2.7.6", "raven": "5.9.2",
958 "modules": {"python": "2.7.6", "raven": "5.9.2", "django": "1.7.1"},
838 "django": "1.7.1"}, "time_spent": None,
959 "time_spent": None,
839 "platform": "python", "release": "test",
960 "platform": "python",
840 "logger": "testlogger", "level": 50,
961 "release": "test",
841 "message": "TEST from django logging"}
962 "logger": "testlogger",
963 "level": 50,
964 "message": "TEST from django logging",
965 }
842
966
843 METRICS_PAYLOAD = {
967 METRICS_PAYLOAD = {
844 "namespace": "some.monitor",
968 "namespace": "some.monitor",
845 "timestamp": now.strftime('%Y-%m-%dT%H:%M:%S.0'),
969 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
846 "server_name": "server.name",
970 "server_name": "server.name",
847 "tags": [["usage_foo", 15.5],
971 "tags": [["usage_foo", 15.5], ["usage_bar", 63]],
848 ["usage_bar", 63],
849 ]
850 }
972 }
851
973
852
974
853 SENTRY_PYTHON_ENCODED = b'x\x9c\xedXmo\xdbF\x12\xfe+\x0b\xdd\x07I9\x89/z\x97[\x05p\x1d5\x0ej\'\x81,\xa7\xed\xd59bE\x8d$V|\xcb\xeeR\x96k\xf8\xbf\xdf\xcc.II\xb6\xdc\xf4\xd2\x16\xb8\x03\xa2 \x06\xb9\xdc\x9d\x9dy\xe6\x99y\x96\xbc\xaf\xa4"\xf9\x15|U9a\x15\t\xb1\x12w\x95\x06\xab\xc0\xd6\x87T\x05I\x8c\xc3\xf7\x95\r\x0f3\x90x\xf9\xcb}E*\xee\xaf\x95\xe0>\xe8G\x0b\xc1\xa3\xe2\xd1"\x8b\xfd|Me\t\xca\x13 \xd3$\x96@\x06\xf9Lz)W+zf\xaf\x92\x08l\x10\xcb\xc4\xde@\xbc\x91vz\xa7VI\xdc\xb2\xc3\xc4\xe7\xa1\x1d\x06\xb3b\xc4\xea\xdb2P\xd0LqO\xbe\x04i\xcf\x7f\xe51.\xf3\x13\x01\xf6\x8a\xc7\xf3\x10\x84\xb4g\\\x82\x95j\xbfS\x01\x9e\x9f\xc4\n\xb6\x14\xd0/\x15\xb6\xf7\x0b\x16\xac\xf0\x88\x05\x92\xbdMb8\xa15\xec\xd1\xefV\xf04\x85\xb9\x87\xbe\x843\xdc\x98\x8d\x98\x84paE|\r\xde&\x80[\x8f\xab$\n\xfcZ1\xa1~\xcc\n\x02y\xd4:\xfdJ7FO6\xab\t\xf8\x84X\xab\x06{Q\x0cy\\,%\xde\xef\x06\xd6\xb74tt[\x9386.\xf2\xc7\xb8d\x18\xe6G\xc2&\x91\xea\x00\x9c\xc7\xeb\xff\xc1\xce\x92(\ry\x10\x13Vj\x05\x8c\xa2EoU&b\x98k\xc4X\x8d3?\x89"\xb4\x0cB$\xa2n=\xb6\xf2Ga\xc6y\x81\x0cb\xe4S\xecC\x89e\x83\xa9\xbb\x14\xa4\xf5}\xce\xa5)\xde\xd5O\x8cw\xdf\x7f\xf7\xe19DuZb\xa4"BZ\x98\xb2<=\xe2y:\xfa\r\x17R3\x96x[)\xf1\xa9eU\x85p\xb3\xae\xe3\xb0\x9b\x9b\xccq;\xad\x9b\x9b\xed\xa2\x8d\xd7-\xc7\xf5\xf5\x90\xd3\x7fr\xe7\xb8\xfd\xfc\xae[m\xe8D\x1cd\x8b\xe0\xa5M\x11\x88$\xdc\x80\xf0"\xae|\xcd\xfdI>rI\x035\xaa\x98\x91\xe14\xd2\xc0\xa2(\xa4\xa5qm0\xb23\xaa\xd5\x1b\xccd{t\xff\xd0`\x99\x08uL\xa3bN\x9a\xea{9\xa2\xed\xf4\x15\x96\x8a\xbe\xd5N\x15\x89\xf0\x02\x89\xd5\x18\xcfA\xc0\x1c\xbdX\xf0P\x02>\x8e\x829V\x10\x9a\x07/\x02,8zV\xf9v\x96d\xf1\x9c\x99\x01v\tRb\xe5]\x963-\xec\x17\xb8\x03\xd9\xd3De\xc9\x82}kB\xb0\x88\\"\x98Y\x91Y$\xad\xdd\x06\xd6\x13C,\x99Q\xdfa\\1g\xdb_\xb4{\xdd\xb6\x03}\x7f\xe8\xbc|I\xaeS\xc9iwJ\xdbh\xa4(y\xebV.\x03\xeb\xc7\xab\xd7o\xce\xcd\xc8ScC\xd7\x1f\xf6\xba\xceK\x03\x83vU\x9b\xa3E\x93\xdcu=\xdbm\x0f\x07}\xb75p;\xbdA\xabc\x16\x14\xc9\xd4+\x8a\xb6f\x08\xcf\x16"\x89\xd8\xa3\x9c\xed\x07\xe1\xf7\x06\x83\xce@\x9by\\\xdc\x7f\xd2\\\xc1&mf\x02K\xd8^O.\nB\xb1\xea\xce\x08\xd2DVYM\x97\x1e\xfd\xa9\xb3\x7f\xdb\x07q\xe5\x1d\x84\xea\xe1a\x8f&x\x1fga\x88#h\x01\x93\xa9\x13\xf0\xd8n\x85VD\xc9<\x0bu%\x1dM\x0fud\xdao\x11\x84@\xac\xdcM|\xbeu\x87A\x0cq\x823\xdd\xce\x10o\x83\xd8\xc3-\xf7\xc8\x9aw.\x8f\xe6\x91\xbd\xcf4V\xdd\xb2\x0b\xae\x96r\xe6\xcd\xee\xbc\x1d)kh7\xe7F\x9d\xc2\xfa\x9f\x97\xb0\xfd\xdfL\x00_\xd3\x82/m\xc0\x7f\xa1\xce\x1d\x95\x97\xc73\x9f\x91\xa6\xcfk\xe4\x7f\x9d\xca#\xa0\xfc\x8d\xda\xf6U]\xbe\xaa\xcbWu\xf9\xffQ\x97\xfe_\xa0.\x7f\xea\xd8\xfeDit\xae~Gbn\x13\xb1\xd6\xa5\x97\x8b\x87\'8\xaa\x8e]Bg\x9b\xd2~^?|\x0b\xb6\xe0g\nj7\x957o\xaf\xc6\x93){\xf3v\xfa\x8eI\x95\xf8k\xc9>\x9c^\\\x8f\xafX\xad\xdar\x9c^\xd3q\x9b\xd4w\xaa\xdf]\xff\x8c\x7f\'\xe7\xa7\xd3j\xc3u\x9cF\xbbk\xb9\x9d\xfaM\xa5\x94\x81\xbf\xc9j\x12\xc7\x16\xb5\xe1@\xd5\xf6\xb6\xf2\xc3D\xc2n 
\xc7\xdbz\xff\xeejj\xa1R\xd7\xaa\xaf\xae\xdf\x9fV\xeb\xcf\xbf\xe9\xd0\xff9,X\x9c\xa8\x05\xf5\xa0"e\xf5R\x82\x04\x0f0\xb9\xe7J\xa5\x1d\xa7S\xab\xe2\x1fSE\xd8^\xb1\x94J\xe1a\xd4\xd2\xabFe\x0ez\xbf\xafKG~\nQ\xef\xdb\xd6Y&dr\xa4u\xb4\x1c\xde\x99\x7fq\xeb@p\x0ew\xc1\x010\x15\x7f\xa4\xe3\xcd\x17\xfd>.<VSe;8^I\x8fYU\xd6\xcf\xa0\xfbG\xcb\xc7\xc0y\\\x0b\x8d\x14f\x8e\x83Zh\xc4\xcfh\xdf\xc1\xb5\x96A\xa3\x82X4\x1f)\nz9<P\xd8\xcaAhe\x8etT\xfa\xb3\x05\r\x7f\xf1\xbe\xf9\xae~\x16\xa6"PG\xc0cA\xdei\x8d\xa8\x08R\xe3\x02H*\xdd\xe6&\x10*\xe3a3\xe2\xfe\x8a\xb0\xd1\xdfV\x94\xe0\x9a0\xf2NZ\xd8\x126\x9a\xa3\xd5\x88\xc7(a\x88w\x95fUE\x16\x1b\x83\xd5\x8av\x02\xb0\xe4\x95\x17h\x15u\x17\xbd\xe1\x00\x86>\xcc\x16\xb3N\xdb\x1f\xce\x16=\x07\xda\xb3\xd6\x00|\x7f\x01\xd0=\xe4\x1623I\xd6\x01\x18\x96R\x9f\xcf\x84\xfe|Sq55\xb0\xf1\xd2\xcd\n\x89\x7fb\xdbn\xabo9\xf8\xcf=\x198\x8ec\x97\xd1\xad\x80\xa3\xc2\x1b\x1bg\x94\xeeX5/ ^\x9anE3N}B\xbfy\x81\x08e\x18\x89\xc6 n^_5 \xfe\xe6\xd3\xc8\xb1\x06\x8d4\xd4\x17\xbd\xbd\xd9\xe3\xd8O\xe6A\xbc\xd4\'\xee\xdf\x82\xb4\xc1\xb0HC\xae\x90Ur\x8e\xa7\x1a\x9c\xb9\xe38MZ\x03\xa4M\x1e\x06\x1b\xd8Y1I\xde*{\xa5\xa2\xb0\x81\xd9\t\x03\x9f\xd3\x02{K#\xff\xdc>\x1e\x8d\x8c#\xc3F\x10\xa1\xa7\xf6-\xcc\xd2\xc6\x0b\xfb\x85q\x93\xec^\xa7K\x81\xf16\xdf`\x12\xfcL@3/Mi`\xc3\x19\xafbU^\x9f\'\xa6\x88\x0f\xb13\xbe\x13\xf2\xf4\xac\xc0}\xa4W\x9c!\x1f\xa0I8\x8aD\xa3\x1f\xf1m\x13]\x19\xe9U\xd7\x98\xf9\xe6\xe9\x12\xcc\x16\x97\xc9oA\x18r\xbbk9\xac\xf6\x93\xeb~\xc3.\x828\xdb\xb2\xed\xa0\xe7\xf5:uv\x8a\x91\xc1\x8f0\xfb!Pv\xb7\xdd\xb7\xda=V\xfb\xe1|zy\xd1`a\xb0\x06\xf6\x1a\xfcuRgg+A\x8a\xd2%\x07[\xbd\x9ek\r;\xec\x8a/\xb8\x08\xf2U9\xd6:\xb3\xd3\xbc\xd04\xaa\xfa\xdc\xac\xa9\x82\xef:\x9a\x00\xd8\xec?\x8c\'\xde\xdb\xd3\xcb1\xcd\xd2o=+\x02\x01\xe7\xe4\xcf\xde\xbf\x9bL\xe9Y\x81\xc4d|\xf9n:\xf6N_\xbd\x9a\x1c@\xa5\xed"\xb6\xe2\xce\x93x\xbe3L\xd0\xcd\x9a+\xbe;3\xec\x8e\x90\xaf\xc7S\xbd&\xc4\x8a \xe8:N\xd9\x03\x0c;\xcd\x9b\x17M\xc5\xb7/C6-\x984Bjci\x7fL%kW\xac!\xce\xd2\xed%\x88\xc0\x93\xa9\xc1=\xdf\x18\x83G\xc1\x10\x11\xcd\xcc-\xe73\xa5\xe2Q\xaa\xb7q\\\x14\xb8n\xd3\xe9L\xdd\xdeI\xc79i\r\xffE\x93\xe8\x15m\xee\x8b,\x9a\xc9\'\xdfQ\x91\x89\xb0L\xc4]\xd1\x9f\xc2d\xb9\x04]hE\\\xbbc\xc1\xfef\xa8\x06\xad6b\xda\x1aZ\x9dn\xbf7\xdc\x01u\xbf\xd7Y\xb0]\xe9\n\xef\xd1j\xbe4\xbd\x91\x1e6\n\xb3\'\xf8\xe6\x96\xc1\x03E=\xcf\x04\xcf\xab\xab\x04[\x1f\xa7i\xd9t|5\xdd?F2r\x94\xb2\xb4\xd7\x8d\xb1by\x16*s\xb0\xd9\x0f\xcc\xc0\xbe\x1f\xd3\x1cf\xd9\xf2wc\x1at\x9d\xfe^P\x9fu\xf0\n\xdf<\xd0\x1f\x96\x0f\xd1\x1bC\xa8\xdb\x12\xeb\xf6\xfbL%\xecH_\x1b86O\x03\xdb|\xef\xb6\xf1\xbc\x82\xa7\xc6\xa3\x01}4\x07\xd8\x10\xb8,\x95\xa4r\xb8\x7f)E\'\xec\xcbu\xc6\xa4\xc9\xb0\x84>\x17\x98\x84!:!\xd0YH\x93S\xce\xd7\x86E\xd8\x85\xf3^\xb8cs!:\x1a\xf1f\xce\xd3\x87\x87\xff\x00`\xb1k\xbd'
975 SENTRY_PYTHON_ENCODED = b'x\x9c\xedXmo\xdbF\x12\xfe+\x0b\xdd\x07I9\x89/z\x97[\x05p\x1d5\x0ej\'\x81,\xa7\xed\xd59bE\x8d$V|\xcb\xeeR\x96k\xf8\xbf\xdf\xcc.II\xb6\xdc\xf4\xd2\x16\xb8\x03\xa2 \x06\xb9\xdc\x9d\x9dy\xe6\x99y\x96\xbc\xaf\xa4"\xf9\x15|U9a\x15\t\xb1\x12w\x95\x06\xab\xc0\xd6\x87T\x05I\x8c\xc3\xf7\x95\r\x0f3\x90x\xf9\xcb}E*\xee\xaf\x95\xe0>\xe8G\x0b\xc1\xa3\xe2\xd1"\x8b\xfd|Me\t\xca\x13 \xd3$\x96@\x06\xf9Lz)W+zf\xaf\x92\x08l\x10\xcb\xc4\xde@\xbc\x91vz\xa7VI\xdc\xb2\xc3\xc4\xe7\xa1\x1d\x06\xb3b\xc4\xea\xdb2P\xd0LqO\xbe\x04i\xcf\x7f\xe51.\xf3\x13\x01\xf6\x8a\xc7\xf3\x10\x84\xb4g\\\x82\x95j\xbfS\x01\x9e\x9f\xc4\n\xb6\x14\xd0/\x15\xb6\xf7\x0b\x16\xac\xf0\x88\x05\x92\xbdMb8\xa15\xec\xd1\xefV\xf04\x85\xb9\x87\xbe\x843\xdc\x98\x8d\x98\x84paE|\r\xde&\x80[\x8f\xab$\n\xfcZ1\xa1~\xcc\n\x02y\xd4:\xfdJ7FO6\xab\t\xf8\x84X\xab\x06{Q\x0cy\\,%\xde\xef\x06\xd6\xb74tt[\x9386.\xf2\xc7\xb8d\x18\xe6G\xc2&\x91\xea\x00\x9c\xc7\xeb\xff\xc1\xce\x92(\ry\x10\x13Vj\x05\x8c\xa2EoU&b\x98k\xc4X\x8d3?\x89"\xb4\x0cB$\xa2n=\xb6\xf2Ga\xc6y\x81\x0cb\xe4S\xecC\x89e\x83\xa9\xbb\x14\xa4\xf5}\xce\xa5)\xde\xd5O\x8cw\xdf\x7f\xf7\xe19DuZb\xa4"BZ\x98\xb2<=\xe2y:\xfa\r\x17R3\x96x[)\xf1\xa9eU\x85p\xb3\xae\xe3\xb0\x9b\x9b\xccq;\xad\x9b\x9b\xed\xa2\x8d\xd7-\xc7\xf5\xf5\x90\xd3\x7fr\xe7\xb8\xfd\xfc\xae[m\xe8D\x1cd\x8b\xe0\xa5M\x11\x88$\xdc\x80\xf0"\xae|\xcd\xfdI>rI\x035\xaa\x98\x91\xe14\xd2\xc0\xa2(\xa4\xa5qm0\xb23\xaa\xd5\x1b\xccd{t\xff\xd0`\x99\x08uL\xa3bN\x9a\xea{9\xa2\xed\xf4\x15\x96\x8a\xbe\xd5N\x15\x89\xf0\x02\x89\xd5\x18\xcfA\xc0\x1c\xbdX\xf0P\x02>\x8e\x829V\x10\x9a\x07/\x02,8zV\xf9v\x96d\xf1\x9c\x99\x01v\tRb\xe5]\x963-\xec\x17\xb8\x03\xd9\xd3De\xc9\x82}kB\xb0\x88\\"\x98Y\x91Y$\xad\xdd\x06\xd6\x13C,\x99Q\xdfa\\1g\xdb_\xb4{\xdd\xb6\x03}\x7f\xe8\xbc|I\xaeS\xc9iwJ\xdbh\xa4(y\xebV.\x03\xeb\xc7\xab\xd7o\xce\xcd\xc8ScC\xd7\x1f\xf6\xba\xceK\x03\x83vU\x9b\xa3E\x93\xdcu=\xdbm\x0f\x07}\xb75p;\xbdA\xabc\x16\x14\xc9\xd4+\x8a\xb6f\x08\xcf\x16"\x89\xd8\xa3\x9c\xed\x07\xe1\xf7\x06\x83\xce@\x9by\\\xdc\x7f\xd2\\\xc1&mf\x02K\xd8^O.\nB\xb1\xea\xce\x08\xd2DVYM\x97\x1e\xfd\xa9\xb3\x7f\xdb\x07q\xe5\x1d\x84\xea\xe1a\x8f&x\x1fga\x88#h\x01\x93\xa9\x13\xf0\xd8n\x85VD\xc9<\x0bu%\x1dM\x0fud\xdao\x11\x84@\xac\xdcM|\xbeu\x87A\x0cq\x823\xdd\xce\x10o\x83\xd8\xc3-\xf7\xc8\x9aw.\x8f\xe6\x91\xbd\xcf4V\xdd\xb2\x0b\xae\x96r\xe6\xcd\xee\xbc\x1d)kh7\xe7F\x9d\xc2\xfa\x9f\x97\xb0\xfd\xdfL\x00_\xd3\x82/m\xc0\x7f\xa1\xce\x1d\x95\x97\xc73\x9f\x91\xa6\xcfk\xe4\x7f\x9d\xca#\xa0\xfc\x8d\xda\xf6U]\xbe\xaa\xcbWu\xf9\xffQ\x97\xfe_\xa0.\x7f\xea\xd8\xfeDit\xae~Gbn\x13\xb1\xd6\xa5\x97\x8b\x87\'8\xaa\x8e]Bg\x9b\xd2~^?|\x0b\xb6\xe0g\nj7\x957o\xaf\xc6\x93){\xf3v\xfa\x8eI\x95\xf8k\xc9>\x9c^\\\x8f\xafX\xad\xdar\x9c^\xd3q\x9b\xd4w\xaa\xdf]\xff\x8c\x7f\'\xe7\xa7\xd3j\xc3u\x9cF\xbbk\xb9\x9d\xfaM\xa5\x94\x81\xbf\xc9j\x12\xc7\x16\xb5\xe1@\xd5\xf6\xb6\xf2\xc3D\xc2n 
\xc7\xdbz\xff\xeejj\xa1R\xd7\xaa\xaf\xae\xdf\x9fV\xeb\xcf\xbf\xe9\xd0\xff9,X\x9c\xa8\x05\xf5\xa0"e\xf5R\x82\x04\x0f0\xb9\xe7J\xa5\x1d\xa7S\xab\xe2\x1fSE\xd8^\xb1\x94J\xe1a\xd4\xd2\xabFe\x0ez\xbf\xafKG~\nQ\xef\xdb\xd6Y&dr\xa4u\xb4\x1c\xde\x99\x7fq\xeb@p\x0ew\xc1\x010\x15\x7f\xa4\xe3\xcd\x17\xfd>.<VSe;8^I\x8fYU\xd6\xcf\xa0\xfbG\xcb\xc7\xc0y\\\x0b\x8d\x14f\x8e\x83Zh\xc4\xcfh\xdf\xc1\xb5\x96A\xa3\x82X4\x1f)\nz9<P\xd8\xcaAhe\x8etT\xfa\xb3\x05\r\x7f\xf1\xbe\xf9\xae~\x16\xa6"PG\xc0cA\xdei\x8d\xa8\x08R\xe3\x02H*\xdd\xe6&\x10*\xe3a3\xe2\xfe\x8a\xb0\xd1\xdfV\x94\xe0\x9a0\xf2NZ\xd8\x126\x9a\xa3\xd5\x88\xc7(a\x88w\x95fUE\x16\x1b\x83\xd5\x8av\x02\xb0\xe4\x95\x17h\x15u\x17\xbd\xe1\x00\x86>\xcc\x16\xb3N\xdb\x1f\xce\x16=\x07\xda\xb3\xd6\x00|\x7f\x01\xd0=\xe4\x1623I\xd6\x01\x18\x96R\x9f\xcf\x84\xfe|Sq55\xb0\xf1\xd2\xcd\n\x89\x7fb\xdbn\xabo9\xf8\xcf=\x198\x8ec\x97\xd1\xad\x80\xa3\xc2\x1b\x1bg\x94\xeeX5/ ^\x9anE3N}B\xbfy\x81\x08e\x18\x89\xc6 n^_5 \xfe\xe6\xd3\xc8\xb1\x06\x8d4\xd4\x17\xbd\xbd\xd9\xe3\xd8O\xe6A\xbc\xd4\'\xee\xdf\x82\xb4\xc1\xb0HC\xae\x90Ur\x8e\xa7\x1a\x9c\xb9\xe38MZ\x03\xa4M\x1e\x06\x1b\xd8Y1I\xde*{\xa5\xa2\xb0\x81\xd9\t\x03\x9f\xd3\x02{K#\xff\xdc>\x1e\x8d\x8c#\xc3F\x10\xa1\xa7\xf6-\xcc\xd2\xc6\x0b\xfb\x85q\x93\xec^\xa7K\x81\xf16\xdf`\x12\xfcL@3/Mi`\xc3\x19\xafbU^\x9f\'\xa6\x88\x0f\xb13\xbe\x13\xf2\xf4\xac\xc0}\xa4W\x9c!\x1f\xa0I8\x8aD\xa3\x1f\xf1m\x13]\x19\xe9U\xd7\x98\xf9\xe6\xe9\x12\xcc\x16\x97\xc9oA\x18r\xbbk9\xac\xf6\x93\xeb~\xc3.\x828\xdb\xb2\xed\xa0\xe7\xf5:uv\x8a\x91\xc1\x8f0\xfb!Pv\xb7\xdd\xb7\xda=V\xfb\xe1|zy\xd1`a\xb0\x06\xf6\x1a\xfcuRgg+A\x8a\xd2%\x07[\xbd\x9ek\r;\xec\x8a/\xb8\x08\xf2U9\xd6:\xb3\xd3\xbc\xd04\xaa\xfa\xdc\xac\xa9\x82\xef:\x9a\x00\xd8\xec?\x8c\'\xde\xdb\xd3\xcb1\xcd\xd2o=+\x02\x01\xe7\xe4\xcf\xde\xbf\x9bL\xe9Y\x81\xc4d|\xf9n:\xf6N_\xbd\x9a\x1c@\xa5\xed"\xb6\xe2\xce\x93x\xbe3L\xd0\xcd\x9a+\xbe;3\xec\x8e\x90\xaf\xc7S\xbd&\xc4\x8a \xe8:N\xd9\x03\x0c;\xcd\x9b\x17M\xc5\xb7/C6-\x984Bjci\x7fL%kW\xac!\xce\xd2\xed%\x88\xc0\x93\xa9\xc1=\xdf\x18\x83G\xc1\x10\x11\xcd\xcc-\xe73\xa5\xe2Q\xaa\xb7q\\\x14\xb8n\xd3\xe9L\xdd\xdeI\xc79i\r\xffE\x93\xe8\x15m\xee\x8b,\x9a\xc9\'\xdfQ\x91\x89\xb0L\xc4]\xd1\x9f\xc2d\xb9\x04]hE\\\xbbc\xc1\xfef\xa8\x06\xad6b\xda\x1aZ\x9dn\xbf7\xdc\x01u\xbf\xd7Y\xb0]\xe9\n\xef\xd1j\xbe4\xbd\x91\x1e6\n\xb3\'\xf8\xe6\x96\xc1\x03E=\xcf\x04\xcf\xab\xab\x04[\x1f\xa7i\xd9t|5\xdd?F2r\x94\xb2\xb4\xd7\x8d\xb1by\x16*s\xb0\xd9\x0f\xcc\xc0\xbe\x1f\xd3\x1cf\xd9\xf2wc\x1at\x9d\xfe^P\x9fu\xf0\n\xdf<\xd0\x1f\x96\x0f\xd1\x1bC\xa8\xdb\x12\xeb\xf6\xfbL%\xecH_\x1b86O\x03\xdb|\xef\xb6\xf1\xbc\x82\xa7\xc6\xa3\x01}4\x07\xd8\x10\xb8,\x95\xa4r\xb8\x7f)E\'\xec\xcbu\xc6\xa4\xc9\xb0\x84>\x17\x98\x84!:!\xd0YH\x93S\xce\xd7\x86E\xd8\x85\xf3^\xb8cs!:\x1a\xf1f\xce\xd3\x87\x87\xff\x00`\xb1k\xbd'
854
976
855 SENTRY_RUBY_ENCODED = b'eJzVVttu4zYQ/RVCfUgLRNfYsi002z5sf6DoU7cLgyJHEmOKFEjKW6+Rf++QkmM7ziJF0T7UMBzNhZzLOTPKMYI9KLcVPKoioDndLAvIH5YlrykHVpd8sSlhXfNstWTRfdSDtbQFdP4djP4o9sIKrX4xRpuKcBQ5cFIfSIa+TqC3o/2A3kWWl3G2jLPFb3lZLcuqyGePrR0wgahSo5T3kcR0ZFQtsvtoMPoJ2ItlkNQ12vR4mRnrA56Wum3BoIzPbJSDEegcOYyZmJoIRVJCHZFCAdmgiwWzB7NVtPfpg2l1vBfGjVTGPWUduvn6NB8l2Kg6RpQ5LK2nQoYgi6RISvSY1ANluxvlXsCXV8o9POn6RodRfJWvtAaYNvxGbcdh0MZd6k04XSZZ8oBiLVqESvTUK/PZpx6F5CHxB9QUQaP4VEqe5EWSn1XxqKSmPFiy4Mu0YqMxCEwcmn22AMrCekSTVeJRhj+BjY7WEuJO650Nvg/Bt6GGcupPZ8kmaFro4y+GDgMYOye78mqpayoDB7GkkL/I1yqIUxShY8zJaglBuQiFP1mtwi3rUI3UuqFdSG1qjMcuiGWy8CKyLXaHwcOLXcmuVDGnjk7dQqomWREI2gsltr77SIJivjmb9Z5oqFpi9HD7KJ0YqHHxoIPh5Kv0TrfCiJBpifX4Rgz2wE6prlE2E5/yOVUvxnOADHUPQSekvWBBkGMOA/KGOuBbSzEp8XWGODsfirnuw21CtbNt9WLrXC/jbx11Aq5D7nz/cw+Ar8JwzWazr9RTBRG28SXVFlNB+34inufGNI3KmUNsKK6fOai/wuLUsx24CaLyWhefWoDghVtdp03oUL4JDHCdAeob0cBMpaXXfhWq0TPdiujZc9YZBPuIj462dnoarc/4GMwMBj/Qfg3sqRx9Ez4dI0+UtzYfxgheaHu1Aqd1Mq0oXIVsh3EZ+Gsbg3touhYB3CK5HWaFckQICo1oE24VeSR3nXNDlablpqCroixZTetFttqs101RNIyXWVY2Bd1U7zn8nBcr3+Ukr9bZOksnBO7+UH6IFQ9/8eczkhMJfJ3Rd4TRwY0GCFUH8tIfS750gnWk8xOtCB8NMoxMGwHNRDfEdcKSWiKAIQAhOa7l7CIoxqO13Q7Udeft7ZfHqNiEQfQjDrL64Cccl7RCJFe4ENQWg0aVMyOEfeWT3XoHPPCrZ1VySpnrEG5+n2yN1i8vlQbnen4hnCI/37+BCCEcmlMPvtdz8Y/k+PzDXJb/iGaqdNvi2lY/XVgIqaEV6lvtnT4GLBuBBEpdnUUTFRYQhY9a3bkXLEKZBLy/vTpwuumEE3n8QOCm12mne0j9izBNcD5TP7qpn+EYxyRZTPLlnMZhSlMp6jTIaU0t3KA1Z3cB16Y849VQaW8BO1d6ECD538HrOoM3UAuXvMmEf43Pb4B5MUn4fj2D/h7Hw5X+n5Ybsm/eIfvlSP1zjv+/upX+x/35/Oy/fwFCRniE'
977 SENTRY_RUBY_ENCODED = b"eJzVVttu4zYQ/RVCfUgLRNfYsi002z5sf6DoU7cLgyJHEmOKFEjKW6+Rf++QkmM7ziJF0T7UMBzNhZzLOTPKMYI9KLcVPKoioDndLAvIH5YlrykHVpd8sSlhXfNstWTRfdSDtbQFdP4djP4o9sIKrX4xRpuKcBQ5cFIfSIa+TqC3o/2A3kWWl3G2jLPFb3lZLcuqyGePrR0wgahSo5T3kcR0ZFQtsvtoMPoJ2ItlkNQ12vR4mRnrA56Wum3BoIzPbJSDEegcOYyZmJoIRVJCHZFCAdmgiwWzB7NVtPfpg2l1vBfGjVTGPWUduvn6NB8l2Kg6RpQ5LK2nQoYgi6RISvSY1ANluxvlXsCXV8o9POn6RodRfJWvtAaYNvxGbcdh0MZd6k04XSZZ8oBiLVqESvTUK/PZpx6F5CHxB9QUQaP4VEqe5EWSn1XxqKSmPFiy4Mu0YqMxCEwcmn22AMrCekSTVeJRhj+BjY7WEuJO650Nvg/Bt6GGcupPZ8kmaFro4y+GDgMYOye78mqpayoDB7GkkL/I1yqIUxShY8zJaglBuQiFP1mtwi3rUI3UuqFdSG1qjMcuiGWy8CKyLXaHwcOLXcmuVDGnjk7dQqomWREI2gsltr77SIJivjmb9Z5oqFpi9HD7KJ0YqHHxoIPh5Kv0TrfCiJBpifX4Rgz2wE6prlE2E5/yOVUvxnOADHUPQSekvWBBkGMOA/KGOuBbSzEp8XWGODsfirnuw21CtbNt9WLrXC/jbx11Aq5D7nz/cw+Ar8JwzWazr9RTBRG28SXVFlNB+34inufGNI3KmUNsKK6fOai/wuLUsx24CaLyWhefWoDghVtdp03oUL4JDHCdAeob0cBMpaXXfhWq0TPdiujZc9YZBPuIj462dnoarc/4GMwMBj/Qfg3sqRx9Ez4dI0+UtzYfxgheaHu1Aqd1Mq0oXIVsh3EZ+Gsbg3touhYB3CK5HWaFckQICo1oE24VeSR3nXNDlablpqCroixZTetFttqs101RNIyXWVY2Bd1U7zn8nBcr3+Ukr9bZOksnBO7+UH6IFQ9/8eczkhMJfJ3Rd4TRwY0GCFUH8tIfS750gnWk8xOtCB8NMoxMGwHNRDfEdcKSWiKAIQAhOa7l7CIoxqO13Q7Udeft7ZfHqNiEQfQjDrL64Cccl7RCJFe4ENQWg0aVMyOEfeWT3XoHPPCrZ1VySpnrEG5+n2yN1i8vlQbnen4hnCI/37+BCCEcmlMPvtdz8Y/k+PzDXJb/iGaqdNvi2lY/XVgIqaEV6lvtnT4GLBuBBEpdnUUTFRYQhY9a3bkXLEKZBLy/vTpwuumEE3n8QOCm12mne0j9izBNcD5TP7qpn+EYxyRZTPLlnMZhSlMp6jTIaU0t3KA1Z3cB16Y849VQaW8BO1d6ECD538HrOoM3UAuXvMmEf43Pb4B5MUn4fj2D/h7Hw5X+n5Ybsm/eIfvlSP1zjv+/upX+x/35/Oy/fwFCRniE"
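Both encoded fixtures above are zlib-compressed Sentry payloads: SENTRY_PYTHON_ENCODED starts with zlib's b"x\x9c" header, while SENTRY_RUBY_ENCODED is the same kind of stream wrapped in base64 (b"eJz" is that header base64-encoded). A hedged sketch of decoding them back to dictionaries, with the helper name being ours:

import base64
import json
import zlib


def decode_sentry_blob(blob, is_base64=False):
    # The Ruby fixture carries an extra base64 layer; the Python one is
    # raw zlib. Both decompress to a UTF-8 JSON document.
    if is_base64:
        blob = base64.b64decode(blob)
    return json.loads(zlib.decompress(blob).decode("utf8"))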
@@ -18,93 +18,101 b' import json'
18 from webtest import TestApp
18 from webtest import TestApp
19
19
20
20
21 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
21 @pytest.mark.usefixtures(
22 'default_application')
22 "base_app", "with_migrations", "clean_tables", "default_application"
23 )
23 class TestAPIReportsView(object):
24 class TestAPIReportsView(object):
24 def test_no_json_payload(self, base_app):
25 def test_no_json_payload(self, base_app):
25 app = TestApp(base_app)
26 app = TestApp(base_app)
26 url_path = '/api/reports'
27 url_path = "/api/reports"
27 headers = {'x-appenlight-api-key': 'xxxx'}
28 headers = {"x-appenlight-api-key": "xxxx"}
28 res = app.post(url_path, {}, status=400,
29 res = app.post(url_path, {}, status=400, headers=headers)
29 headers=headers)
30
30
31 def test_wrong_json_payload(self, base_app):
31 def test_wrong_json_payload(self, base_app):
32 app = TestApp(base_app)
32 app = TestApp(base_app)
33 url_path = '/api/reports'
33 url_path = "/api/reports"
34 headers = {'x-appenlight-api-key': 'xxxx'}
34 headers = {"x-appenlight-api-key": "xxxx"}
35 res = app.post(url_path, {}, status=400, headers=headers)
35 res = app.post(url_path, {}, status=400, headers=headers)
36
36
37 def test_correct_json_payload(self, base_app):
37 def test_correct_json_payload(self, base_app):
38 import appenlight.tests.payload_examples as payload_examples
38 import appenlight.tests.payload_examples as payload_examples
39
39 app = TestApp(base_app)
40 app = TestApp(base_app)
40 url_path = '/api/reports'
41 url_path = "/api/reports"
41 headers = {'x-appenlight-api-key': 'xxxx'}
42 headers = {"x-appenlight-api-key": "xxxx"}
42 res = app.post_json(url_path, [payload_examples.PYTHON_PAYLOAD_0_5],
43 res = app.post_json(
43 headers=headers)
44 url_path, [payload_examples.PYTHON_PAYLOAD_0_5], headers=headers
45 )
44
46
45 def test_json_payload_wrong_key(self, base_app):
47 def test_json_payload_wrong_key(self, base_app):
46 import appenlight.tests.payload_examples as payload_examples
48 import appenlight.tests.payload_examples as payload_examples
49
47 app = TestApp(base_app)
50 app = TestApp(base_app)
48 url_path = '/api/reports'
51 url_path = "/api/reports"
49 res = app.post(url_path,
52 res = app.post(
50 json.dumps([payload_examples.PYTHON_PAYLOAD_0_5]),
53 url_path, json.dumps([payload_examples.PYTHON_PAYLOAD_0_5]), status=403
51 status=403)
54 )
52
55
53
56
54 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
57 @pytest.mark.usefixtures(
55 'default_data', 'default_application')
58 "base_app", "with_migrations", "clean_tables", "default_data", "default_application"
59 )
56 class TestRegistrationView(object):
60 class TestRegistrationView(object):
57 def test_register_empty(self, base_app):
61 def test_register_empty(self, base_app):
58 url_path = '/register'
62 url_path = "/register"
59 app = TestApp(base_app)
63 app = TestApp(base_app)
60 resp = app.get('/')
64 resp = app.get("/")
61 cookies = resp.headers.getall('Set-Cookie')
65 cookies = resp.headers.getall("Set-Cookie")
62 cookie = None
66 cookie = None
63 for name, value in [c.split('=', 1) for c in cookies]:
67 for name, value in [c.split("=", 1) for c in cookies]:
64 if name == 'XSRF-TOKEN':
68 if name == "XSRF-TOKEN":
65 cookie = value.split(';')[0]
69 cookie = value.split(";")[0]
66 headers = {'X-XSRF-TOKEN': cookie}
70 headers = {"X-XSRF-TOKEN": cookie}
67 res = app.post(url_path,
71 res = app.post(
68 params={'user_name': '',
72 url_path,
69 'user_password': '',
73 params={"user_name": "", "user_password": "", "email": ""},
70 'email': ''},
74 headers=headers,
71 headers=headers)
75 )
72 assert 'This field is required.' in res
76 assert "This field is required." in res
73
77
74 def test_register_proper(self, base_app):
78 def test_register_proper(self, base_app):
75 url_path = '/register'
79 url_path = "/register"
76 app = TestApp(base_app)
80 app = TestApp(base_app)
77 resp = app.get('/')
81 resp = app.get("/")
78 cookies = resp.headers.getall('Set-Cookie')
82 cookies = resp.headers.getall("Set-Cookie")
79 cookie = None
83 cookie = None
80 for name, value in [c.split('=', 1) for c in cookies]:
84 for name, value in [c.split("=", 1) for c in cookies]:
81 if name == 'XSRF-TOKEN':
85 if name == "XSRF-TOKEN":
82 cookie = value.split(';')[0]
86 cookie = value.split(";")[0]
83 headers = {'X-XSRF-TOKEN': cookie}
87 headers = {"X-XSRF-TOKEN": cookie}
84 res = app.post(url_path,
88 res = app.post(
85 params={'user_name': 'user_foo',
89 url_path,
86 'user_password': 'passbar',
90 params={
87 'email': 'foobar@blablabla.com'},
91 "user_name": "user_foo",
88 headers=headers,
92 "user_password": "passbar",
89 status=302)
93 "email": "foobar@blablabla.com",
94 },
95 headers=headers,
96 status=302,
97 )
90
98
91
99
92 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
100 @pytest.mark.usefixtures(
93 'default_data', 'default_application')
101 "base_app", "with_migrations", "clean_tables", "default_data", "default_application"
102 )
94 class TestRegistrationAuthTokenView(object):
103 class TestRegistrationAuthTokenView(object):
95
96 def test_create_application_bad(self, base_app):
104 def test_create_application_bad(self, base_app):
97 url_path = '/applications'
105 url_path = "/applications"
98 app = TestApp(base_app)
106 app = TestApp(base_app)
99 headers = {'x-appenlight-auth-token': ''}
107 headers = {"x-appenlight-auth-token": ""}
100 app.post_json(url_path,
108 app.post_json(
101 params={'resource_name': 'user_foo'},
109 url_path, params={"resource_name": "user_foo"}, headers=headers, status=403
102 headers=headers, status=403)
110 )
103
111
104 def test_create_application_proper(self, base_app):
112 def test_create_application_proper(self, base_app):
105 url_path = '/applications'
113 url_path = "/applications"
106 app = TestApp(base_app)
114 app = TestApp(base_app)
107 headers = {'x-appenlight-auth-token': '1234'}
115 headers = {"x-appenlight-auth-token": "1234"}
108 app.post_json(url_path,
116 app.post_json(
109 params={'resource_name': 'user_foo'},
117 url_path, params={"resource_name": "user_foo"}, headers=headers, status=200
110 headers=headers, status=200)
118 )
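Worth noting for readers of these two tests: the status= argument doubles as the assertion, since WebTest raises webtest.AppError whenever the actual response code differs from it, so no explicit assert is needed. A hedged one-call sketch of the failure case, with `app` standing in for the TestApp used above:

    app.post_json(
        "/applications",
        params={"resource_name": "user_foo"},
        headers={"x-appenlight-auth-token": ""},  # missing token
        status=403,  # WebTest raises AppError on any other status
    )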
This diff has been collapsed as it changes many lines (1710 lines changed).
@@ -37,12 +37,12 b' class DummyContext(object):'
37 pass
37 pass
38
38
39
39
40 @pytest.mark.usefixtures('base_app')
40 @pytest.mark.usefixtures("base_app")
41 class BasicTest(object):
41 class BasicTest(object):
42 pass
42 pass
43
43
44
44
45 @pytest.mark.usefixtures('base_app')
45 @pytest.mark.usefixtures("base_app")
46 class TestMigration(object):
46 class TestMigration(object):
47 def test_migration(self):
47 def test_migration(self):
48 assert 1 == 1
48 assert 1 == 1
@@ -53,108 +53,125 b' class TestSentryProto_7(object):'
53 import appenlight.tests.payload_examples as payload_examples
53 import appenlight.tests.payload_examples as payload_examples
54 from appenlight.lib.enums import ParsedSentryEventType
54 from appenlight.lib.enums import ParsedSentryEventType
55 from appenlight.lib.utils.sentry import parse_sentry_event
55 from appenlight.lib.utils.sentry import parse_sentry_event
56
56 event_dict, event_type = parse_sentry_event(
57 event_dict, event_type = parse_sentry_event(
57 payload_examples.SENTRY_LOG_PAYLOAD_7)
58 payload_examples.SENTRY_LOG_PAYLOAD_7
59 )
58 assert ParsedSentryEventType.LOG == event_type
60 assert ParsedSentryEventType.LOG == event_type
59 assert event_dict['log_level'] == 'CRITICAL'
61 assert event_dict["log_level"] == "CRITICAL"
60 assert event_dict['message'] == 'TEST from django logging'
62 assert event_dict["message"] == "TEST from django logging"
61 assert event_dict['namespace'] == 'testlogger'
63 assert event_dict["namespace"] == "testlogger"
62 assert event_dict['request_id'] == '9a6172f2e6d2444582f83a6c333d9cfb'
64 assert event_dict["request_id"] == "9a6172f2e6d2444582f83a6c333d9cfb"
63 assert event_dict['server'] == 'ergo-virtual-machine'
65 assert event_dict["server"] == "ergo-virtual-machine"
64 assert event_dict['date'] == datetime.utcnow().date().strftime(
66 assert event_dict["date"] == datetime.utcnow().date().strftime(
65 '%Y-%m-%dT%H:%M:%SZ')
67 "%Y-%m-%dT%H:%M:%SZ"
66 tags = [('site', 'example.com'),
68 )
67 ('sys.argv', ["'manage.py'", "'runserver'"]),
69 tags = [
68 ('price', 6),
70 ("site", "example.com"),
69 ('tag', "'extra'"),
71 ("sys.argv", ["'manage.py'", "'runserver'"]),
70 ('dupa', True),
72 ("price", 6),
71 ('project', 'sentry'),
73 ("tag", "'extra'"),
72 ('sentry_culprit', 'testlogger in index'),
74 ("dupa", True),
73 ('sentry_language', 'python'),
75 ("project", "sentry"),
74 ('sentry_release', 'test')]
76 ("sentry_culprit", "testlogger in index"),
75 assert sorted(event_dict['tags']) == sorted(tags)
77 ("sentry_language", "python"),
78 ("sentry_release", "test"),
79 ]
80 assert sorted(event_dict["tags"]) == sorted(tags)
76
81
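The tag comparison here is order-insensitive by design: both sides are sorted before comparing. Tuples sort by their first element, so as long as the tag names are unique the mixed value types (ints, bools, lists) are never compared with each other. In miniature:

    tags_a = [("b", 2), ("a", [1])]
    tags_b = [("a", [1]), ("b", 2)]
    # tuples sort on the tag name alone here, so the list and the int
    # on the right-hand side never meet in a comparison
    assert sorted(tags_a) == sorted(tags_b)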
77 def test_report_payload(self):
82 def test_report_payload(self):
78 import appenlight.tests.payload_examples as payload_examples
83 import appenlight.tests.payload_examples as payload_examples
79 from appenlight.lib.enums import ParsedSentryEventType
84 from appenlight.lib.enums import ParsedSentryEventType
80 from appenlight.lib.utils.sentry import parse_sentry_event
85 from appenlight.lib.utils.sentry import parse_sentry_event
81 utcnow = datetime.utcnow().date().strftime('%Y-%m-%dT%H:%M:%SZ')
86
87 utcnow = datetime.utcnow().date().strftime("%Y-%m-%dT%H:%M:%SZ")
82 event_dict, event_type = parse_sentry_event(
88 event_dict, event_type = parse_sentry_event(
83 payload_examples.SENTRY_PYTHON_PAYLOAD_7)
89 payload_examples.SENTRY_PYTHON_PAYLOAD_7
90 )
84 assert ParsedSentryEventType.ERROR_REPORT == event_type
91 assert ParsedSentryEventType.ERROR_REPORT == event_type
85 assert event_dict['client'] == 'sentry'
92 assert event_dict["client"] == "sentry"
86 assert event_dict[
93 assert (
87 'error'] == 'Exception: test 500 ' \
94 event_dict["error"] == "Exception: test 500 "
88 '\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105'
95 "\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105"
89 assert event_dict['language'] == 'python'
96 )
90 assert event_dict['ip'] == '127.0.0.1'
97 assert event_dict["language"] == "python"
91 assert event_dict['request_id'] == '9fae652c8c1c4d6a8eee09260f613a98'
98 assert event_dict["ip"] == "127.0.0.1"
92 assert event_dict['server'] == 'ergo-virtual-machine'
99 assert event_dict["request_id"] == "9fae652c8c1c4d6a8eee09260f613a98"
93 assert event_dict['start_time'] == utcnow
100 assert event_dict["server"] == "ergo-virtual-machine"
94 assert event_dict['url'] == 'http://127.0.0.1:8000/error'
101 assert event_dict["start_time"] == utcnow
95 assert event_dict['user_agent'] == 'Mozilla/5.0 (X11; Linux x86_64) ' \
102 assert event_dict["url"] == "http://127.0.0.1:8000/error"
96 'AppleWebKit/537.36 (KHTML, ' \
103 assert (
97 'like Gecko) Chrome/47.0.2526.106 ' \
104 event_dict["user_agent"] == "Mozilla/5.0 (X11; Linux x86_64) "
98 'Safari/537.36'
105 "AppleWebKit/537.36 (KHTML, "
99 assert event_dict['view_name'] == 'djangoapp.views in error'
106 "like Gecko) Chrome/47.0.2526.106 "
100 tags = [('site', 'example.com'), ('sentry_release', 'test')]
107 "Safari/537.36"
101 assert sorted(event_dict['tags']) == sorted(tags)
108 )
102 extra = [('sys.argv', ["'manage.py'", "'runserver'"]),
109 assert event_dict["view_name"] == "djangoapp.views in error"
103 ('project', 'sentry')]
110 tags = [("site", "example.com"), ("sentry_release", "test")]
104 assert sorted(event_dict['extra']) == sorted(extra)
111 assert sorted(event_dict["tags"]) == sorted(tags)
105 request = event_dict['request']
112 extra = [("sys.argv", ["'manage.py'", "'runserver'"]), ("project", "sentry")]
106 assert request['url'] == 'http://127.0.0.1:8000/error'
113 assert sorted(event_dict["extra"]) == sorted(extra)
107 assert request['cookies'] == {'appenlight': 'X'}
114 request = event_dict["request"]
108 assert request['data'] is None
115 assert request["url"] == "http://127.0.0.1:8000/error"
109 assert request['method'] == 'GET'
116 assert request["cookies"] == {"appenlight": "X"}
110 assert request['query_string'] == ''
117 assert request["data"] is None
111 assert request['env'] == {'REMOTE_ADDR': '127.0.0.1',
118 assert request["method"] == "GET"
112 'SERVER_NAME': 'localhost',
119 assert request["query_string"] == ""
113 'SERVER_PORT': '8000'}
120 assert request["env"] == {
114 assert request['headers'] == {
121 "REMOTE_ADDR": "127.0.0.1",
115 'Accept': 'text/html,application/xhtml+xml,'
122 "SERVER_NAME": "localhost",
116 'application/xml;q=0.9,image/webp,*/*;q=0.8',
123 "SERVER_PORT": "8000",
117 'Accept-Encoding': 'gzip, deflate, sdch',
124 }
118 'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6',
125 assert request["headers"] == {
119 'Connection': 'keep-alive',
126 "Accept": "text/html,application/xhtml+xml,"
120 'Content-Length': '',
127 "application/xml;q=0.9,image/webp,*/*;q=0.8",
121 'Content-Type': 'text/plain',
128 "Accept-Encoding": "gzip, deflate, sdch",
122 'Cookie': 'appenlight=X',
129 "Accept-Language": "en-US,en;q=0.8,pl;q=0.6",
123 'Dnt': '1',
130 "Connection": "keep-alive",
124 'Host': '127.0.0.1:8000',
131 "Content-Length": "",
125 'Upgrade-Insecure-Requests': '1',
132 "Content-Type": "text/plain",
126 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) '
133 "Cookie": "appenlight=X",
127 'AppleWebKit/537.36 (KHTML, like Gecko) '
134 "Dnt": "1",
128 'Chrome/47.0.2526.106 Safari/537.36'}
135 "Host": "127.0.0.1:8000",
129 traceback = event_dict['traceback']
136 "Upgrade-Insecure-Requests": "1",
130 assert traceback[0]['cline'] == 'response = wrapped_callback(request, ' \
137 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) "
131 '*callback_args, **callback_kwargs)'
138 "AppleWebKit/537.36 (KHTML, like Gecko) "
132 assert traceback[0]['file'] == 'django/core/handlers/base.py'
139 "Chrome/47.0.2526.106 Safari/537.36",
133 assert traceback[0]['fn'] == 'get_response'
140 }
134 assert traceback[0]['line'] == 111
141 traceback = event_dict["traceback"]
135 assert traceback[0]['module'] == 'django.core.handlers.base'
142 assert (
136
143 traceback[0]["cline"] == "response = wrapped_callback(request, "
137 assert traceback[1]['cline'] == "raise Exception(u'test 500 " \
144 "*callback_args, **callback_kwargs)"
138 "\u0142\xf3\u201c\u0107\u201c\u0107" \
145 )
139 "\u017c\u0105')"
146 assert traceback[0]["file"] == "django/core/handlers/base.py"
140 assert traceback[1]['file'] == 'djangoapp/views.py'
147 assert traceback[0]["fn"] == "get_response"
141 assert traceback[1]['fn'] == 'error'
148 assert traceback[0]["line"] == 111
142 assert traceback[1]['line'] == 84
149 assert traceback[0]["module"] == "django.core.handlers.base"
143 assert traceback[1]['module'] == 'djangoapp.views'
150
144 assert sorted(traceback[1]['vars']) == sorted([
151 assert (
145 ('c',
152 traceback[1]["cline"] == "raise Exception(u'test 500 "
146 '<sqlite3.Cursor object at 0x7fe7c82af8f0>'),
153 "\u0142\xf3\u201c\u0107\u201c\u0107"
147 ('request',
154 "\u017c\u0105')"
148 '<WSGIRequest at 0x140633490316304>'),
155 )
149 ('conn',
156 assert traceback[1]["file"] == "djangoapp/views.py"
150 '<sqlite3.Connection object at 0x7fe7c8b23bf8>')])
157 assert traceback[1]["fn"] == "error"
158 assert traceback[1]["line"] == 84
159 assert traceback[1]["module"] == "djangoapp.views"
160 assert sorted(traceback[1]["vars"]) == sorted(
161 [
162 ("c", "<sqlite3.Cursor object at 0x7fe7c82af8f0>"),
163 ("request", "<WSGIRequest at 0x140633490316304>"),
164 ("conn", "<sqlite3.Connection object at 0x7fe7c8b23bf8>"),
165 ]
166 )
151
167
152
168
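Most of the churn in this hunk is black rewriting long assertions: backslash continuations with implicit string concatenation become a single parenthesized expression. The transformation in miniature (toy value, same semantics):

    value = "part one part two"

    # before black: backslash continuations
    assert value == "part one " \
                    "part two"

    # after black: parentheses, no backslashes
    assert (
        value == "part one "
        "part two"
    )

Both forms compare against the one concatenated string "part one part two".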
153 class TestAPIReports_0_5_Validation(object):
169 class TestAPIReports_0_5_Validation(object):
154 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
170 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
155 def test_no_payload(self, dummy_json):
171 def test_no_payload(self, dummy_json):
156 import colander
172 import colander
157 from appenlight.validators import ReportListSchema_0_5
173 from appenlight.validators import ReportListSchema_0_5
174
158 utcnow = datetime.utcnow()
175 utcnow = datetime.utcnow()
159 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
176 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
160 with pytest.raises(colander.Invalid):
177 with pytest.raises(colander.Invalid):
@@ -164,104 +181,128 b' class TestAPIReports_0_5_Validation(object):'
164 dummy_json = [{}]
181 dummy_json = [{}]
165 import colander
182 import colander
166 from appenlight.validators import ReportListSchema_0_5
183 from appenlight.validators import ReportListSchema_0_5
184
167 utcnow = datetime.utcnow()
185 utcnow = datetime.utcnow()
168 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
186 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
169 with pytest.raises(colander.Invalid):
187 with pytest.raises(colander.Invalid):
170 schema.deserialize(dummy_json)
188 schema.deserialize(dummy_json)
171
189
172 def test_minimal_payload(self):
190 def test_minimal_payload(self):
173 dummy_json = [{'report_details': [{}]}]
191 dummy_json = [{"report_details": [{}]}]
174 from appenlight.validators import ReportListSchema_0_5
192 from appenlight.validators import ReportListSchema_0_5
193
175 utcnow = datetime.utcnow()
194 utcnow = datetime.utcnow()
176 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
195 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
177
196
178 deserialized = schema.deserialize(dummy_json)
197 deserialized = schema.deserialize(dummy_json)
179
198
180 expected_deserialization = [
199 expected_deserialization = [
181 {'language': 'unknown',
200 {
182 'server': 'unknown',
201 "language": "unknown",
183 'occurences': 1,
202 "server": "unknown",
184 'priority': 5,
203 "occurences": 1,
185 'view_name': '',
204 "priority": 5,
186 'client': 'unknown',
205 "view_name": "",
187 'http_status': 200,
206 "client": "unknown",
188 'error': '',
207 "http_status": 200,
189 'tags': None,
208 "error": "",
190 'username': '',
209 "tags": None,
191 'traceback': None,
210 "username": "",
192 'extra': None,
211 "traceback": None,
193 'url': '',
212 "extra": None,
194 'ip': None,
213 "url": "",
195 'start_time': utcnow,
214 "ip": None,
196 'group_string': None,
215 "start_time": utcnow,
197 'request': {},
216 "group_string": None,
198 'request_stats': None,
217 "request": {},
199 'end_time': None,
218 "request_stats": None,
200 'request_id': '',
219 "end_time": None,
201 'message': '',
220 "request_id": "",
202 'slow_calls': [],
221 "message": "",
203 'user_agent': ''
222 "slow_calls": [],
204 }
223 "user_agent": "",
224 }
205 ]
225 ]
206 assert deserialized == expected_deserialization
226 assert deserialized == expected_deserialization
207
227
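test_minimal_payload leans on colander's bind(): deserializing a near-empty report fills every field from schema defaults, and start_time in particular comes from the utcnow value bound into the schema. A toy sketch of that mechanism, assuming nothing beyond public colander behavior (the field name is illustrative):

    import colander
    from datetime import datetime

    class Report(colander.MappingSchema):
        # deferred values are resolved when the schema is bound
        start_time = colander.SchemaNode(
            colander.DateTime(),
            missing=colander.deferred(lambda node, kw: kw["utcnow"]),
        )

    schema = Report().bind(utcnow=datetime.utcnow())
    print(schema.deserialize({}))  # {'start_time': <the bound utcnow>}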
208 def test_full_payload(self):
228 def test_full_payload(self):
209 import appenlight.tests.payload_examples as payload_examples
229 import appenlight.tests.payload_examples as payload_examples
210 from appenlight.validators import ReportListSchema_0_5
230 from appenlight.validators import ReportListSchema_0_5
231
211 PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5)
232 PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5)
212 utcnow = datetime.utcnow()
233 utcnow = datetime.utcnow()
213 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
234 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
214 PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1),
235 PYTHON_PAYLOAD["tags"] = [
215 ("date",
236 ("foo", 1),
216 utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]
237 ("action", "test"),
238 ("baz", 1.1),
239 ("date", utcnow.strftime("%Y-%m-%dT%H:%M:%S.0")),
240 ]
217 dummy_json = [PYTHON_PAYLOAD]
241 dummy_json = [PYTHON_PAYLOAD]
218 deserialized = schema.deserialize(dummy_json)[0]
242 deserialized = schema.deserialize(dummy_json)[0]
219 assert deserialized['error'] == PYTHON_PAYLOAD['error']
243 assert deserialized["error"] == PYTHON_PAYLOAD["error"]
220 assert deserialized['language'] == PYTHON_PAYLOAD['language']
244 assert deserialized["language"] == PYTHON_PAYLOAD["language"]
221 assert deserialized['server'] == PYTHON_PAYLOAD['server']
245 assert deserialized["server"] == PYTHON_PAYLOAD["server"]
222 assert deserialized['priority'] == PYTHON_PAYLOAD['priority']
246 assert deserialized["priority"] == PYTHON_PAYLOAD["priority"]
223 assert deserialized['view_name'] == PYTHON_PAYLOAD['view_name']
247 assert deserialized["view_name"] == PYTHON_PAYLOAD["view_name"]
224 assert deserialized['client'] == PYTHON_PAYLOAD['client']
248 assert deserialized["client"] == PYTHON_PAYLOAD["client"]
225 assert deserialized['http_status'] == PYTHON_PAYLOAD['http_status']
249 assert deserialized["http_status"] == PYTHON_PAYLOAD["http_status"]
226 assert deserialized['error'] == PYTHON_PAYLOAD['error']
250 assert deserialized["error"] == PYTHON_PAYLOAD["error"]
227 assert deserialized['occurences'] == PYTHON_PAYLOAD['occurences']
251 assert deserialized["occurences"] == PYTHON_PAYLOAD["occurences"]
228 assert deserialized['username'] == PYTHON_PAYLOAD['username']
252 assert deserialized["username"] == PYTHON_PAYLOAD["username"]
229 assert deserialized['traceback'] == PYTHON_PAYLOAD['traceback']
253 assert deserialized["traceback"] == PYTHON_PAYLOAD["traceback"]
230 assert deserialized['url'] == PYTHON_PAYLOAD['url']
254 assert deserialized["url"] == PYTHON_PAYLOAD["url"]
231 assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
255 assert deserialized["ip"] == PYTHON_PAYLOAD["ip"]
232 assert deserialized['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
256 assert (
233 PYTHON_PAYLOAD['start_time']
257 deserialized["start_time"].strftime("%Y-%m-%dT%H:%M:%S.0")
234 assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
258 == PYTHON_PAYLOAD["start_time"]
235 assert deserialized['group_string'] is None
259 )
236 assert deserialized['request_stats'] == PYTHON_PAYLOAD['request_stats']
260 assert deserialized["ip"] == PYTHON_PAYLOAD["ip"]
237 assert deserialized['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
261 assert deserialized["group_string"] is None
238 PYTHON_PAYLOAD['end_time']
262 assert deserialized["request_stats"] == PYTHON_PAYLOAD["request_stats"]
239 assert deserialized['request_id'] == PYTHON_PAYLOAD['request_id']
263 assert (
240 assert deserialized['message'] == PYTHON_PAYLOAD['message']
264 deserialized["end_time"].strftime("%Y-%m-%dT%H:%M:%S.0")
241 assert deserialized['user_agent'] == PYTHON_PAYLOAD['user_agent']
265 == PYTHON_PAYLOAD["end_time"]
242 assert deserialized['slow_calls'][0]['start'].strftime(
266 )
243 '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
267 assert deserialized["request_id"] == PYTHON_PAYLOAD["request_id"]
244 'start']
268 assert deserialized["message"] == PYTHON_PAYLOAD["message"]
245 assert deserialized['slow_calls'][0]['end'].strftime(
269 assert deserialized["user_agent"] == PYTHON_PAYLOAD["user_agent"]
246 '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
270 assert (
247 'end']
271 deserialized["slow_calls"][0]["start"].strftime("%Y-%m-%dT%H:%M:%S.0")
248 assert deserialized['slow_calls'][0]['statement'] == \
272 == PYTHON_PAYLOAD["slow_calls"][0]["start"]
249 PYTHON_PAYLOAD['slow_calls'][0]['statement']
273 )
250 assert deserialized['slow_calls'][0]['parameters'] == \
274 assert (
251 PYTHON_PAYLOAD['slow_calls'][0]['parameters']
275 deserialized["slow_calls"][0]["end"].strftime("%Y-%m-%dT%H:%M:%S.0")
252 assert deserialized['slow_calls'][0]['type'] == \
276 == PYTHON_PAYLOAD["slow_calls"][0]["end"]
253 PYTHON_PAYLOAD['slow_calls'][0]['type']
277 )
254 assert deserialized['slow_calls'][0]['subtype'] == \
278 assert (
255 PYTHON_PAYLOAD['slow_calls'][0]['subtype']
279 deserialized["slow_calls"][0]["statement"]
256 assert deserialized['slow_calls'][0]['location'] == ''
280 == PYTHON_PAYLOAD["slow_calls"][0]["statement"]
257 assert deserialized['tags'] == [
281 )
258 ('foo', 1), ('action', 'test'),
282 assert (
259 ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]
283 deserialized["slow_calls"][0]["parameters"]
260
284 == PYTHON_PAYLOAD["slow_calls"][0]["parameters"]
261
285 )
262 @pytest.mark.usefixtures('log_schema')
286 assert (
287 deserialized["slow_calls"][0]["type"]
288 == PYTHON_PAYLOAD["slow_calls"][0]["type"]
289 )
290 assert (
291 deserialized["slow_calls"][0]["subtype"]
292 == PYTHON_PAYLOAD["slow_calls"][0]["subtype"]
293 )
294 assert deserialized["slow_calls"][0]["location"] == ""
295 assert deserialized["tags"] == [
296 ("foo", 1),
297 ("action", "test"),
298 ("baz", 1.1),
299 ("date", utcnow.strftime("%Y-%m-%dT%H:%M:%S.0")),
300 ]
301
302
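test_full_payload compares datetimes by formatting both sides with the same strftime pattern, which deliberately tolerates any sub-second precision the parser adds beyond what the wire format carries. The trick in isolation:

    from datetime import datetime

    sent = "2024-01-01T12:00:00.0"                   # wire format
    parsed = datetime(2024, 1, 1, 12, 0, 0, 123456)  # parser-side value
    # the shared pattern drops the microseconds before comparing
    assert parsed.strftime("%Y-%m-%dT%H:%M:%S.0") == sent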
303 @pytest.mark.usefixtures("log_schema")
263 class TestAPILogsValidation(object):
304 class TestAPILogsValidation(object):
264 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
305 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
265 def test_no_payload(self, dummy_json, log_schema):
306 def test_no_payload(self, dummy_json, log_schema):
266 import colander
307 import colander
267
308
@@ -271,74 +312,81 b' class TestAPILogsValidation(object):'
271 def test_minimal_payload(self, log_schema):
312 def test_minimal_payload(self, log_schema):
272 dummy_json = [{}]
313 dummy_json = [{}]
273 deserialized = log_schema.deserialize(dummy_json)[0]
314 deserialized = log_schema.deserialize(dummy_json)[0]
274 expected = {'log_level': 'UNKNOWN',
315 expected = {
275 'namespace': '',
316 "log_level": "UNKNOWN",
276 'server': 'unknown',
317 "namespace": "",
277 'request_id': '',
318 "server": "unknown",
278 'primary_key': None,
319 "request_id": "",
279 'date': datetime.utcnow(),
320 "primary_key": None,
280 'message': '',
321 "date": datetime.utcnow(),
281 'tags': None}
322 "message": "",
282 assert deserialized['log_level'] == expected['log_level']
323 "tags": None,
283 assert deserialized['message'] == expected['message']
324 }
284 assert deserialized['namespace'] == expected['namespace']
325 assert deserialized["log_level"] == expected["log_level"]
285 assert deserialized['request_id'] == expected['request_id']
326 assert deserialized["message"] == expected["message"]
286 assert deserialized['server'] == expected['server']
327 assert deserialized["namespace"] == expected["namespace"]
287 assert deserialized['tags'] == expected['tags']
328 assert deserialized["request_id"] == expected["request_id"]
288 assert deserialized['primary_key'] == expected['primary_key']
329 assert deserialized["server"] == expected["server"]
330 assert deserialized["tags"] == expected["tags"]
331 assert deserialized["primary_key"] == expected["primary_key"]
289
332
290 def test_normal_payload(self, log_schema):
333 def test_normal_payload(self, log_schema):
291 import appenlight.tests.payload_examples as payload_examples
334 import appenlight.tests.payload_examples as payload_examples
335
292 deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0]
336 deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0]
293 expected = payload_examples.LOG_EXAMPLES[0]
337 expected = payload_examples.LOG_EXAMPLES[0]
294 assert deserialized['log_level'] == expected['log_level']
338 assert deserialized["log_level"] == expected["log_level"]
295 assert deserialized['message'] == expected['message']
339 assert deserialized["message"] == expected["message"]
296 assert deserialized['namespace'] == expected['namespace']
340 assert deserialized["namespace"] == expected["namespace"]
297 assert deserialized['request_id'] == expected['request_id']
341 assert deserialized["request_id"] == expected["request_id"]
298 assert deserialized['server'] == expected['server']
342 assert deserialized["server"] == expected["server"]
299 assert deserialized['date'].strftime('%Y-%m-%dT%H:%M:%S.%f') == \
343 assert deserialized["date"].strftime("%Y-%m-%dT%H:%M:%S.%f") == expected["date"]
300 expected['date']
344 assert deserialized["tags"][0][0] == "tag_name"
301 assert deserialized['tags'][0][0] == "tag_name"
345 assert deserialized["tags"][0][1] == "tag_value"
302 assert deserialized['tags'][0][1] == "tag_value"
346 assert deserialized["tags"][1][0] == "tag_name2"
303 assert deserialized['tags'][1][0] == "tag_name2"
347 assert deserialized["tags"][1][1] == 2
304 assert deserialized['tags'][1][1] == 2
305
348
306 def test_normal_payload_date_without_microseconds(self, log_schema):
349 def test_normal_payload_date_without_microseconds(self, log_schema):
307 import appenlight.tests.payload_examples as payload_examples
350 import appenlight.tests.payload_examples as payload_examples
351
308 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
352 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
309 LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime(
353 LOG_EXAMPLE[0]["date"] = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
310 '%Y-%m-%dT%H:%M:%S')
311 deserialized = log_schema.deserialize(LOG_EXAMPLE)
354 deserialized = log_schema.deserialize(LOG_EXAMPLE)
312 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M:%S') == \
355 assert (
313 LOG_EXAMPLE[0]['date']
356 deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M:%S")
357 == LOG_EXAMPLE[0]["date"]
358 )
314
359
315 def test_normal_payload_date_without_seconds(self, log_schema):
360 def test_normal_payload_date_without_seconds(self, log_schema):
316 import appenlight.tests.payload_examples as payload_examples
361 import appenlight.tests.payload_examples as payload_examples
362
317 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
363 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
318 LOG_EXAMPLE[0]['date'] = datetime.utcnow().date().strftime(
364 LOG_EXAMPLE[0]["date"] = datetime.utcnow().date().strftime("%Y-%m-%dT%H:%M")
319 '%Y-%m-%dT%H:%M')
320 deserialized = log_schema.deserialize(LOG_EXAMPLE)
365 deserialized = log_schema.deserialize(LOG_EXAMPLE)
321 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') == \
366 assert (
322 LOG_EXAMPLE[0]['date']
367 deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M") == LOG_EXAMPLE[0]["date"]
368 )
323
369
324 def test_payload_empty_date(self, log_schema):
370 def test_payload_empty_date(self, log_schema):
325 import appenlight.tests.payload_examples as payload_examples
371 import appenlight.tests.payload_examples as payload_examples
372
326 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
373 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
327 LOG_EXAMPLE[0]['date'] = None
374 LOG_EXAMPLE[0]["date"] = None
328 deserialized = log_schema.deserialize(LOG_EXAMPLE)
375 deserialized = log_schema.deserialize(LOG_EXAMPLE)
329 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None
376 assert deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M") is not None
330
377
331 def test_payload_no_date(self, log_schema):
378 def test_payload_no_date(self, log_schema):
332 import appenlight.tests.payload_examples as payload_examples
379 import appenlight.tests.payload_examples as payload_examples
380
333 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
381 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
334 LOG_EXAMPLE[0].pop('date', None)
382 LOG_EXAMPLE[0].pop("date", None)
335 deserialized = log_schema.deserialize(LOG_EXAMPLE)
383 deserialized = log_schema.deserialize(LOG_EXAMPLE)
336 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None
384 assert deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M") is not None
337
385
338
386
339 @pytest.mark.usefixtures('general_metrics_schema')
387 @pytest.mark.usefixtures("general_metrics_schema")
340 class TestAPIGeneralMetricsValidation(object):
388 class TestAPIGeneralMetricsValidation(object):
341 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
389 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
342 def test_no_payload(self, dummy_json, general_metrics_schema):
390 def test_no_payload(self, dummy_json, general_metrics_schema):
343 import colander
391 import colander
344
392
@@ -346,32 +394,37 b' class TestAPIGeneralMetricsValidation(object):'
346 general_metrics_schema.deserialize(dummy_json)
394 general_metrics_schema.deserialize(dummy_json)
347
395
348 def test_minimal_payload(self, general_metrics_schema):
396 def test_minimal_payload(self, general_metrics_schema):
349 dummy_json = [{'tags': [['counter_a', 15.5], ['counter_b', 63]]}]
397 dummy_json = [{"tags": [["counter_a", 15.5], ["counter_b", 63]]}]
350 deserialized = general_metrics_schema.deserialize(dummy_json)[0]
398 deserialized = general_metrics_schema.deserialize(dummy_json)[0]
351 expected = {'namespace': '',
399 expected = {
352 'server_name': 'unknown',
400 "namespace": "",
353 'tags': [('counter_a', 15.5), ('counter_b', 63)],
401 "server_name": "unknown",
354 'timestamp': datetime.utcnow()}
402 "tags": [("counter_a", 15.5), ("counter_b", 63)],
355 assert deserialized['namespace'] == expected['namespace']
403 "timestamp": datetime.utcnow(),
356 assert deserialized['server_name'] == expected['server_name']
404 }
357 assert deserialized['tags'] == expected['tags']
405 assert deserialized["namespace"] == expected["namespace"]
406 assert deserialized["server_name"] == expected["server_name"]
407 assert deserialized["tags"] == expected["tags"]
358
408
359 def test_normal_payload(self, general_metrics_schema):
409 def test_normal_payload(self, general_metrics_schema):
360 import appenlight.tests.payload_examples as payload_examples
410 import appenlight.tests.payload_examples as payload_examples
411
361 dummy_json = [payload_examples.METRICS_PAYLOAD]
412 dummy_json = [payload_examples.METRICS_PAYLOAD]
362 deserialized = general_metrics_schema.deserialize(dummy_json)[0]
413 deserialized = general_metrics_schema.deserialize(dummy_json)[0]
363 expected = {'namespace': 'some.monitor',
414 expected = {
364 'server_name': 'server.name',
415 "namespace": "some.monitor",
365 'tags': [('usage_foo', 15.5), ('usage_bar', 63)],
416 "server_name": "server.name",
366 'timestamp': datetime.utcnow()}
417 "tags": [("usage_foo", 15.5), ("usage_bar", 63)],
367 assert deserialized['namespace'] == expected['namespace']
418 "timestamp": datetime.utcnow(),
368 assert deserialized['server_name'] == expected['server_name']
419 }
369 assert deserialized['tags'] == expected['tags']
420 assert deserialized["namespace"] == expected["namespace"]
421 assert deserialized["server_name"] == expected["server_name"]
422 assert deserialized["tags"] == expected["tags"]
370
423
371
424
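Two details worth flagging here: the expected timestamp of datetime.utcnow() is never actually asserted (only namespace, server_name, and tags are compared), and the schema evidently normalizes the JSON tag pairs, which arrive as two-element arrays, into tuples. The normalization, in miniature:

    raw = [["counter_a", 15.5], ["counter_b", 63]]  # JSON arrays on the wire
    normalized = [tuple(pair) for pair in raw]      # what the schema yields
    assert normalized == [("counter_a", 15.5), ("counter_b", 63)]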
372 @pytest.mark.usefixtures('request_metrics_schema')
425 @pytest.mark.usefixtures("request_metrics_schema")
373 class TestAPIRequestMetricsValidation(object):
426 class TestAPIRequestMetricsValidation(object):
374 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
427 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
375 def test_no_payload(self, dummy_json, request_metrics_schema):
428 def test_no_payload(self, dummy_json, request_metrics_schema):
376 import colander
429 import colander
377
430
@@ -380,45 +433,58 b' class TestAPIRequestMetricsValidation(object):'
380
433
381 def test_normal_payload(self, request_metrics_schema):
434 def test_normal_payload(self, request_metrics_schema):
382 import appenlight.tests.payload_examples as payload_examples
435 import appenlight.tests.payload_examples as payload_examples
436
383 dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES
437 dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES
384 deserialized = request_metrics_schema.deserialize(dummy_json)[0]
438 deserialized = request_metrics_schema.deserialize(dummy_json)[0]
385 expected = {'metrics': [('dir/module:func',
439 expected = {
386 {'custom': 0.0,
440 "metrics": [
387 'custom_calls': 0.0,
441 (
388 'main': 0.01664,
442 "dir/module:func",
389 'nosql': 0.00061,
443 {
390 'nosql_calls': 23.0,
444 "custom": 0.0,
391 'remote': 0.0,
445 "custom_calls": 0.0,
392 'remote_calls': 0.0,
446 "main": 0.01664,
393 'requests': 1,
447 "nosql": 0.00061,
394 'sql': 0.00105,
448 "nosql_calls": 23.0,
395 'sql_calls': 2.0,
449 "remote": 0.0,
396 'tmpl': 0.0,
450 "remote_calls": 0.0,
397 'tmpl_calls': 0.0}),
451 "requests": 1,
398 ('SomeView.function',
452 "sql": 0.00105,
399 {'custom': 0.0,
453 "sql_calls": 2.0,
400 'custom_calls': 0.0,
454 "tmpl": 0.0,
401 'main': 0.647261,
455 "tmpl_calls": 0.0,
402 'nosql': 0.306554,
456 },
403 'nosql_calls': 140.0,
457 ),
404 'remote': 0.0,
458 (
405 'remote_calls': 0.0,
459 "SomeView.function",
406 'requests': 28,
460 {
407 'sql': 0.0,
461 "custom": 0.0,
408 'sql_calls': 0.0,
462 "custom_calls": 0.0,
409 'tmpl': 0.0,
463 "main": 0.647261,
410 'tmpl_calls': 0.0})],
464 "nosql": 0.306554,
411 'server': 'some.server.hostname',
465 "nosql_calls": 140.0,
412 'timestamp': datetime.utcnow()}
466 "remote": 0.0,
413 assert deserialized['server'] == expected['server']
467 "remote_calls": 0.0,
414 metric = deserialized['metrics'][0]
468 "requests": 28,
415 expected_metric = expected['metrics'][0]
469 "sql": 0.0,
470 "sql_calls": 0.0,
471 "tmpl": 0.0,
472 "tmpl_calls": 0.0,
473 },
474 ),
475 ],
476 "server": "some.server.hostname",
477 "timestamp": datetime.utcnow(),
478 }
479 assert deserialized["server"] == expected["server"]
480 metric = deserialized["metrics"][0]
481 expected_metric = expected["metrics"][0]
416 assert metric[0] == expected_metric[0]
482 assert metric[0] == expected_metric[0]
417 assert sorted(metric[1].items()) == sorted(expected_metric[1].items())
483 assert sorted(metric[1].items()) == sorted(expected_metric[1].items())
418
484
419
485
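The closing comparison via sorted(metric[1].items()) is more ceremony than necessary: plain dict equality already ignores key order, so `metric[1] == expected_metric[1]` would assert the same thing. Demonstrated on a toy pair:

    a = {"sql": 0.00105, "requests": 1}
    b = {"requests": 1, "sql": 0.00105}
    assert a == b                                   # dict equality ignores order
    assert sorted(a.items()) == sorted(b.items())   # equivalent, more verbose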
420 @pytest.mark.usefixtures('default_application')
486 @pytest.mark.usefixtures("default_application")
421 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
487 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
422 class TestAPIReportsView(object):
488 class TestAPIReportsView(object):
423 def test_no_json_payload(self, default_application):
489 def test_no_json_payload(self, default_application):
424 import colander
490 import colander
@@ -427,12 +493,11 b' class TestAPIReportsView(object):'
427
493
428 context = DummyContext()
494 context = DummyContext()
429 context.resource = ApplicationService.by_id(1)
495 context.resource = ApplicationService.by_id(1)
430 request = testing.DummyRequest(
496 request = testing.DummyRequest(headers={"Content-Type": "application/json"})
431 headers={'Content-Type': 'application/json'})
497 request.unsafe_json_body = ""
432 request.unsafe_json_body = ''
433 request.context = context
498 request.context = context
434 route = mock.Mock()
499 route = mock.Mock()
435 route.name = 'api_reports'
500 route.name = "api_reports"
436 request.matched_route = route
501 request.matched_route = route
437 with pytest.raises(colander.Invalid):
502 with pytest.raises(colander.Invalid):
438 response = reports_create(request)
503 response = reports_create(request)
@@ -442,8 +507,9 b' class TestAPIReportsView(object):'
442 from appenlight.views.api import reports_create
507 from appenlight.views.api import reports_create
443 from appenlight.models.services.application import ApplicationService
508 from appenlight.models.services.application import ApplicationService
444 from appenlight.models.report_group import ReportGroup
509 from appenlight.models.report_group import ReportGroup
510
445 route = mock.Mock()
511 route = mock.Mock()
446 route.name = 'api_reports'
512 route.name = "api_reports"
447 request = pyramid.threadlocal.get_current_request()
513 request = pyramid.threadlocal.get_current_request()
448 context = DummyContext()
514 context = DummyContext()
449 context.resource = ApplicationService.by_id(1)
515 context.resource = ApplicationService.by_id(1)
@@ -462,16 +528,19 b' class TestAPIReportsView(object):'
462 from appenlight.views.api import reports_create
528 from appenlight.views.api import reports_create
463 from appenlight.models.services.application import ApplicationService
529 from appenlight.models.services.application import ApplicationService
464 from appenlight.models.report_group import ReportGroup
530 from appenlight.models.report_group import ReportGroup
531
465 route = mock.Mock()
532 route = mock.Mock()
466 route.name = 'api_reports'
533 route.name = "api_reports"
467 request = pyramid.threadlocal.get_current_request()
534 request = pyramid.threadlocal.get_current_request()
468 context = DummyContext()
535 context = DummyContext()
469 context.resource = ApplicationService.by_id(1)
536 context.resource = ApplicationService.by_id(1)
470 request.context = context
537 request.context = context
471 request.matched_route = route
538 request.matched_route = route
472 PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
539 PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
473 request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
540 request.unsafe_json_body = [
474 copy.deepcopy(PYTHON_PAYLOAD)]
541 copy.deepcopy(PYTHON_PAYLOAD),
542 copy.deepcopy(PYTHON_PAYLOAD),
543 ]
475 reports_create(request)
544 reports_create(request)
476 query = DBSession.query(ReportGroup)
545 query = DBSession.query(ReportGroup)
477 report = query.first()
546 report = query.first()
@@ -483,8 +552,9 b' class TestAPIReportsView(object):'
483 from appenlight.views.api import reports_create
552 from appenlight.views.api import reports_create
484 from appenlight.models.services.application import ApplicationService
553 from appenlight.models.services.application import ApplicationService
485 from appenlight.models.report_group import ReportGroup
554 from appenlight.models.report_group import ReportGroup
555
486 route = mock.Mock()
556 route = mock.Mock()
487 route.name = 'api_reports'
557 route.name = "api_reports"
488 request = pyramid.threadlocal.get_current_request()
558 request = pyramid.threadlocal.get_current_request()
489 context = DummyContext()
559 context = DummyContext()
490 context.resource = ApplicationService.by_id(1)
560 context.resource = ApplicationService.by_id(1)
@@ -492,8 +562,10 b' class TestAPIReportsView(object):'
492 request.matched_route = route
562 request.matched_route = route
493 PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
563 PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
494 PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404
564 PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404
495 request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
565 request.unsafe_json_body = [
496 copy.deepcopy(PARSED_REPORT_404)]
566 copy.deepcopy(PYTHON_PAYLOAD),
567 copy.deepcopy(PARSED_REPORT_404),
568 ]
497 reports_create(request)
569 reports_create(request)
498 query = DBSession.query(ReportGroup)
570 query = DBSession.query(ReportGroup)
499 report = query.first()
571 report = query.first()
@@ -501,10 +573,9 b' class TestAPIReportsView(object):'
501 assert report.total_reports == 1
573 assert report.total_reports == 1
502
574
503
575
504 @pytest.mark.usefixtures('default_application')
576 @pytest.mark.usefixtures("default_application")
505 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
577 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
506 class TestAirbrakeXMLView(object):
578 class TestAirbrakeXMLView(object):
507
508 def test_normal_payload_parsing(self):
579 def test_normal_payload_parsing(self):
509 import datetime
580 import datetime
510 import defusedxml.ElementTree as ElementTree
581 import defusedxml.ElementTree as ElementTree
@@ -513,8 +584,7 b' class TestAirbrakeXMLView(object):'
513 from appenlight.validators import ReportListSchema_0_5
584 from appenlight.validators import ReportListSchema_0_5
514
585
515 context = DummyContext()
586 context = DummyContext()
516 request = testing.DummyRequest(
587 request = testing.DummyRequest(headers={"Content-Type": "application/xml"})
517 headers={'Content-Type': 'application/xml'})
518 request.context = context
588 request.context = context
519 request.context.possibly_public = False
589 request.context.possibly_public = False
520 root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
590 root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
@@ -522,37 +592,45 b' class TestAirbrakeXMLView(object):'
522 error_dict = parse_airbrake_xml(request)
592 error_dict = parse_airbrake_xml(request)
523 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
593 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
524 deserialized_report = schema.deserialize([error_dict])[0]
594 deserialized_report = schema.deserialize([error_dict])[0]
525 assert deserialized_report['client'] == 'Airbrake Notifier'
595 assert deserialized_report["client"] == "Airbrake Notifier"
526 assert deserialized_report['error'] == 'NameError: undefined local variable or method `sdfdfdf\' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>'
596 assert (
527 assert deserialized_report['http_status'] == 500
597 deserialized_report["error"]
528 assert deserialized_report['language'] == 'unknown'
598 == "NameError: undefined local variable or method `sdfdfdf' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>"
529 assert deserialized_report['message'] == ''
599 )
530 assert deserialized_report['occurences'] == 1
600 assert deserialized_report["http_status"] == 500
531 assert deserialized_report['priority'] == 5
601 assert deserialized_report["language"] == "unknown"
532 d_request = deserialized_report['request']
602 assert deserialized_report["message"] == ""
533 assert d_request['GET'] == {'test': '1234'}
603 assert deserialized_report["occurences"] == 1
534 assert d_request['action_dispatch.request.parameters'] == {
604 assert deserialized_report["priority"] == 5
535 'action': 'index',
605 d_request = deserialized_report["request"]
536 'controller': 'welcome',
606 assert d_request["GET"] == {"test": "1234"}
537 'test': '1234'}
607 assert d_request["action_dispatch.request.parameters"] == {
538 assert deserialized_report['request_id'] == 'c11b2267f3ad8b00a1768cae35559fa1'
608 "action": "index",
539 assert deserialized_report['server'] == 'ergo-desktop'
609 "controller": "welcome",
540 assert deserialized_report['traceback'][0] == {
610 "test": "1234",
541 'cline': 'block in start_thread',
611 }
542 'file': '/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb',
612 assert deserialized_report["request_id"] == "c11b2267f3ad8b00a1768cae35559fa1"
543 'fn': 'block in start_thread',
613 assert deserialized_report["server"] == "ergo-desktop"
544 'line': '191',
614 assert deserialized_report["traceback"][0] == {
545 'module': '',
615 "cline": "block in start_thread",
546 'vars': {}}
616 "file": "/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb",
547 assert deserialized_report['traceback'][-1] == {
617 "fn": "block in start_thread",
548 'cline': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
618 "line": "191",
549 'file': '[PROJECT_ROOT]/app/views/welcome/index.html.erb',
619 "module": "",
550 'fn': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
620 "vars": {},
551 'line': '3',
621 }
552 'module': '',
622 assert deserialized_report["traceback"][-1] == {
553 'vars': {}}
623 "cline": "_app_views_welcome_index_html_erb___2570061166873166679_31748940",
554 assert deserialized_report['url'] == 'http://0.0.0.0:3000/welcome/index?test=1234'
624 "file": "[PROJECT_ROOT]/app/views/welcome/index.html.erb",
555 assert deserialized_report['view_name'] == 'welcome:index'
625 "fn": "_app_views_welcome_index_html_erb___2570061166873166679_31748940",
626 "line": "3",
627 "module": "",
628 "vars": {},
629 }
630 assert (
631 deserialized_report["url"] == "http://0.0.0.0:3000/welcome/index?test=1234"
632 )
633 assert deserialized_report["view_name"] == "welcome:index"
556
634
557 def test_normal_payload_view(self):
635 def test_normal_payload_view(self):
558 import defusedxml.ElementTree as ElementTree
636 import defusedxml.ElementTree as ElementTree
@@ -563,21 +641,20 b' class TestAirbrakeXMLView(object):'
563
641
564 context = DummyContext()
642 context = DummyContext()
565 context.resource = ApplicationService.by_id(1)
643 context.resource = ApplicationService.by_id(1)
566 request = testing.DummyRequest(
644 request = testing.DummyRequest(headers={"Content-Type": "application/xml"})
567 headers={'Content-Type': 'application/xml'})
568 request.context = context
645 request.context = context
569 request.context.possibly_public = False
646 request.context.possibly_public = False
570 root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
647 root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
571 request.context.airbrake_xml_etree = root
648 request.context.airbrake_xml_etree = root
572 route = mock.Mock()
649 route = mock.Mock()
573 route.name = 'api_airbrake'
650 route.name = "api_airbrake"
574 request.matched_route = route
651 request.matched_route = route
575 result = airbrake_xml_compat(request)
652 result = airbrake_xml_compat(request)
576 assert '<notice><id>' in result
653 assert "<notice><id>" in result
577
654
578
655
579 @pytest.mark.usefixtures('default_application')
656 @pytest.mark.usefixtures("default_application")
580 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
657 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
581 class TestAPILogView(object):
658 class TestAPILogView(object):
582 def test_no_json_payload(self, base_app):
659 def test_no_json_payload(self, base_app):
583 import colander
660 import colander
@@ -586,13 +663,12 b' class TestAPILogView(object):'
586
663
587 context = DummyContext()
664 context = DummyContext()
588 context.resource = ApplicationService.by_id(1)
665 context.resource = ApplicationService.by_id(1)
589 request = testing.DummyRequest(
666 request = testing.DummyRequest(headers={"Content-Type": "application/json"})
590 headers={'Content-Type': 'application/json'})
591 request.context = context
667 request.context = context
592 request.registry = base_app.registry
668 request.registry = base_app.registry
593 request.unsafe_json_body = ''
669 request.unsafe_json_body = ""
594 route = mock.Mock()
670 route = mock.Mock()
595 route.name = 'api_logs'
671 route.name = "api_logs"
596 request.matched_route = route
672 request.matched_route = route
597 with pytest.raises(colander.Invalid):
673 with pytest.raises(colander.Invalid):
598 response = logs_create(request)
674 response = logs_create(request)
@@ -602,15 +678,15 b' class TestAPILogView(object):'
602 from appenlight.models.log import Log
678 from appenlight.models.log import Log
603 from appenlight.views.api import logs_create
679 from appenlight.views.api import logs_create
604 from appenlight.models.services.application import ApplicationService
680 from appenlight.models.services.application import ApplicationService
681
605 route = mock.Mock()
682 route = mock.Mock()
606 route.name = 'api_logs'
683 route.name = "api_logs"
607 request = pyramid.threadlocal.get_current_request()
684 request = pyramid.threadlocal.get_current_request()
608 context = DummyContext()
685 context = DummyContext()
609 context.resource = ApplicationService.by_id(1)
686 context.resource = ApplicationService.by_id(1)
610 request.context = context
687 request.context = context
611 request.matched_route = route
688 request.matched_route = route
612 request.unsafe_json_body = [copy.deepcopy(
689 request.unsafe_json_body = [copy.deepcopy(payload_examples.LOG_EXAMPLES[0])]
613 payload_examples.LOG_EXAMPLES[0])]
614 logs_create(request)
690 logs_create(request)
615 query = DBSession.query(Log)
691 query = DBSession.query(Log)
616 log = query.first()
692 log = query.first()
@@ -622,8 +698,9 b' class TestAPILogView(object):'
622 from appenlight.models.log import Log
698 from appenlight.models.log import Log
623 from appenlight.views.api import logs_create
699 from appenlight.views.api import logs_create
624 from appenlight.models.services.application import ApplicationService
700 from appenlight.models.services.application import ApplicationService
701
625 route = mock.Mock()
702 route = mock.Mock()
626 route.name = 'api_logs'
703 route.name = "api_logs"
627 request = pyramid.threadlocal.get_current_request()
704 request = pyramid.threadlocal.get_current_request()
628 context = DummyContext()
705 context = DummyContext()
629 context.resource = ApplicationService.by_id(1)
706 context.resource = ApplicationService.by_id(1)
@@ -643,8 +720,9 b' class TestAPILogView(object):'
643 from appenlight.models.log import Log
720 from appenlight.models.log import Log
644 from appenlight.views.api import logs_create
721 from appenlight.views.api import logs_create
645 from appenlight.models.services.application import ApplicationService
722 from appenlight.models.services.application import ApplicationService
723
646 route = mock.Mock()
724 route = mock.Mock()
647 route.name = 'api_logs'
725 route.name = "api_logs"
648 request = pyramid.threadlocal.get_current_request()
726 request = pyramid.threadlocal.get_current_request()
649 context = DummyContext()
727 context = DummyContext()
650 context.resource = ApplicationService.by_id(1)
728 context.resource = ApplicationService.by_id(1)
@@ -653,8 +731,8 b' class TestAPILogView(object):'
653
731
654 LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0])
732 LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0])
655 LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1])
733 LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1])
656 LOG_PAYLOAD['primary_key'] = 'X2'
734 LOG_PAYLOAD["primary_key"] = "X2"
657 LOG_PAYLOAD2['primary_key'] = 'X2'
735 LOG_PAYLOAD2["primary_key"] = "X2"
658 request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2]
736 request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2]
659 logs_create(request)
737 logs_create(request)
660
738
@@ -662,22 +740,23 b' class TestAPILogView(object):'
662 assert query.count() == 1
740 assert query.count() == 1
663 assert query[0].message == "OMG ValueError happened2"
741 assert query[0].message == "OMG ValueError happened2"
664
742
665 @pytest.mark.usefixtures('default_application')
743
666 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
744 @pytest.mark.usefixtures("default_application")
745 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
667 class TestAPIGeneralMetricsView(object):
746 class TestAPIGeneralMetricsView(object):
668 def test_no_json_payload(self, base_app):
747 def test_no_json_payload(self, base_app):
669 import colander
748 import colander
670 from appenlight.models.services.application import ApplicationService
749 from appenlight.models.services.application import ApplicationService
671 from appenlight.views.api import general_metrics_create
750 from appenlight.views.api import general_metrics_create
751
672 route = mock.Mock()
752 route = mock.Mock()
673 route.name = 'api_general_metrics'
753 route.name = "api_general_metrics"
674 context = DummyContext()
754 context = DummyContext()
675 context.resource = ApplicationService.by_id(1)
755 context.resource = ApplicationService.by_id(1)
676 request = testing.DummyRequest(
756 request = testing.DummyRequest(headers={"Content-Type": "application/json"})
677 headers={'Content-Type': 'application/json'})
678 request.context = context
757 request.context = context
679 request.registry = base_app.registry
758 request.registry = base_app.registry
680 request.unsafe_json_body = ''
759 request.unsafe_json_body = ""
681 request.matched_route = route
760 request.matched_route = route
682 with pytest.raises(colander.Invalid):
761 with pytest.raises(colander.Invalid):
683 general_metrics_create(request)
762 general_metrics_create(request)
@@ -687,8 +766,9 b' class TestAPIGeneralMetricsView(object):'
687 from appenlight.models.metric import Metric
766 from appenlight.models.metric import Metric
688 from appenlight.views.api import general_metrics_create
767 from appenlight.views.api import general_metrics_create
689 from appenlight.models.services.application import ApplicationService
768 from appenlight.models.services.application import ApplicationService
769
690 route = mock.Mock()
770 route = mock.Mock()
691 route.name = 'api_general_metric'
771 route.name = "api_general_metric"
692 request = pyramid.threadlocal.get_current_request()
772 request = pyramid.threadlocal.get_current_request()
693 request.matched_route = route
773 request.matched_route = route
694 context = DummyContext()
774 context = DummyContext()
@@ -699,15 +779,16 b' class TestAPIGeneralMetricsView(object):'
699 query = DBSession.query(Metric)
779 query = DBSession.query(Metric)
700 metric = query.first()
780 metric = query.first()
701 assert query.count() == 1
781 assert query.count() == 1
702 assert metric.namespace == 'some.monitor'
782 assert metric.namespace == "some.monitor"
703
783
704 def test_multiple_json_payload(self):
784 def test_multiple_json_payload(self):
705 import appenlight.tests.payload_examples as payload_examples
785 import appenlight.tests.payload_examples as payload_examples
706 from appenlight.models.metric import Metric
786 from appenlight.models.metric import Metric
707 from appenlight.views.api import general_metrics_create
787 from appenlight.views.api import general_metrics_create
708 from appenlight.models.services.application import ApplicationService
788 from appenlight.models.services.application import ApplicationService
789
709 route = mock.Mock()
790 route = mock.Mock()
710 route.name = 'api_general_metrics'
791 route.name = "api_general_metrics"
711 request = pyramid.threadlocal.get_current_request()
792 request = pyramid.threadlocal.get_current_request()
712 request.matched_route = route
793 request.matched_route = route
713 context = DummyContext()
794 context = DummyContext()
@@ -721,70 +802,49 b' class TestAPIGeneralMetricsView(object):'
721 query = DBSession.query(Metric)
802 query = DBSession.query(Metric)
722 metric = query.first()
803 metric = query.first()
723 assert query.count() == 2
804 assert query.count() == 2
724 assert metric.namespace == 'some.monitor'
805 assert metric.namespace == "some.monitor"
725
806
726
807
727 class TestGroupingMessageReplacements(object):
808 class TestGroupingMessageReplacements(object):
728 def replace_default_repr_python(self):
809 def replace_default_repr_python(self):
729 test_str = '''
810 test_str = """
730 ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)'))
811 ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)'))
731 '''
812 """
732 regex = r'<(.*?) object at (.*?)>'
813 regex = r"<(.*?) object at (.*?)>"
733
814
734
815
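replace_default_repr_python builds a regex but never applies it, and since its name does not start with test_, pytest will not even collect it; the apparent intent is to normalize volatile default reprs (object addresses) so otherwise-identical errors group together. A sketch of the substitution the stub seems to be driving at:

    import re

    regex = r"<(.*?) object at (.*?)>"
    msg = ("ConnectTimeoutError((<urllib3.connection.HTTPConnection "
           "object at 0x7f87a0ba9fd0>, 'timed out'))")
    normalized = re.sub(regex, r"<\1 object at ADDR>", msg)
    # all hex addresses collapse to ADDR, so equal errors compare equal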
735 class TestRulesKeyGetter(object):
816 class TestRulesKeyGetter(object):
736 def test_default_dict_getter_top_key(self):
817 def test_default_dict_getter_top_key(self):
737 from appenlight.lib.rule import Rule
818 from appenlight.lib.rule import Rule
738 struct = {
819
739 "a": {
820 struct = {"a": {"b": "b", "c": {"d": "d", "g": {"h": "h"}}, "e": "e"}, "f": "f"}
740 "b": 'b',
741 "c": {
742 "d": 'd',
743 "g": {
744 "h": 'h'
745 }
746 },
747 "e": 'e'
748 },
749 "f": 'f'
750 }
751 result = Rule.default_dict_struct_getter(struct, "a")
821 result = Rule.default_dict_struct_getter(struct, "a")
752 assert result == struct['a']
822 assert result == struct["a"]
753
823
754 def test_default_dict_getter_sub_key(self):
824 def test_default_dict_getter_sub_key(self):
755 from appenlight.lib.rule import Rule
825 from appenlight.lib.rule import Rule
756 struct = {
826
757 "a": {
827 struct = {"a": {"b": "b", "c": {"d": "d", "g": {"h": "h"}}, "e": "e"}, "f": "f"}
758 "b": 'b',
828 result = Rule.default_dict_struct_getter(struct, "a:b")
759 "c": {
829 assert result == struct["a"]["b"]
760 "d": 'd',
830 result = Rule.default_dict_struct_getter(struct, "a:c:d")
761 "g": {
831 assert result == struct["a"]["c"]["d"]
762 "h": 'h'
763 }
764 },
765 "e": 'e'
766 },
767 "f": 'f'
768 }
769 result = Rule.default_dict_struct_getter(struct, 'a:b')
770 assert result == struct['a']['b']
771 result = Rule.default_dict_struct_getter(struct, 'a:c:d')
772 assert result == struct['a']['c']['d']
773
832
774 def test_default_obj_getter_top_key(self):
833 def test_default_obj_getter_top_key(self):
775 from appenlight.lib.rule import Rule
834 from appenlight.lib.rule import Rule
835
776 class TestStruct(object):
836 class TestStruct(object):
777 def __init__(self, a, b):
837 def __init__(self, a, b):
778 self.a = a
838 self.a = a
779 self.b = b
839 self.b = b
780
840
781 struct = TestStruct(a='a',
841 struct = TestStruct(a="a", b=TestStruct(a="x", b="y"))
782 b=TestStruct(a='x', b='y'))
783 result = Rule.default_obj_struct_getter(struct, "a")
842 result = Rule.default_obj_struct_getter(struct, "a")
784 assert result == struct.a
843 assert result == struct.a
785
844
786 def test_default_obj_getter_sub_key(self):
845 def test_default_obj_getter_sub_key(self):
787 from appenlight.lib.rule import Rule
846 from appenlight.lib.rule import Rule
847
788 class TestStruct(object):
848 class TestStruct(object):
789 def __init__(self, name, a, b):
849 def __init__(self, name, a, b):
790 self.name = name
850 self.name = name
@@ -792,121 +852,122 b' class TestRulesKeyGetter(object):'
792 self.b = b
852 self.b = b
793
853
794 def __repr__(self):
854 def __repr__(self):
795 return '<obj {}>'.format(self.name)
855 return "<obj {}>".format(self.name)
796
856
797 c = TestStruct('c', a=5, b='z')
857 c = TestStruct("c", a=5, b="z")
798 b = TestStruct('b', a=c, b='y')
858 b = TestStruct("b", a=c, b="y")
799 struct = TestStruct('a', a='a', b=b)
859 struct = TestStruct("a", a="a", b=b)
800 result = Rule.default_obj_struct_getter(struct, 'b:b')
860 result = Rule.default_obj_struct_getter(struct, "b:b")
801 assert result == struct.b.b
861 assert result == struct.b.b
802 result = Rule.default_obj_struct_getter(struct, 'b:a:b')
862 result = Rule.default_obj_struct_getter(struct, "b:a:b")
803 assert result == struct.b.a.b
863 assert result == struct.b.a.b
804
864
805
865
806 @pytest.mark.usefixtures('report_type_matrix')
866 @pytest.mark.usefixtures("report_type_matrix")
807 class TestRulesParsing():
867 class TestRulesParsing:
808 @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
868 @pytest.mark.parametrize(
809 ('eq', 500, 500, True),
869 "op, struct_value, test_value, match_result",
810 ('eq', 600, 500, False),
870 [
811 ('eq', 300, 500, False),
871 ("eq", 500, 500, True),
812 ('eq', "300", 500, False),
872 ("eq", 600, 500, False),
813 ('eq', "600", 500, False),
873 ("eq", 300, 500, False),
814 ('eq', "500", 500, True),
874 ("eq", "300", 500, False),
815 ('ne', 500, 500, False),
875 ("eq", "600", 500, False),
816 ('ne', 600, 500, True),
876 ("eq", "500", 500, True),
817 ('ne', 300, 500, True),
877 ("ne", 500, 500, False),
818 ('ne', "300", 500, True),
878 ("ne", 600, 500, True),
819 ('ne', "600", 500, True),
879 ("ne", 300, 500, True),
820 ('ne', "500", 500, False),
880 ("ne", "300", 500, True),
821 ('ge', 500, 500, True),
881 ("ne", "600", 500, True),
822 ('ge', 600, 500, True),
882 ("ne", "500", 500, False),
823 ('ge', 499, 500, False),
883 ("ge", 500, 500, True),
824 ('gt', 499, 500, False),
884 ("ge", 600, 500, True),
825 ('gt', 500, 500, False),
885 ("ge", 499, 500, False),
826 ('gt', 501, 500, True),
886 ("gt", 499, 500, False),
827 ('le', 499, 500, True),
887 ("gt", 500, 500, False),
828 ('le', 500, 500, True),
888 ("gt", 501, 500, True),
829 ('le', 501, 500, False),
889 ("le", 499, 500, True),
830 ('lt', 499, 500, True),
890 ("le", 500, 500, True),
831 ('lt', 500, 500, False),
891 ("le", 501, 500, False),
832 ('lt', 501, 500, False),
892 ("lt", 499, 500, True),
833 ])
893 ("lt", 500, 500, False),
834 def test_single_op_int(self, op, struct_value, test_value, match_result,
894 ("lt", 501, 500, False),
835 report_type_matrix):
895 ],
896 )
897 def test_single_op_int(
898 self, op, struct_value, test_value, match_result, report_type_matrix
899 ):
836 from appenlight.lib.rule import Rule
900 from appenlight.lib.rule import Rule
837 rule_config = {
901
838 "op": op,
902 rule_config = {"op": op, "field": "http_status", "value": test_value}
839 "field": "http_status",
840 "value": test_value
841 }
842 rule = Rule(rule_config, report_type_matrix)
903 rule = Rule(rule_config, report_type_matrix)
843
904
844 data = {
905 data = {"http_status": struct_value}
845 "http_status": struct_value
846 }
847 assert rule.match(data) is match_result
906 assert rule.match(data) is match_result
848
907
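
# A sketch of how such single-op rules can be evaluated (illustrative,
# not Rule's actual internals): the op name maps to a Python operator
# and both sides are normalized to the field's declared type first,
# which is why ("eq", "500", 500) in the table above matches.
import operator

OPS = {"eq": operator.eq, "ne": operator.ne, "ge": operator.ge,
       "gt": operator.gt, "le": operator.le, "lt": operator.lt}

def match_single(op, coerce, struct_value, test_value):
    return OPS[op](coerce(struct_value), coerce(test_value))

assert match_single("eq", int, "500", 500) is True
assert match_single("ge", float, "500.01", 500.02) is False
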
849 @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
908 @pytest.mark.parametrize(
850 ('ge', "500.01", 500, True),
909 "op, struct_value, test_value, match_result",
851 ('ge', "500.01", 500.02, False),
910 [
852 ('le', "500.01", 500.02, True)
911 ("ge", "500.01", 500, True),
853 ])
912 ("ge", "500.01", 500.02, False),
854 def test_single_op_float(self, op, struct_value, test_value, match_result,
913 ("le", "500.01", 500.02, True),
855 report_type_matrix):
914 ],
915 )
916 def test_single_op_float(
917 self, op, struct_value, test_value, match_result, report_type_matrix
918 ):
856 from appenlight.lib.rule import Rule
919 from appenlight.lib.rule import Rule
857 rule_config = {
920
858 "op": op,
921 rule_config = {"op": op, "field": "duration", "value": test_value}
859 "field": "duration",
860 "value": test_value
861 }
862 rule = Rule(rule_config, report_type_matrix)
922 rule = Rule(rule_config, report_type_matrix)
863
923
864 data = {
924 data = {"duration": struct_value}
865 "duration": struct_value
866 }
867 assert rule.match(data) is match_result
925 assert rule.match(data) is match_result
868
926
869 @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
927 @pytest.mark.parametrize(
870 ('contains', 'foo bar baz', 'foo', True),
928 "op, struct_value, test_value, match_result",
871 ('contains', 'foo bar baz', 'bar', True),
929 [
872 ('contains', 'foo bar baz', 'dupa', False),
930 ("contains", "foo bar baz", "foo", True),
873 ('startswith', 'foo bar baz', 'foo', True),
931 ("contains", "foo bar baz", "bar", True),
874 ('startswith', 'foo bar baz', 'bar', False),
932 ("contains", "foo bar baz", "dupa", False),
875 ('endswith', 'foo bar baz', 'baz', True),
933 ("startswith", "foo bar baz", "foo", True),
876 ('endswith', 'foo bar baz', 'bar', False),
934 ("startswith", "foo bar baz", "bar", False),
877 ])
935 ("endswith", "foo bar baz", "baz", True),
878 def test_single_op_string(self, op, struct_value, test_value,
936 ("endswith", "foo bar baz", "bar", False),
879 match_result, report_type_matrix):
937 ],
938 )
939 def test_single_op_string(
940 self, op, struct_value, test_value, match_result, report_type_matrix
941 ):
880 from appenlight.lib.rule import Rule
942 from appenlight.lib.rule import Rule
881 rule_config = {
943
882 "op": op,
944 rule_config = {"op": op, "field": "error", "value": test_value}
883 "field": "error",
884 "value": test_value
885 }
886 rule = Rule(rule_config, report_type_matrix)
945 rule = Rule(rule_config, report_type_matrix)
887
946
888 data = {
947 data = {"error": struct_value}
889 "error": struct_value
890 }
891 assert rule.match(data) is match_result
948 assert rule.match(data) is match_result
892
949
893 @pytest.mark.parametrize("field, value, s_type", [
950 @pytest.mark.parametrize(
894 ('field_unicode', 500, str),
951 "field, value, s_type",
895 ('field_unicode', 500.0, str),
952 [
896 ('field_unicode', "500", str),
953 ("field_unicode", 500, str),
897 ('field_int', "500", int),
954 ("field_unicode", 500.0, str),
898 ('field_int', 500, int),
955 ("field_unicode", "500", str),
899 ('field_int', 500.0, int),
956 ("field_int", "500", int),
900 ('field_float', "500", float),
957 ("field_int", 500, int),
901 ('field_float', 500, float),
958 ("field_int", 500.0, int),
902 ('field_float', 500.0, float),
959 ("field_float", "500", float),
903 ])
960 ("field_float", 500, float),
961 ("field_float", 500.0, float),
962 ],
963 )
904 def test_type_normalization(self, field, value, s_type):
964 def test_type_normalization(self, field, value, s_type):
905 from appenlight.lib.rule import Rule
965 from appenlight.lib.rule import Rule
966
906 type_matrix = {
967 type_matrix = {
907 'field_unicode': {"type": 'unicode'},
968 "field_unicode": {"type": "unicode"},
908 'field_float': {"type": 'float'},
969 "field_float": {"type": "float"},
909 'field_int': {"type": 'int'},
970 "field_int": {"type": "int"},
910 }
971 }
911
972
912 rule = Rule({}, type_matrix)
973 rule = Rule({}, type_matrix)
@@ -914,280 +975,275 @@ class TestRulesParsing():
        assert isinstance(n_value, s_type) is True


@pytest.mark.usefixtures("report_type_matrix")
class TestNestedRuleParsing:
    @pytest.mark.parametrize(
        "data, result",
        [
            ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, False),
            ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, False),
            ({"http_status": 500, "group": {"priority": 1, "occurences": 11}}, False),
            ({"http_status": 101, "group": {"priority": 3, "occurences": 5}}, True),
        ],
    )
    def test_NOT_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule

        rule_config = {
            "field": "__NOT__",
            "rules": [
                {"op": "ge", "field": "group:occurences", "value": "10"},
                {"op": "ge", "field": "group:priority", "value": "4"},
            ],
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

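
# Judging by the expectation table above, "__NOT__" matches only when
# none of its subrules match (each of the first three rows satisfies at
# least one subrule and yields False). A sketch under that assumption,
# not the library's evaluator:
def match_not(subrule_results):
    return not any(subrule_results)

assert match_not([True, False]) is False  # priority 1, occurences 11
assert match_not([False, False]) is True  # priority 3, occurences 5
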
951 @pytest.mark.parametrize("data, result", [
1003 @pytest.mark.parametrize(
952 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
1004 "data, result",
953 True),
1005 [
954 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
1006 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, True),
955 True),
1007 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, True),
956 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
1008 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, True),
957 True),
1009 ({"http_status": 101, "group": {"priority": 3, "occurences": 11}}, False),
958 ({"http_status": 101, "group": {"priority": 3, "occurences": 11}},
1010 ],
959 False),
1011 )
960 ])
961 def test_nested_OR_AND_rule(self, data, result, report_type_matrix):
1012 def test_nested_OR_AND_rule(self, data, result, report_type_matrix):
962 from appenlight.lib.rule import Rule
1013 from appenlight.lib.rule import Rule
1014
963 rule_config = {
1015 rule_config = {
964 "field": "__OR__",
1016 "field": "__OR__",
965 "rules": [
1017 "rules": [
966 {
1018 {
967 "field": "__AND__",
1019 "field": "__AND__",
968 "rules": [
1020 "rules": [
969 {
1021 {"op": "ge", "field": "group:occurences", "value": "10"},
970 "op": "ge",
1022 {"op": "ge", "field": "group:priority", "value": "4"},
971 "field": "group:occurences",
1023 ],
972 "value": "10"
973 },
974 {
975 "op": "ge",
976 "field": "group:priority",
977 "value": "4"
978 }
979 ]
980 },
1024 },
981 {
1025 {"op": "eq", "field": "http_status", "value": "500"},
982 "op": "eq",
1026 ],
983 "field": "http_status",
984 "value": "500"
985 }
986 ]
987 }
1027 }
988
1028
989 rule = Rule(rule_config, report_type_matrix)
1029 rule = Rule(rule_config, report_type_matrix)
990 assert rule.match(data) is result
1030 assert rule.match(data) is result
991
1031
992 @pytest.mark.parametrize("data, result", [
1032 @pytest.mark.parametrize(
993 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
1033 "data, result",
994 True),
1034 [
995 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
1035 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, True),
996 True),
1036 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, True),
997 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
1037 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, True),
998 True),
1038 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, False),
999 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
1039 ],
1000 False),
1040 )
1001 ])
1002 def test_nested_OR_OR_rule(self, data, result, report_type_matrix):
1041 def test_nested_OR_OR_rule(self, data, result, report_type_matrix):
1003 from appenlight.lib.rule import Rule
1042 from appenlight.lib.rule import Rule
1043
1004 rule_config = {
1044 rule_config = {
1005 "field": "__OR__",
1045 "field": "__OR__",
1006 "rules": [
1046 "rules": [
1007 {"field": "__OR__",
1047 {
1008 "rules": [
1048 "field": "__OR__",
1009 {"op": "ge",
1049 "rules": [
1010 "field": "group:occurences",
1050 {"op": "ge", "field": "group:occurences", "value": "10"},
1011 "value": "10"
1051 {"op": "ge", "field": "group:priority", "value": "4"},
1012 },
1052 ],
1013 {"op": "ge",
1053 },
1014 "field": "group:priority",
1054 {"op": "eq", "field": "http_status", "value": "500"},
1015 "value": "4"
1055 ],
1016 }
1017 ]
1018 },
1019 {"op": "eq",
1020 "field": "http_status",
1021 "value": "500"
1022 }
1023 ]
1024 }
1056 }
1025
1057
1026 rule = Rule(rule_config, report_type_matrix)
1058 rule = Rule(rule_config, report_type_matrix)
1027 assert rule.match(data) is result
1059 assert rule.match(data) is result
1028
1060
1029 @pytest.mark.parametrize("data, result", [
1061 @pytest.mark.parametrize(
1030 ({"http_status": 500, "group": {"priority": 7, "occurences": 11}},
1062 "data, result",
1031 True),
1063 [
1032 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
1064 ({"http_status": 500, "group": {"priority": 7, "occurences": 11}}, True),
1033 False),
1065 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, False),
1034 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
1066 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, False),
1035 False),
1067 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, False),
1036 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
1068 ],
1037 False),
1069 )
1038 ])
1039 def test_nested_AND_AND_rule(self, data, result, report_type_matrix):
1070 def test_nested_AND_AND_rule(self, data, result, report_type_matrix):
1040 from appenlight.lib.rule import Rule
1071 from appenlight.lib.rule import Rule
1072
1041 rule_config = {
1073 rule_config = {
1042 "field": "__AND__",
1074 "field": "__AND__",
1043 "rules": [
1075 "rules": [
1044 {"field": "__AND__",
1076 {
1045 "rules": [
1077 "field": "__AND__",
1046 {"op": "ge",
1078 "rules": [
1047 "field": "group:occurences",
1079 {"op": "ge", "field": "group:occurences", "value": "10"},
1048 "value": "10"
1080 {"op": "ge", "field": "group:priority", "value": "4"},
1049 },
1081 ],
1050 {"op": "ge",
1082 },
1051 "field": "group:priority",
1083 {"op": "eq", "field": "http_status", "value": "500"},
1052 "value": "4"
1084 ],
1053 }]
1054 },
1055 {"op": "eq",
1056 "field": "http_status",
1057 "value": "500"
1058 }
1059 ]
1060 }
1085 }
1061
1086
1062 rule = Rule(rule_config, report_type_matrix)
1087 rule = Rule(rule_config, report_type_matrix)
1063 assert rule.match(data) is result
1088 assert rule.match(data) is result
1064
1089
1065 @pytest.mark.parametrize("data, result", [
1090 @pytest.mark.parametrize(
1066 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1091 "data, result",
1067 "url_path": '/test/register', "error": "foo test bar"}, True),
1092 [
1068 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1093 (
1069 "url_path": '/test/register', "error": "foo INVALID bar"}, False),
1094 {
1070 ])
1095 "http_status": 500,
1096 "group": {"priority": 7, "occurences": 11},
1097 "url_path": "/test/register",
1098 "error": "foo test bar",
1099 },
1100 True,
1101 ),
1102 (
1103 {
1104 "http_status": 500,
1105 "group": {"priority": 7, "occurences": 11},
1106 "url_path": "/test/register",
1107 "error": "foo INVALID bar",
1108 },
1109 False,
1110 ),
1111 ],
1112 )
1071 def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix):
1113 def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix):
1072 from appenlight.lib.rule import Rule
1114 from appenlight.lib.rule import Rule
1115
1073 rule_config = {
1116 rule_config = {
1074 "field": "__AND__",
1117 "field": "__AND__",
1075 "rules": [
1118 "rules": [
1076 {"field": "__AND__",
1119 {
1077 "rules": [
1120 "field": "__AND__",
1078 {"op": "ge",
1121 "rules": [
1079 "field": "group:occurences",
1122 {"op": "ge", "field": "group:occurences", "value": "10"},
1080 "value": "10"
1123 {
1081 },
1124 "field": "__AND__",
1082 {"field": "__AND__",
1125 "rules": [
1083 "rules": [
1126 {
1084 {"op": "endswith",
1127 "op": "endswith",
1085 "field": "url_path",
1128 "field": "url_path",
1086 "value": "register"},
1129 "value": "register",
1087 {"op": "contains",
1130 },
1088 "field": "error",
1131 {"op": "contains", "field": "error", "value": "test"},
1089 "value": "test"}]}]
1132 ],
1090 },
1133 },
1091 {"op": "eq",
1134 ],
1092 "field": "http_status",
1135 },
1093 "value": "500"
1136 {"op": "eq", "field": "http_status", "value": "500"},
1094 }
1137 ],
1095 ]
1096 }
1138 }
1097
1139
1098 rule = Rule(rule_config, report_type_matrix)
1140 rule = Rule(rule_config, report_type_matrix)
1099 assert rule.match(data) is result
1141 assert rule.match(data) is result
1100
1142
1101 @pytest.mark.parametrize("data, result", [
1143 @pytest.mark.parametrize(
1102 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1144 "data, result",
1103 "url_path": 6, "error": 3}, False),
1145 [
1104 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1146 (
1105 "url_path": '/test/register', "error": "foo INVALID bar"}, True),
1147 {
1106 ])
1148 "http_status": 500,
1149 "group": {"priority": 7, "occurences": 11},
1150 "url_path": 6,
1151 "error": 3,
1152 },
1153 False,
1154 ),
1155 (
1156 {
1157 "http_status": 500,
1158 "group": {"priority": 7, "occurences": 11},
1159 "url_path": "/test/register",
1160 "error": "foo INVALID bar",
1161 },
1162 True,
1163 ),
1164 ],
1165 )
1107 def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix):
1166 def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix):
1108 from appenlight.lib.rule import Rule
1167 from appenlight.lib.rule import Rule
1168
1109 rule_config = {
1169 rule_config = {
1110 "field": "__AND__",
1170 "field": "__AND__",
1111 "rules": [
1171 "rules": [
1112 {"field": "__AND__",
1172 {
1113 "rules": [
1173 "field": "__AND__",
1114 {"op": "ge",
1174 "rules": [
1115 "field": "group:occurences",
1175 {"op": "ge", "field": "group:occurences", "value": "10"},
1116 "value": "10"
1176 {
1117 },
1177 "field": "__OR__",
1118 {"field": "__OR__",
1178 "rules": [
1119 "rules": [
1179 {
1120 {"op": "endswith",
1180 "op": "endswith",
1121 "field": "url_path",
1181 "field": "url_path",
1122 "value": "register"
1182 "value": "register",
1123 },
1183 },
1124 {"op": "contains",
1184 {"op": "contains", "field": "error", "value": "test"},
1125 "field": "error",
1185 ],
1126 "value": "test"
1186 },
1127 }]}]
1187 ],
1128 },
1188 },
1129 {"op": "eq",
1189 {"op": "eq", "field": "http_status", "value": "500"},
1130 "field": "http_status",
1190 ],
1131 "value": "500"
1132 }
1133 ]
1134 }
1191 }
1135
1192
1136 rule = Rule(rule_config, report_type_matrix)
1193 rule = Rule(rule_config, report_type_matrix)
1137 assert rule.match(data) is result
1194 assert rule.match(data) is result
1138
1195
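
# The nested configs above are plain data, so a generic evaluator needs
# only one recursive walk; a sketch with a hypothetical leaf_match
# callback, not Rule's actual implementation:
def evaluate(config, data, leaf_match):
    if config["field"] in ("__AND__", "__OR__"):
        combine = all if config["field"] == "__AND__" else any
        return combine(evaluate(r, data, leaf_match) for r in config["rules"])
    return leaf_match(config, data)

cfg = {"field": "__OR__", "rules": [{"field": "a", "value": 1},
                                    {"field": "b", "value": 2}]}
leaf = lambda c, d: d.get(c["field"]) == c["value"]
assert evaluate(cfg, {"a": 1, "b": 0}, leaf) is True
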
1139 @pytest.mark.parametrize("op, field, value, should_fail", [
1196 @pytest.mark.parametrize(
1140 ('eq', 'http_status', "1", False),
1197 "op, field, value, should_fail",
1141 ('ne', 'http_status', "1", False),
1198 [
1142 ('ne', 'http_status', "foo", True),
1199 ("eq", "http_status", "1", False),
1143 ('startswith', 'http_status', "1", True),
1200 ("ne", "http_status", "1", False),
1144 ('eq', 'group:priority', "1", False),
1201 ("ne", "http_status", "foo", True),
1145 ('ne', 'group:priority', "1", False),
1202 ("startswith", "http_status", "1", True),
1146 ('ge', 'group:priority', "1", False),
1203 ("eq", "group:priority", "1", False),
1147 ('le', 'group:priority', "1", False),
1204 ("ne", "group:priority", "1", False),
1148 ('startswith', 'group:priority', "1", True),
1205 ("ge", "group:priority", "1", False),
1149 ('eq', 'url_domain', "1", False),
1206 ("le", "group:priority", "1", False),
1150 ('ne', 'url_domain', "1", False),
1207 ("startswith", "group:priority", "1", True),
1151 ('startswith', 'url_domain', "1", False),
1208 ("eq", "url_domain", "1", False),
1152 ('endswith', 'url_domain', "1", False),
1209 ("ne", "url_domain", "1", False),
1153 ('contains', 'url_domain', "1", False),
1210 ("startswith", "url_domain", "1", False),
1154 ('ge', 'url_domain', "1", True),
1211 ("endswith", "url_domain", "1", False),
1155 ('eq', 'url_path', "1", False),
1212 ("contains", "url_domain", "1", False),
1156 ('ne', 'url_path', "1", False),
1213 ("ge", "url_domain", "1", True),
1157 ('startswith', 'url_path', "1", False),
1214 ("eq", "url_path", "1", False),
1158 ('endswith', 'url_path', "1", False),
1215 ("ne", "url_path", "1", False),
1159 ('contains', 'url_path', "1", False),
1216 ("startswith", "url_path", "1", False),
1160 ('ge', 'url_path', "1", True),
1217 ("endswith", "url_path", "1", False),
1161 ('eq', 'error', "1", False),
1218 ("contains", "url_path", "1", False),
1162 ('ne', 'error', "1", False),
1219 ("ge", "url_path", "1", True),
1163 ('startswith', 'error', "1", False),
1220 ("eq", "error", "1", False),
1164 ('endswith', 'error', "1", False),
1221 ("ne", "error", "1", False),
1165 ('contains', 'error', "1", False),
1222 ("startswith", "error", "1", False),
1166 ('ge', 'error', "1", True),
1223 ("endswith", "error", "1", False),
1167 ('ge', 'url_path', "1", True),
1224 ("contains", "error", "1", False),
1168 ('eq', 'tags:server_name', "1", False),
1225 ("ge", "error", "1", True),
1169 ('ne', 'tags:server_name', "1", False),
1226 ("ge", "url_path", "1", True),
1170 ('startswith', 'tags:server_name', "1", False),
1227 ("eq", "tags:server_name", "1", False),
1171 ('endswith', 'tags:server_name', "1", False),
1228 ("ne", "tags:server_name", "1", False),
1172 ('contains', 'tags:server_name', "1", False),
1229 ("startswith", "tags:server_name", "1", False),
1173 ('ge', 'tags:server_name', "1", True),
1230 ("endswith", "tags:server_name", "1", False),
1174 ('contains', 'traceback', "1", False),
1231 ("contains", "tags:server_name", "1", False),
1175 ('ge', 'traceback', "1", True),
1232 ("ge", "tags:server_name", "1", True),
1176 ('eq', 'group:occurences', "1", False),
1233 ("contains", "traceback", "1", False),
1177 ('ne', 'group:occurences', "1", False),
1234 ("ge", "traceback", "1", True),
1178 ('ge', 'group:occurences', "1", False),
1235 ("eq", "group:occurences", "1", False),
1179 ('le', 'group:occurences', "1", False),
1236 ("ne", "group:occurences", "1", False),
1180 ('contains', 'group:occurences', "1", True),
1237 ("ge", "group:occurences", "1", False),
1181 ])
1238 ("le", "group:occurences", "1", False),
1182 def test_rule_validation(self, op, field, value, should_fail,
1239 ("contains", "group:occurences", "1", True),
1183 report_type_matrix):
1240 ],
1241 )
1242 def test_rule_validation(self, op, field, value, should_fail, report_type_matrix):
1184 import colander
1243 import colander
1185 from appenlight.validators import build_rule_schema
1244 from appenlight.validators import build_rule_schema
1186 rule_config = {
1245
1187 "op": op,
1246 rule_config = {"op": op, "field": field, "value": value}
1188 "field": field,
1189 "value": value
1190 }
1191
1247
1192 schema = build_rule_schema(rule_config, report_type_matrix)
1248 schema = build_rule_schema(rule_config, report_type_matrix)
1193 if should_fail:
1249 if should_fail:
@@ -1198,40 +1254,29 @@ class TestNestedRuleParsing():

    def test_nested_proper_rule_validation(self, report_type_matrix):
        from appenlight.validators import build_rule_schema

        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {"op": "ge", "field": "group:occurences", "value": "10"},
                        {
                            "field": "__OR__",
                            "rules": [
                                {
                                    "op": "endswith",
                                    "field": "url_path",
                                    "value": "register",
                                },
                                {"op": "contains", "field": "error", "value": "test"},
                            ],
                        },
                    ],
                },
                {"op": "eq", "field": "http_status", "value": "500"},
            ],
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
@@ -1240,40 +1285,25 @@ class TestNestedRuleParsing():
    def test_nested_bad_rule_validation(self, report_type_matrix):
        import colander
        from appenlight.validators import build_rule_schema

        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {"op": "ge", "field": "group:occurences", "value": "10"},
                        {
                            "field": "__OR__",
                            "rules": [
                                {"op": "gt", "field": "url_path", "value": "register"},
                                {"op": "contains", "field": "error", "value": "test"},
                            ],
                        },
                    ],
                },
                {"op": "eq", "field": "http_status", "value": "500"},
            ],
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
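
# colander reports validation failures by raising colander.Invalid from
# deserialize(); a minimal self-contained illustration of the pattern
# the two validation tests above rely on (unrelated to build_rule_schema
# itself):
import colander

class _Probe(colander.MappingSchema):
    value = colander.SchemaNode(colander.Int())

try:
    _Probe().deserialize({"value": "not-an-int"})
except colander.Invalid:
    pass  # a bad payload surfaces here, as in the bad-rule case above
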
@@ -1282,97 +1312,72 @@ class TestNestedRuleParsing():

    def test_config_manipulator(self):
        from appenlight.lib.rule import Rule

        type_matrix = {
            "a": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
            "b": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
        }
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__OR__",
                    "rules": [{"op": "ge", "field": "a", "value": "10"}],
                },
                {"op": "eq", "field": "b", "value": "500"},
            ],
        }

        def rule_manipulator(rule):
            if "value" in rule.config:
                rule.config["value"] = "1"

        rule = Rule(rule_config, type_matrix, config_manipulator=rule_manipulator)
        rule.match({"a": 1, "b": "2"})
        assert rule.config["rules"][0]["rules"][0]["value"] == "1"
        assert rule.config["rules"][1]["value"] == "1"
        assert rule.type_matrix["b"]["type"] == "int"
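
# The assertions above imply the manipulator is applied to every nested
# subrule, not only the top-level config; a sketch of that recursive
# application (an assumption about Rule's internals, for illustration):
def apply_manipulator(config, manipulator):
    class _RuleView(object):  # minimal stand-in exposing .config like Rule
        def __init__(self, cfg):
            self.config = cfg

    manipulator(_RuleView(config))
    for sub in config.get("rules", []):
        apply_manipulator(sub, manipulator)
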
    def test_dynamic_config_manipulator(self):
        from appenlight.lib.rule import Rule

        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__OR__",
                    "rules": [{"op": "ge", "field": "a", "value": "10"}],
                },
                {"op": "eq", "field": "b", "value": "500"},
            ],
        }

        def rule_manipulator(rule):
            rule.type_matrix = {
                "a": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
                "b": {"type": "unicode", "ops": ("eq", "ne", "ge", "le")},
            }

            if "value" in rule.config:
                if rule.config["field"] == "a":
                    rule.config["value"] = "1"
                elif rule.config["field"] == "b":
                    rule.config["value"] = "2"

        rule = Rule(rule_config, {}, config_manipulator=rule_manipulator)
        rule.match({"a": 11, "b": "55"})
        assert rule.config["rules"][0]["rules"][0]["value"] == "1"
        assert rule.config["rules"][1]["value"] == "2"
        assert rule.type_matrix["b"]["type"] == "unicode"


@pytest.mark.usefixtures("base_app", "with_migrations")
class TestViewsWithForms(object):
    def test_bad_csrf(self):
        from appenlight.forms import CSRFException
        from appenlight.views.index import register

        post_data = {"dupa": "dupa"}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        with pytest.raises(CSRFException):
@@ -1380,58 +1385,63 @@ class TestViewsWithForms(object):

    def test_proper_csrf(self):
        from appenlight.views.index import register

        request = pyramid.threadlocal.get_current_request()
        post_data = {"dupa": "dupa", "csrf_token": request.session.get_csrf_token()}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        result = register(request)
        assert result["form"].errors["email"][0] == "This field is required."

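
# The same DummyRequest-plus-MultiDict setup recurs in each form test; a
# small helper (hypothetical, not part of the suite) that captures the
# pattern — DummyRequest.POST is a plain dict, while the views expect a
# webob MultiDict:
def form_request(post_data):
    request = testing.DummyRequest(post=post_data)
    request.POST = webob.multidict.MultiDict(request.POST)
    return request
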

@pytest.mark.usefixtures("base_app", "with_migrations", "default_data")
class TestRegistration(object):
    def test_invalid_form(self):
        from appenlight.views.index import register

        request = pyramid.threadlocal.get_current_request()
        post_data = {
            "user_name": "",
            "user_password": "",
            "email": "",
            "csrf_token": request.session.get_csrf_token(),
        }
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        result = register(request)
        assert result["form"].errors["user_name"][0] == "This field is required."

    def test_valid_form(self):
        from appenlight.views.index import register
        from ziggurat_foundations.models.services.user import UserService

        request = pyramid.threadlocal.get_current_request()
        post_data = {
            "user_name": "foo",
            "user_password": "barr",
            "email": "test@test.foo",
            "csrf_token": request.session.get_csrf_token(),
        }
        request = testing.DummyRequest(post=post_data)
        request.add_flash_to_headers = mock.Mock()
        request.POST = webob.multidict.MultiDict(request.POST)
        assert UserService.by_user_name("foo") is None
        register(request)
        user = UserService.by_user_name("foo")
        assert user.user_name == "foo"
        assert len(user.user_password) >= 60


@pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables", "default_user")
class TestApplicationCreation(object):
    def test_wrong_data(self):
        import appenlight.views.applications as applications
        from ziggurat_foundations.models.services.user import UserService

        request = pyramid.threadlocal.get_current_request()
        request.user = UserService.by_user_name("testuser")
        request.unsafe_json_body = {}
        request.headers["X-XSRF-TOKEN"] = request.session.get_csrf_token()
        response = applications.application_create(request)
        assert response.code == 422

@@ -1440,31 +1450,30 @@ class TestApplicationCreation(object):
        from ziggurat_foundations.models.services.user import UserService

        request = pyramid.threadlocal.get_current_request()
        request.user = UserService.by_user_name("testuser")
        request.unsafe_json_body = {"resource_name": "app name", "domains": "foo"}
        request.headers["X-XSRF-TOKEN"] = request.session.get_csrf_token()
        app_dict = applications.application_create(request)
        assert app_dict["public_key"] is not None
        assert app_dict["api_key"] is not None
        assert app_dict["resource_name"] == "app name"
        assert app_dict["owner_group_id"] is None
        assert app_dict["resource_id"] is not None
        assert app_dict["default_grouping"] == "url_traceback"
        assert app_dict["possible_permissions"] == ("view", "update_reports")
        assert app_dict["slow_report_threshold"] == 10
        assert app_dict["owner_user_name"] == "testuser"
        assert app_dict["owner_user_id"] == request.user.id
        # `is "foo"` relied on string interning; compare strings by equality
        assert app_dict["domains"] == "foo"
        assert app_dict["postprocessing_rules"] == []
        assert app_dict["error_report_threshold"] == 10
        assert app_dict["allow_permanent_storage"] is False
        assert app_dict["resource_type"] == "application"
        assert app_dict["current_permissions"] == []


@pytest.mark.usefixtures("default_application")
@pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
class TestAPISentryView(object):
    def test_no_payload(self, default_application):
        import colander
@@ -1474,12 +1483,11 @@ class TestAPISentryView(object):

        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(headers={"Content-Type": "application/json"})
        request.unsafe_json_body = ""
        request.context = context
        route = mock.Mock()
        route.name = "api_sentry"
        request.matched_route = route
        with pytest.raises(JSONException):
            sentry_compat(request)
@@ -1488,28 +1496,31 @@ class TestAPISentryView(object):
        from appenlight.views.api import sentry_compat
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup

        route = mock.Mock()
        route.name = "api_sentry"
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        context.resource.allow_permanent_storage = True
        request.context = context
        request.matched_route = route
        request.body = (
            b"eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki"
            b"RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87"
            b"JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa"
            b"fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b"
            b"oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz"
            b"m1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5"
            b"JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+"
            b"lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs"
            b"3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN"
            b"Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/"
            b"IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P"
            b"MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0"
            b"Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb"
            b"w7CtfWmP85SdCs8OvA53fUV19cg=="
        )
        sentry_compat(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
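
# The opaque request.body above is base64-encoded, zlib-deflated JSON
# (the classic Sentry store format); for debugging it can presumably be
# unpacked like this:
import base64
import json
import zlib

def decode_sentry_body(body):
    return json.loads(zlib.decompress(base64.b64decode(body)))
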
@@ -1521,16 +1532,19 @@ class TestAPISentryView(object):
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED

        route = mock.Mock()
        route.name = "api_sentry"
        request = testing.DummyRequest(
            headers={
                "Content-Type": "application/octet-stream",
                "User-Agent": "sentry-ruby/1.0.0",
                "X-Sentry-Auth": "Sentry sentry_version=5, "
                "sentry_client=raven-ruby/1.0.0, "
                "sentry_timestamp=1462378483, "
                "sentry_key=xxx, sentry_secret=xxx",
            }
        )
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        context.resource.allow_permanent_storage = True
@@ -1548,15 +1562,16 @@ class TestAPISentryView(object):
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7

        route = mock.Mock()
        route.name = "api_sentry"
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        context.resource.allow_permanent_storage = True
        request.context = context
        request.matched_route = route
        request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode("utf8")
        sentry_compat(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
@@ -1568,17 +1583,20 @@ class TestAPISentryView(object):
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED

        route = mock.Mock()
        route.name = "api_sentry"
        request = testing.DummyRequest(
            headers={
                "Content-Type": "application/octet-stream",
                "Content-Encoding": "deflate",
                "User-Agent": "sentry-ruby/1.0.0",
                "X-Sentry-Auth": "Sentry sentry_version=5, "
                "sentry_client=raven-ruby/1.0.0, "
                "sentry_timestamp=1462378483, "
                "sentry_key=xxx, sentry_secret=xxx",
            }
        )
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        context.resource.allow_permanent_storage = True
@@ -21,33 +21,35 @@ from colander import null

# those keywords are here so we can distinguish between searching for tags
# and normal properties of reports/logs
accepted_search_params = [
    "resource",
    "request_id",
    "start_date",
    "end_date",
    "page",
    "min_occurences",
    "http_status",
    "priority",
    "error",
    "url_path",
    "url_domain",
    "report_status",
    "min_duration",
    "max_duration",
    "message",
    "level",
    "namespace",
]


@colander.deferred
def deferred_utcnow(node, kw):
    return kw["utcnow"]


@colander.deferred
def optional_limited_date(node, kw):
    if not kw.get("allow_permanent_storage"):
        return limited_date

@@ -123,21 +125,21 @@ class UnknownType(object):

# SLOW REPORT SCHEMA


def rewrite_type(input_data):
    """
    Fix for legacy appenlight clients
    """
    if input_data == "remote_call":
        return "remote"
    return input_data


class ExtraTupleSchema(colander.TupleSchema):
    name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 64))
    value = colander.SchemaNode(
        UnknownType(), preparer=shortener_factory(512), missing=None
    )


class ExtraSchemaList(colander.SequenceSchema):
@@ -146,11 +148,10 @@ class ExtraSchemaList(colander.SequenceSchema):


class TagsTupleSchema(colander.TupleSchema):
    name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 128))
    value = colander.SchemaNode(
        UnknownType(), preparer=shortener_factory(128), missing=None
    )


class TagSchemaList(colander.SequenceSchema):
@@ -159,8 +160,7 @@ class TagSchemaList(colander.SequenceSchema):


class NumericTagsTupleSchema(colander.TupleSchema):
    name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 128))
    value = colander.SchemaNode(colander.Float(), missing=0)

@@ -173,41 +173,46 @@ class SlowCallSchema(colander.MappingSchema):
    """
    Validates slow call format in slow call list
    """

    start = colander.SchemaNode(NonTZDate())
    end = colander.SchemaNode(NonTZDate())
    statement = colander.SchemaNode(colander.String(), missing="")
    parameters = colander.SchemaNode(UnknownType(), missing=None)
    type = colander.SchemaNode(
        colander.String(),
        preparer=rewrite_type,
        validator=colander.OneOf(
            ["tmpl", "sql", "nosql", "remote", "unknown", "custom"]
        ),
        missing="unknown",
    )
    subtype = colander.SchemaNode(
        colander.String(), validator=colander.Length(1, 16), missing="unknown"
    )
    location = colander.SchemaNode(
        colander.String(), validator=colander.Length(1, 255), missing=""
    )


def limited_date(node, value):
    """ checks that the value is at most 72h in the past or 2h in the future """
    past_hours = 72
    future_hours = 2
    min_time = datetime.datetime.utcnow() - datetime.timedelta(hours=past_hours)
    max_time = datetime.datetime.utcnow() + datetime.timedelta(hours=future_hours)
    if min_time > value:
        msg = "%r is older than current UTC time by " + str(past_hours)
        msg += (
            " hours. Ask the administrator to enable permanent logging for "
            "your application to store logs with dates in the past."
        )
        raise colander.Invalid(node, msg % value)
    if max_time < value:
        msg = "%r is newer than current UTC time by " + str(future_hours)
        msg += (
            " hours. Ask the administrator to enable permanent logging for "
            "your application to store logs with dates in the future."
        )
        raise colander.Invalid(node, msg % value)
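
# limited_date is a plain colander validator — given a node and a value
# it either returns or raises colander.Invalid. A quick illustrative
# self-check of the 72-hour window (exact message text aside):
_stale = datetime.datetime.utcnow() - datetime.timedelta(hours=100)
try:
    limited_date(None, _stale)
except colander.Invalid:
    pass  # values more than 72 hours old are rejected
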
@@ -215,6 +220,7 @@ class SlowCallListSchema(colander.SequenceSchema):
    """
    Validates list of individual slow calls
    """

    slow_call = SlowCallSchema()

@@ -222,52 +228,54 b' class RequestStatsSchema(colander.MappingSchema):'
222 """
228 """
223 Validates format of requests statistics dictionary
229 Validates format of requests statistics dictionary
224 """
230 """
225 main = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
231
226 missing=0)
232 main = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)
227 sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
233 sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)
228 missing=0)
234 nosql = colander.SchemaNode(
229 nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
235 colander.Float(), validator=colander.Range(0), missing=0
230 missing=0)
236 )
231 remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
237 remote = colander.SchemaNode(
232 missing=0)
238 colander.Float(), validator=colander.Range(0), missing=0
233 tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
239 )
234 missing=0)
240 tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)
235 custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
241 custom = colander.SchemaNode(
236 missing=0)
242 colander.Float(), validator=colander.Range(0), missing=0
237 sql_calls = colander.SchemaNode(colander.Float(),
243 )
238 validator=colander.Range(0),
244 sql_calls = colander.SchemaNode(
239 missing=0)
245 colander.Float(), validator=colander.Range(0), missing=0
240 nosql_calls = colander.SchemaNode(colander.Float(),
246 )
241 validator=colander.Range(0),
247 nosql_calls = colander.SchemaNode(
242 missing=0)
248 colander.Float(), validator=colander.Range(0), missing=0
243 remote_calls = colander.SchemaNode(colander.Float(),
249 )
244 validator=colander.Range(0),
250 remote_calls = colander.SchemaNode(
245 missing=0)
251 colander.Float(), validator=colander.Range(0), missing=0
246 tmpl_calls = colander.SchemaNode(colander.Float(),
252 )
247 validator=colander.Range(0),
253 tmpl_calls = colander.SchemaNode(
248 missing=0)
254 colander.Float(), validator=colander.Range(0), missing=0
249 custom_calls = colander.SchemaNode(colander.Float(),
255 )
250 validator=colander.Range(0),
256 custom_calls = colander.SchemaNode(
251 missing=0)
257 colander.Float(), validator=colander.Range(0), missing=0
258 )
252
259
253
260
254 class FrameInfoVarSchema(colander.SequenceSchema):
261 class FrameInfoVarSchema(colander.SequenceSchema):
255 """
262 """
256 Validates format of frame variables of a traceback
263 Validates format of frame variables of a traceback
257 """
264 """
258 vars = colander.SchemaNode(UnknownType(),
265
259 validator=colander.Length(2, 2))
266 vars = colander.SchemaNode(UnknownType(), validator=colander.Length(2, 2))
260
267
261
268
262 class FrameInfoSchema(colander.MappingSchema):
269 class FrameInfoSchema(colander.MappingSchema):
263 """
270 """
264 Validates format of a traceback line
271 Validates format of a traceback line
265 """
272 """
266 cline = colander.SchemaNode(colander.String(), missing='')
273
267 module = colander.SchemaNode(colander.String(), missing='')
274 cline = colander.SchemaNode(colander.String(), missing="")
268 line = colander.SchemaNode(colander.String(), missing='')
275 module = colander.SchemaNode(colander.String(), missing="")
269 file = colander.SchemaNode(colander.String(), missing='')
276 line = colander.SchemaNode(colander.String(), missing="")
270 fn = colander.SchemaNode(colander.String(), missing='')
277 file = colander.SchemaNode(colander.String(), missing="")
278 fn = colander.SchemaNode(colander.String(), missing="")
271 vars = FrameInfoVarSchema()
279 vars = FrameInfoVarSchema()
272
280
273
281
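As a quick orientation for readers skimming the reformat: these colander schemas coerce and default their fields at deserialize time. A minimal sketch (the two-field schema and payload are illustrative, not part of the codebase):

    import colander

    class MiniRequestStats(colander.MappingSchema):
        # trimmed stand-in for RequestStatsSchema above
        main = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)
        sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)

    stats = MiniRequestStats().deserialize({"main": "1.5"})
    # -> {'main': 1.5, 'sql': 0}: "1.5" is coerced to float, absent keys fall back to missing=0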
@@ -275,6 +283,7 b' class FrameInfoListSchema(colander.SequenceSchema):'
275 """
283 """
276 Validates format of list of traceback lines
284 Validates format of list of traceback lines
277 """
285 """
286
278 frame = colander.SchemaNode(UnknownType())
287 frame = colander.SchemaNode(UnknownType())
279
288
280
289
@@ -282,36 +291,40 b' class ReportDetailBaseSchema(colander.MappingSchema):'
282 """
291 """
283 Validates format of report, i.e. request parameters and stats for a request in a report group
292 Validates format of report, i.e. request parameters and stats for a request in a report group
284 """
293 """
285 username = colander.SchemaNode(colander.String(),
294
286 preparer=[shortener_factory(255),
295 username = colander.SchemaNode(
287 lambda x: x or ''],
296 colander.String(),
288 missing='')
297 preparer=[shortener_factory(255), lambda x: x or ""],
289 request_id = colander.SchemaNode(colander.String(),
298 missing="",
290 preparer=shortener_factory(40),
299 )
291 missing='')
300 request_id = colander.SchemaNode(
292 url = colander.SchemaNode(colander.String(),
301 colander.String(), preparer=shortener_factory(40), missing=""
293 preparer=shortener_factory(1024), missing='')
302 )
294 ip = colander.SchemaNode(colander.String(), preparer=shortener_factory(39),
303 url = colander.SchemaNode(
295 missing=None)
304 colander.String(), preparer=shortener_factory(1024), missing=""
296 start_time = colander.SchemaNode(NonTZDate(),
305 )
297 validator=optional_limited_date,
306 ip = colander.SchemaNode(
298 missing=deferred_utcnow)
307 colander.String(), preparer=shortener_factory(39), missing=None
299 end_time = colander.SchemaNode(NonTZDate(),
308 )
300 validator=optional_limited_date,
309 start_time = colander.SchemaNode(
301 missing=None)
310 NonTZDate(), validator=optional_limited_date, missing=deferred_utcnow
302 user_agent = colander.SchemaNode(colander.String(),
311 )
303 preparer=[shortener_factory(512),
312 end_time = colander.SchemaNode(
304 lambda x: x or ''],
313 NonTZDate(), validator=optional_limited_date, missing=None
305 missing='')
314 )
306 message = colander.SchemaNode(colander.String(),
315 user_agent = colander.SchemaNode(
307 preparer=shortener_factory(2048),
316 colander.String(),
308 missing='')
317 preparer=[shortener_factory(512), lambda x: x or ""],
309 group_string = colander.SchemaNode(colander.String(),
318 missing="",
310 preparer=shortener_factory(512),
319 )
311 missing=None)
320 message = colander.SchemaNode(
321 colander.String(), preparer=shortener_factory(2048), missing=""
322 )
323 group_string = colander.SchemaNode(
324 colander.String(), preparer=shortener_factory(512), missing=None
325 )
312 request_stats = RequestStatsSchema(missing=None)
326 request_stats = RequestStatsSchema(missing=None)
313 request = colander.SchemaNode(colander.Mapping(unknown='preserve'),
327 request = colander.SchemaNode(colander.Mapping(unknown="preserve"), missing={})
314 missing={})
315 traceback = FrameInfoListSchema(missing=None)
328 traceback = FrameInfoListSchema(missing=None)
316 slow_calls = SlowCallListSchema(missing=[])
329 slow_calls = SlowCallListSchema(missing=[])
317 extra = ExtraSchemaList()
330 extra = ExtraSchemaList()
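`shortener_factory` is defined outside the hunks shown here; a hypothetical sketch of the preparer chain used for `username` above, assuming it returns a callable that truncates strings to a maximum length:

    def shortener_factory(max_len):
        # assumed behavior; the real implementation lives elsewhere in the package
        def shorten(value):
            return value[:max_len] if isinstance(value, str) else value
        return shorten

    # colander runs a list of preparers in order after deserialization,
    # so a None username passes through the shortener and is then coerced to "":
    value = None
    for prepare in [shortener_factory(255), lambda x: x or ""]:
        value = prepare(value)
    assert value == ""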
@@ -330,32 +343,35 b' class ReportSchemaBase(colander.MappingSchema):'
330 """
343 """
331 Validates format of report group
344 Validates format of report group
332 """
345 """
333 client = colander.SchemaNode(colander.String(),
346
334 preparer=lambda x: x or 'unknown')
347 client = colander.SchemaNode(colander.String(), preparer=lambda x: x or "unknown")
335 server = colander.SchemaNode(
348 server = colander.SchemaNode(
336 colander.String(),
349 colander.String(),
337 preparer=[
350 preparer=[lambda x: x.lower() if x else "unknown", shortener_factory(128)],
338 lambda x: x.lower() if x else 'unknown', shortener_factory(128)],
351 missing="unknown",
339 missing='unknown')
352 )
340 priority = colander.SchemaNode(colander.Int(),
353 priority = colander.SchemaNode(
341 preparer=[lambda x: x or 5],
354 colander.Int(),
342 validator=colander.Range(1, 10),
355 preparer=[lambda x: x or 5],
343 missing=5)
356 validator=colander.Range(1, 10),
344 language = colander.SchemaNode(colander.String(), missing='unknown')
357 missing=5,
345 error = colander.SchemaNode(colander.String(),
358 )
346 preparer=shortener_factory(512),
359 language = colander.SchemaNode(colander.String(), missing="unknown")
347 missing='')
360 error = colander.SchemaNode(
348 view_name = colander.SchemaNode(colander.String(),
361 colander.String(), preparer=shortener_factory(512), missing=""
349 preparer=[shortener_factory(128),
362 )
350 lambda x: x or ''],
363 view_name = colander.SchemaNode(
351 missing='')
364 colander.String(),
352 http_status = colander.SchemaNode(colander.Int(),
365 preparer=[shortener_factory(128), lambda x: x or ""],
353 preparer=[lambda x: x or 200],
366 missing="",
354 validator=colander.Range(1))
367 )
355
368 http_status = colander.SchemaNode(
356 occurences = colander.SchemaNode(colander.Int(),
369 colander.Int(), preparer=[lambda x: x or 200], validator=colander.Range(1)
357 validator=colander.Range(1, 99999999999),
370 )
358 missing=1)
371
372 occurences = colander.SchemaNode(
373 colander.Int(), validator=colander.Range(1, 99999999999), missing=1
374 )
359 tags = TagSchemaList()
375 tags = TagSchemaList()
360
376
361
377
@@ -363,8 +379,9 b' class ReportSchema_0_5(ReportSchemaBase, ReportDetailSchema_0_5):'
363 pass
379 pass
364
380
365
381
366 class ReportSchemaPermissiveDate_0_5(ReportSchemaBase,
382 class ReportSchemaPermissiveDate_0_5(
367 ReportDetailSchemaPermissiveDate_0_5):
383 ReportSchemaBase, ReportDetailSchemaPermissiveDate_0_5
384 ):
368 pass
385 pass
369
386
370
387
@@ -372,6 +389,7 b' class ReportListSchema_0_5(colander.SequenceSchema):'
372 """
389 """
373 Validates format of list of report groups
390 Validates format of list of report groups
374 """
391 """
392
375 report = ReportSchema_0_5()
393 report = ReportSchema_0_5()
376 validator = colander.Length(1)
394 validator = colander.Length(1)
377
395
@@ -380,6 +398,7 b' class ReportListPermissiveDateSchema_0_5(colander.SequenceSchema):'
380 """
398 """
381 Validates format of list of report groups
399 Validates format of list of report groups
382 """
400 """
401
383 report = ReportSchemaPermissiveDate_0_5()
402 report = ReportSchemaPermissiveDate_0_5()
384 validator = colander.Length(1)
403 validator = colander.Length(1)
385
404
@@ -388,34 +407,35 b' class LogSchema(colander.MappingSchema):'
388 """
407 """
389 Validates format of individual log entry
408 Validates format of individual log entry
390 """
409 """
391 primary_key = colander.SchemaNode(UnknownType(),
410
392 preparer=[cast_to_unicode_or_null,
411 primary_key = colander.SchemaNode(
393 shortener_factory(128)],
412 UnknownType(),
394 missing=None)
413 preparer=[cast_to_unicode_or_null, shortener_factory(128)],
395 log_level = colander.SchemaNode(colander.String(),
414 missing=None,
396 preparer=shortener_factory(10),
415 )
397 missing='UNKNOWN')
416 log_level = colander.SchemaNode(
398 message = colander.SchemaNode(colander.String(),
417 colander.String(), preparer=shortener_factory(10), missing="UNKNOWN"
399 preparer=shortener_factory(4096),
418 )
400 missing='')
419 message = colander.SchemaNode(
401 namespace = colander.SchemaNode(colander.String(),
420 colander.String(), preparer=shortener_factory(4096), missing=""
402 preparer=shortener_factory(128),
421 )
403 missing='')
422 namespace = colander.SchemaNode(
404 request_id = colander.SchemaNode(colander.String(),
423 colander.String(), preparer=shortener_factory(128), missing=""
405 preparer=shortener_factory(40),
424 )
406 missing='')
425 request_id = colander.SchemaNode(
407 server = colander.SchemaNode(colander.String(),
426 colander.String(), preparer=shortener_factory(40), missing=""
408 preparer=shortener_factory(128),
427 )
409 missing='unknown')
428 server = colander.SchemaNode(
410 date = colander.SchemaNode(NonTZDate(),
429 colander.String(), preparer=shortener_factory(128), missing="unknown"
411 validator=limited_date,
430 )
412 missing=deferred_utcnow)
431 date = colander.SchemaNode(
432 NonTZDate(), validator=limited_date, missing=deferred_utcnow
433 )
413 tags = TagSchemaList()
434 tags = TagSchemaList()
414
435
415
436
416 class LogSchemaPermanent(LogSchema):
437 class LogSchemaPermanent(LogSchema):
417 date = colander.SchemaNode(NonTZDate(),
438 date = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow)
418 missing=deferred_utcnow)
419 permanent = colander.SchemaNode(colander.Boolean(), missing=False)
439 permanent = colander.SchemaNode(colander.Boolean(), missing=False)
420
440
421
441
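`deferred_utcnow` (imported further up in this module) is presumably a `colander.deferred`, so `LogSchema` has to be bound before deserializing. A minimal sketch under that assumption:

    import datetime
    import colander

    @colander.deferred
    def deferred_utcnow(node, kw):
        # resolved once, at bind() time
        return datetime.datetime.utcnow()

    class StampSchema(colander.MappingSchema):
        date = colander.SchemaNode(colander.DateTime(), missing=deferred_utcnow)

    schema = StampSchema().bind()    # resolves all deferred values
    result = schema.deserialize({})  # {'date': <current UTC datetime>}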
@@ -423,6 +443,7 b' class LogListSchema(colander.SequenceSchema):'
423 """
443 """
424 Validates format of list of log entries
444 Validates format of list of log entries
425 """
445 """
446
426 log = LogSchema()
447 log = LogSchema()
427 validator = colander.Length(1)
448 validator = colander.Length(1)
428
449
@@ -431,14 +452,15 b' class LogListPermanentSchema(colander.SequenceSchema):'
431 """
452 """
432 Validates format of list of log entries
453 Validates format of list of log entries
433 """
454 """
455
434 log = LogSchemaPermanent()
456 log = LogSchemaPermanent()
435 validator = colander.Length(1)
457 validator = colander.Length(1)
436
458
437
459
438 class ViewRequestStatsSchema(RequestStatsSchema):
460 class ViewRequestStatsSchema(RequestStatsSchema):
439 requests = colander.SchemaNode(colander.Integer(),
461 requests = colander.SchemaNode(
440 validator=colander.Range(0),
462 colander.Integer(), validator=colander.Range(0), missing=0
441 missing=0)
463 )
442
464
443
465
444 class ViewMetricTupleSchema(colander.TupleSchema):
466 class ViewMetricTupleSchema(colander.TupleSchema):
@@ -446,10 +468,12 b' class ViewMetricTupleSchema(colander.TupleSchema):'
446 Validates list of views and their corresponding request stats objects, i.e.:
468 Validates list of views and their corresponding request stats objects, i.e.:
447 ["dir/module:func",{"custom": 0.0..}]
469 ["dir/module:func",{"custom": 0.0..}]
448 """
470 """
449 view_name = colander.SchemaNode(colander.String(),
471
450 preparer=[shortener_factory(128),
472 view_name = colander.SchemaNode(
451 lambda x: x or 'unknown'],
473 colander.String(),
452 missing='unknown')
474 preparer=[shortener_factory(128), lambda x: x or "unknown"],
475 missing="unknown",
476 )
453 metrics = ViewRequestStatsSchema()
477 metrics = ViewRequestStatsSchema()
454
478
455
479
@@ -458,6 +482,7 b' class ViewMetricListSchema(colander.SequenceSchema):'
458 Validates the list of view breakdown stats objects
482 Validates the list of view breakdown stats objects
459 {metrics key of server/time object}
483 {metrics key of server/time object}
460 """
484 """
485
461 view_tuple = ViewMetricTupleSchema()
486 view_tuple = ViewMetricTupleSchema()
462 validator = colander.Length(1)
487 validator = colander.Length(1)
463
488
@@ -468,13 +493,13 b' class ViewMetricSchema(colander.MappingSchema):'
468 {server/time object}
493 {server/time object}
469
494
470 """
495 """
471 timestamp = colander.SchemaNode(NonTZDate(),
496
472 validator=limited_date,
497 timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date, missing=None)
473 missing=None)
498 server = colander.SchemaNode(
474 server = colander.SchemaNode(colander.String(),
499 colander.String(),
475 preparer=[shortener_factory(128),
500 preparer=[shortener_factory(128), lambda x: x or "unknown"],
476 lambda x: x or 'unknown'],
501 missing="unknown",
477 missing='unknown')
502 )
478 metrics = ViewMetricListSchema()
503 metrics = ViewMetricListSchema()
479
504
480
505
@@ -483,15 +508,19 b' class GeneralMetricSchema(colander.MappingSchema):'
483 Validates universal metric schema
508 Validates universal metric schema
484
509
485 """
510 """
486 namespace = colander.SchemaNode(colander.String(), missing='',
487 preparer=shortener_factory(128))
488
511
489 server_name = colander.SchemaNode(colander.String(),
512 namespace = colander.SchemaNode(
490 preparer=[shortener_factory(128),
513 colander.String(), missing="", preparer=shortener_factory(128)
491 lambda x: x or 'unknown'],
514 )
492 missing='unknown')
515
493 timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date,
516 server_name = colander.SchemaNode(
494 missing=deferred_utcnow)
517 colander.String(),
518 preparer=[shortener_factory(128), lambda x: x or "unknown"],
519 missing="unknown",
520 )
521 timestamp = colander.SchemaNode(
522 NonTZDate(), validator=limited_date, missing=deferred_utcnow
523 )
495 tags = TagSchemaList(missing=colander.required)
524 tags = TagSchemaList(missing=colander.required)
496
525
497
526
@@ -500,6 +529,7 b' class GeneralMetricPermanentSchema(GeneralMetricSchema):'
500 Validates universal metric schema
529 Validates universal metric schema
501
530
502 """
531 """
532
503 timestamp = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow)
533 timestamp = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow)
504
534
505
535
@@ -520,6 +550,7 b' class MetricsListSchema(colander.SequenceSchema):'
520
550
521
551
522 """
552 """
553
523 metric = ViewMetricSchema()
554 metric = ViewMetricSchema()
524 validator = colander.Length(1)
555 validator = colander.Length(1)
525
556
@@ -540,7 +571,7 b' class StringToAppList(object):'
540 if cstruct is null:
571 if cstruct is null:
541 return null
572 return null
542
573
543 apps = set([int(a) for a in node.bindings['resources']])
574 apps = set([int(a) for a in node.bindings["resources"]])
544
575
545 if isinstance(cstruct, str):
576 if isinstance(cstruct, str):
546 cstruct = [cstruct]
577 cstruct = [cstruct]
@@ -558,41 +589,41 b' class StringToAppList(object):'
558
589
559 @colander.deferred
590 @colander.deferred
560 def possible_applications_validator(node, kw):
591 def possible_applications_validator(node, kw):
561 possible_apps = [int(a) for a in kw['resources']]
592 possible_apps = [int(a) for a in kw["resources"]]
562 return colander.All(colander.ContainsOnly(possible_apps),
593 return colander.All(colander.ContainsOnly(possible_apps), colander.Length(1))
563 colander.Length(1))
564
594
565
595
566 @colander.deferred
596 @colander.deferred
567 def possible_applications(node, kw):
597 def possible_applications(node, kw):
568 return [int(a) for a in kw['resources']]
598 return [int(a) for a in kw["resources"]]
569
599
570
600
571 @colander.deferred
601 @colander.deferred
572 def today_start(node, kw):
602 def today_start(node, kw):
573 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
603 return datetime.datetime.utcnow().replace(second=0, microsecond=0, minute=0, hour=0)
574 minute=0,
575 hour=0)
576
604
577
605
578 @colander.deferred
606 @colander.deferred
579 def today_end(node, kw):
607 def today_end(node, kw):
580 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
608 return datetime.datetime.utcnow().replace(
581 minute=59, hour=23)
609 second=0, microsecond=0, minute=59, hour=23
610 )
582
611
583
612
584 @colander.deferred
613 @colander.deferred
585 def old_start(node, kw):
614 def old_start(node, kw):
586 t_delta = datetime.timedelta(days=90)
615 t_delta = datetime.timedelta(days=90)
587 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
616 return (
588 minute=0,
617 datetime.datetime.utcnow().replace(second=0, microsecond=0, minute=0, hour=0)
589 hour=0) - t_delta
618 - t_delta
619 )
590
620
591
621
592 @colander.deferred
622 @colander.deferred
593 def today_end(node, kw):
623 def today_end(node, kw):
594 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
624 return datetime.datetime.utcnow().replace(
595 minute=59, hour=23)
625 second=0, microsecond=0, minute=59, hour=23
626 )
596
627
597
628
598 class PermissiveDate(colander.DateTime):
629 class PermissiveDate(colander.DateTime):
@@ -604,7 +635,8 b' class PermissiveDate(colander.DateTime):'
604
635
605 try:
636 try:
606 result = colander.iso8601.parse_date(
637 result = colander.iso8601.parse_date(
607 cstruct, default_timezone=self.default_tzinfo)
638 cstruct, default_timezone=self.default_tzinfo
639 )
608 except colander.iso8601.ParseError:
640 except colander.iso8601.ParseError:
609 return null
641 return null
610 return result.replace(tzinfo=None)
642 return result.replace(tzinfo=None)
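The effect of the override above is that unparseable dates degrade to `colander.null`, and hence to the node's `missing` value, instead of raising `Invalid`. A usage sketch, assuming the shown try/except sits inside a standard `deserialize(self, node, cstruct)` override:

    node = colander.SchemaNode(PermissiveDate(), missing=None)
    node.deserialize("2018-05-04T10:00:00")  # -> datetime(2018, 5, 4, 10, 0), tzinfo stripped
    node.deserialize("not-a-date")           # -> None (null falls back to missing)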
@@ -612,99 +644,126 b' class PermissiveDate(colander.DateTime):'
612
644
613 class LogSearchSchema(colander.MappingSchema):
645 class LogSearchSchema(colander.MappingSchema):
614 def schema_type(self, **kw):
646 def schema_type(self, **kw):
615 return colander.Mapping(unknown='preserve')
647 return colander.Mapping(unknown="preserve")
616
648
617 resource = colander.SchemaNode(StringToAppList(),
649 resource = colander.SchemaNode(
618 validator=possible_applications_validator,
650 StringToAppList(),
619 missing=possible_applications)
651 validator=possible_applications_validator,
620
652 missing=possible_applications,
621 message = colander.SchemaNode(colander.Sequence(accept_scalar=True),
653 )
622 colander.SchemaNode(colander.String()),
654
623 missing=None)
655 message = colander.SchemaNode(
624 level = colander.SchemaNode(colander.Sequence(accept_scalar=True),
656 colander.Sequence(accept_scalar=True),
625 colander.SchemaNode(colander.String()),
657 colander.SchemaNode(colander.String()),
626 preparer=lowercase_preparer,
658 missing=None,
627 missing=None)
659 )
628 namespace = colander.SchemaNode(colander.Sequence(accept_scalar=True),
660 level = colander.SchemaNode(
629 colander.SchemaNode(colander.String()),
661 colander.Sequence(accept_scalar=True),
630 preparer=lowercase_preparer,
662 colander.SchemaNode(colander.String()),
631 missing=None)
663 preparer=lowercase_preparer,
632 request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
664 missing=None,
633 colander.SchemaNode(colander.String()),
665 )
634 preparer=lowercase_preparer,
666 namespace = colander.SchemaNode(
635 missing=None)
667 colander.Sequence(accept_scalar=True),
636 start_date = colander.SchemaNode(PermissiveDate(),
668 colander.SchemaNode(colander.String()),
637 missing=None)
669 preparer=lowercase_preparer,
638 end_date = colander.SchemaNode(PermissiveDate(),
670 missing=None,
639 missing=None)
671 )
640 page = colander.SchemaNode(colander.Integer(),
672 request_id = colander.SchemaNode(
641 validator=colander.Range(min=1),
673 colander.Sequence(accept_scalar=True),
642 missing=1)
674 colander.SchemaNode(colander.String()),
675 preparer=lowercase_preparer,
676 missing=None,
677 )
678 start_date = colander.SchemaNode(PermissiveDate(), missing=None)
679 end_date = colander.SchemaNode(PermissiveDate(), missing=None)
680 page = colander.SchemaNode(
681 colander.Integer(), validator=colander.Range(min=1), missing=1
682 )
643
683
644
684
645 class ReportSearchSchema(colander.MappingSchema):
685 class ReportSearchSchema(colander.MappingSchema):
646 def schema_type(self, **kw):
686 def schema_type(self, **kw):
647 return colander.Mapping(unknown='preserve')
687 return colander.Mapping(unknown="preserve")
648
688
649 resource = colander.SchemaNode(StringToAppList(),
689 resource = colander.SchemaNode(
650 validator=possible_applications_validator,
690 StringToAppList(),
651 missing=possible_applications)
691 validator=possible_applications_validator,
652 request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
692 missing=possible_applications,
653 colander.SchemaNode(colander.String()),
693 )
654 missing=None)
694 request_id = colander.SchemaNode(
655 start_date = colander.SchemaNode(PermissiveDate(),
695 colander.Sequence(accept_scalar=True),
656 missing=None)
696 colander.SchemaNode(colander.String()),
657 end_date = colander.SchemaNode(PermissiveDate(),
697 missing=None,
658 missing=None)
698 )
659 page = colander.SchemaNode(colander.Integer(),
699 start_date = colander.SchemaNode(PermissiveDate(), missing=None)
660 validator=colander.Range(min=1),
700 end_date = colander.SchemaNode(PermissiveDate(), missing=None)
661 missing=1)
701 page = colander.SchemaNode(
702 colander.Integer(), validator=colander.Range(min=1), missing=1
703 )
662
704
663 min_occurences = colander.SchemaNode(
705 min_occurences = colander.SchemaNode(
664 colander.Sequence(accept_scalar=True),
706 colander.Sequence(accept_scalar=True),
665 colander.SchemaNode(colander.Integer()),
707 colander.SchemaNode(colander.Integer()),
666 missing=None)
708 missing=None,
667
709 )
668 http_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
710
669 colander.SchemaNode(colander.Integer()),
711 http_status = colander.SchemaNode(
670 missing=None)
712 colander.Sequence(accept_scalar=True),
671 priority = colander.SchemaNode(colander.Sequence(accept_scalar=True),
713 colander.SchemaNode(colander.Integer()),
672 colander.SchemaNode(colander.Integer()),
714 missing=None,
673 missing=None)
715 )
674 error = colander.SchemaNode(colander.Sequence(accept_scalar=True),
716 priority = colander.SchemaNode(
675 colander.SchemaNode(colander.String()),
717 colander.Sequence(accept_scalar=True),
676 missing=None)
718 colander.SchemaNode(colander.Integer()),
677 url_path = colander.SchemaNode(colander.Sequence(accept_scalar=True),
719 missing=None,
678 colander.SchemaNode(colander.String()),
720 )
679 missing=None)
721 error = colander.SchemaNode(
680 url_domain = colander.SchemaNode(colander.Sequence(accept_scalar=True),
722 colander.Sequence(accept_scalar=True),
681 colander.SchemaNode(colander.String()),
723 colander.SchemaNode(colander.String()),
682 missing=None)
724 missing=None,
683 report_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
725 )
684 colander.SchemaNode(colander.String()),
726 url_path = colander.SchemaNode(
685 missing=None)
727 colander.Sequence(accept_scalar=True),
686 min_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
728 colander.SchemaNode(colander.String()),
687 colander.SchemaNode(colander.Float()),
729 missing=None,
688 missing=None)
730 )
689 max_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
731 url_domain = colander.SchemaNode(
690 colander.SchemaNode(colander.Float()),
732 colander.Sequence(accept_scalar=True),
691 missing=None)
733 colander.SchemaNode(colander.String()),
734 missing=None,
735 )
736 report_status = colander.SchemaNode(
737 colander.Sequence(accept_scalar=True),
738 colander.SchemaNode(colander.String()),
739 missing=None,
740 )
741 min_duration = colander.SchemaNode(
742 colander.Sequence(accept_scalar=True),
743 colander.SchemaNode(colander.Float()),
744 missing=None,
745 )
746 max_duration = colander.SchemaNode(
747 colander.Sequence(accept_scalar=True),
748 colander.SchemaNode(colander.Float()),
749 missing=None,
750 )
692
751
693
752
694 class TagSchema(colander.MappingSchema):
753 class TagSchema(colander.MappingSchema):
695 """
754 """
696 Used in log search
755 Used in log search
697 """
756 """
698 name = colander.SchemaNode(colander.String(),
757
699 validator=colander.Length(1, 32))
758 name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 32))
700 value = colander.SchemaNode(colander.Sequence(accept_scalar=True),
759 value = colander.SchemaNode(
701 colander.SchemaNode(colander.String(),
760 colander.Sequence(accept_scalar=True),
702 validator=colander.Length(
761 colander.SchemaNode(colander.String(), validator=colander.Length(1, 128)),
703 1, 128)),
762 missing=None,
704 missing=None)
763 )
705 op = colander.SchemaNode(colander.String(),
764 op = colander.SchemaNode(
706 validator=colander.Length(1, 128),
765 colander.String(), validator=colander.Length(1, 128), missing=None
707 missing=None)
766 )
708
767
709
768
710 class TagListSchema(colander.SequenceSchema):
769 class TagListSchema(colander.SequenceSchema):
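The repeated `Sequence(accept_scalar=True)` pattern in the two search schemas above lets clients send either a single value or a list for each filter. A small sketch (assumes `import colander`):

    node = colander.SchemaNode(
        colander.Sequence(accept_scalar=True),
        colander.SchemaNode(colander.String()),
        missing=None,
    )
    node.deserialize("error")             # -> ['error'] (scalar is wrapped)
    node.deserialize(["error", "fatal"])  # -> ['error', 'fatal']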
@@ -720,16 +779,16 b' class RuleFieldType(object):'
720
779
721 def __call__(self, node, value):
780 def __call__(self, node, value):
722 try:
781 try:
723 if self.cast_to == 'int':
782 if self.cast_to == "int":
724 int(value)
783 int(value)
725 elif self.cast_to == 'float':
784 elif self.cast_to == "float":
726 float(value)
785 float(value)
727 elif self.cast_to == 'unicode':
786 elif self.cast_to == "unicode":
728 str(value)
787 str(value)
729 except:
788 except:
730 raise colander.Invalid(node,
789 raise colander.Invalid(
731 "Can't cast {} to {}".format(
790 node, "Can't cast {} to {}".format(value, self.cast_to)
732 value, self.cast_to))
791 )
733
792
734
793
735 def build_rule_schema(ruleset, check_matrix):
794 def build_rule_schema(ruleset, check_matrix):
@@ -739,23 +798,27 b' def build_rule_schema(ruleset, check_matrix):'
739 """
798 """
740
799
741 schema = colander.SchemaNode(colander.Mapping())
800 schema = colander.SchemaNode(colander.Mapping())
742 schema.add(colander.SchemaNode(colander.String(), name='field'))
801 schema.add(colander.SchemaNode(colander.String(), name="field"))
743
802
744 if ruleset['field'] in ['__AND__', '__OR__', '__NOT__']:
803 if ruleset["field"] in ["__AND__", "__OR__", "__NOT__"]:
745 subrules = colander.SchemaNode(colander.Tuple(), name='rules')
804 subrules = colander.SchemaNode(colander.Tuple(), name="rules")
746 for rule in ruleset['rules']:
805 for rule in ruleset["rules"]:
747 subrules.add(build_rule_schema(rule, check_matrix))
806 subrules.add(build_rule_schema(rule, check_matrix))
748 schema.add(subrules)
807 schema.add(subrules)
749 else:
808 else:
750 op_choices = check_matrix[ruleset['field']]['ops']
809 op_choices = check_matrix[ruleset["field"]]["ops"]
751 cast_to = check_matrix[ruleset['field']]['type']
810 cast_to = check_matrix[ruleset["field"]]["type"]
752 schema.add(colander.SchemaNode(colander.String(),
811 schema.add(
753 validator=colander.OneOf(op_choices),
812 colander.SchemaNode(
754 name='op'))
813 colander.String(), validator=colander.OneOf(op_choices), name="op"
755
814 )
756 schema.add(colander.SchemaNode(colander.String(),
815 )
757 name='value',
816
758 validator=RuleFieldType(cast_to)))
817 schema.add(
818 colander.SchemaNode(
819 colander.String(), name="value", validator=RuleFieldType(cast_to)
820 )
821 )
759 return schema
822 return schema
760
823
761
824
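A usage sketch for `build_rule_schema`; the ruleset and check matrix below are made-up values chosen only to exercise the non-nested branch:

    ruleset = {"field": "http_status", "op": "eq", "value": "500"}
    check_matrix = {"http_status": {"ops": ["eq", "ne"], "type": "int"}}

    schema = build_rule_schema(ruleset, check_matrix)
    schema.deserialize(ruleset)  # passes: "eq" is an allowed op and "500" casts to int
    # a value like "50x" would raise colander.Invalid from RuleFieldType("int")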
@@ -28,148 +28,206 b' def includeme(config):'
28 """Add the application's view handlers.
28 """Add the application's view handlers.
29 """
29 """
30
30
31 config.add_route('/', '/')
31 config.add_route("/", "/")
32 config.add_route('angular_app_ui_ix', '/ui')
32 config.add_route("angular_app_ui_ix", "/ui")
33 config.add_route('angular_app_ui', '/ui/*remainder')
33 config.add_route("angular_app_ui", "/ui/*remainder")
34
34
35 # applications API
35 # applications API
36 config.add_route('applications_no_id', '/applications')
36 config.add_route("applications_no_id", "/applications")
37 config.add_route('applications', '/applications/{resource_id}',
38 factory='appenlight.security.ResourceFactory')
39 config.add_route('applications_property',
40 '/applications/{resource_id}/{key}',
41 factory='appenlight.security.ResourceFactory')
42 config.add_route(
37 config.add_route(
43 'integrations_id',
38 "applications",
44 '/applications/{resource_id}/integrations/{integration}/{action}',
39 "/applications/{resource_id}",
45 factory='appenlight.security.ResourceFactory')
40 factory="appenlight.security.ResourceFactory",
41 )
42 config.add_route(
43 "applications_property",
44 "/applications/{resource_id}/{key}",
45 factory="appenlight.security.ResourceFactory",
46 )
47 config.add_route(
48 "integrations_id",
49 "/applications/{resource_id}/integrations/{integration}/{action}",
50 factory="appenlight.security.ResourceFactory",
51 )
46
52
47 # users API
53 # users API
48 config.add_route('users_self', '/users/self')
54 config.add_route("users_self", "/users/self")
49 config.add_route('users_self_property', '/users/self/{key}')
55 config.add_route("users_self_property", "/users/self/{key}")
50 config.add_route('users_no_id', '/users')
56 config.add_route("users_no_id", "/users")
51 config.add_route('users', '/users/{user_id}')
57 config.add_route("users", "/users/{user_id}")
52 config.add_route('users_property', '/users/{user_id}/{key}')
58 config.add_route("users_property", "/users/{user_id}/{key}")
53
59
54 # events
60 # events
55 config.add_route('events_no_id', '/events')
61 config.add_route("events_no_id", "/events")
56 config.add_route('events', '/events/{event_id}')
62 config.add_route("events", "/events/{event_id}")
57 config.add_route('events_property', '/events/{event_id}/{key}')
63 config.add_route("events_property", "/events/{event_id}/{key}")
58
64
59 # groups
65 # groups
60 config.add_route('groups_no_id', '/groups')
66 config.add_route("groups_no_id", "/groups")
61 config.add_route('groups', '/groups/{group_id}')
67 config.add_route("groups", "/groups/{group_id}")
62 config.add_route('groups_property', '/groups/{group_id}/{key}')
68 config.add_route("groups_property", "/groups/{group_id}/{key}")
63
69
64 # reports API
70 # reports API
65 config.add_route('reports', '/reports')
71 config.add_route("reports", "/reports")
66 config.add_route('slow_reports', '/slow_reports')
72 config.add_route("slow_reports", "/slow_reports")
67 config.add_route('report_groups', '/report_groups/{group_id}',
73 config.add_route(
68 factory='appenlight.security.ResourceReportFactory')
74 "report_groups",
69 config.add_route('report_groups_property',
75 "/report_groups/{group_id}",
70 '/report_groups/{group_id}/{key}',
76 factory="appenlight.security.ResourceReportFactory",
71 factory='appenlight.security.ResourceReportFactory')
77 )
72
78 config.add_route(
73 #generic resource API
79 "report_groups_property",
74 config.add_route('resources_property',
80 "/report_groups/{group_id}/{key}",
75 '/resources/{resource_id}/{key}',
81 factory="appenlight.security.ResourceReportFactory",
76 factory='appenlight.security.ResourceFactory')
82 )
83
84 # generic resource API
85 config.add_route(
86 "resources_property",
87 "/resources/{resource_id}/{key}",
88 factory="appenlight.security.ResourceFactory",
89 )
77
90
78 # plugin configs API
91 # plugin configs API
79 config.add_route('plugin_configs', '/plugin_configs/{plugin_name}',
92 config.add_route(
80 factory='appenlight.security.ResourcePluginMixedFactory')
93 "plugin_configs",
81 config.add_route('plugin_config', '/plugin_configs/{plugin_name}/{id}',
94 "/plugin_configs/{plugin_name}",
82 factory='appenlight.security.ResourcePluginConfigFactory')
95 factory="appenlight.security.ResourcePluginMixedFactory",
96 )
97 config.add_route(
98 "plugin_config",
99 "/plugin_configs/{plugin_name}/{id}",
100 factory="appenlight.security.ResourcePluginConfigFactory",
101 )
83
102
84 # client endpoints API
103 # client endpoints API
85 config.add_route('api_reports', '/api/reports',
104 config.add_route(
86 factory='appenlight.security.APIFactory')
105 "api_reports", "/api/reports", factory="appenlight.security.APIFactory"
87 config.add_route('api_report', '/api/report',
106 )
88 factory='appenlight.security.APIFactory')
107 config.add_route(
89 config.add_route('api_logs', '/api/logs',
108 "api_report", "/api/report", factory="appenlight.security.APIFactory"
90 factory='appenlight.security.APIFactory')
109 )
91 config.add_route('api_log', '/api/log',
110 config.add_route("api_logs", "/api/logs", factory="appenlight.security.APIFactory")
92 factory='appenlight.security.APIFactory')
111 config.add_route("api_log", "/api/log", factory="appenlight.security.APIFactory")
93 config.add_route('api_slow_reports', '/api/slow_reports',
112 config.add_route(
94 factory='appenlight.security.APIFactory')
113 "api_slow_reports",
95 config.add_route('api_request_stats', '/api/request_stats',
114 "/api/slow_reports",
96 factory='appenlight.security.APIFactory')
115 factory="appenlight.security.APIFactory",
97 config.add_route('api_metrics', '/api/metrics',
116 )
98 factory='appenlight.security.APIFactory')
117 config.add_route(
99 config.add_route('api_general_metrics', '/api/general_metrics',
118 "api_request_stats",
100 factory='appenlight.security.APIFactory')
119 "/api/request_stats",
101 config.add_route('api_general_metric', '/api/general_metric',
120 factory="appenlight.security.APIFactory",
102 factory='appenlight.security.APIFactory')
121 )
103 config.add_route('api_airbrake', '/notifier_api/v2/{action}',
122 config.add_route(
104 factory='appenlight.security.AirbrakeV2APIFactory')
123 "api_metrics", "/api/metrics", factory="appenlight.security.APIFactory"
105 config.add_route('api_sentry', '/api/{project}/store',
124 )
106 factory='appenlight.security.SentryAPIFactory')
125 config.add_route(
107 config.add_route('api_sentry_slash', '/api/{project}/store/',
126 "api_general_metrics",
108 factory='appenlight.security.SentryAPIFactory')
127 "/api/general_metrics",
128 factory="appenlight.security.APIFactory",
129 )
130 config.add_route(
131 "api_general_metric",
132 "/api/general_metric",
133 factory="appenlight.security.APIFactory",
134 )
135 config.add_route(
136 "api_airbrake",
137 "/notifier_api/v2/{action}",
138 factory="appenlight.security.AirbrakeV2APIFactory",
139 )
140 config.add_route(
141 "api_sentry",
142 "/api/{project}/store",
143 factory="appenlight.security.SentryAPIFactory",
144 )
145 config.add_route(
146 "api_sentry_slash",
147 "/api/{project}/store/",
148 factory="appenlight.security.SentryAPIFactory",
149 )
109
150
110 # other
151 # other
111 config.add_route('register', '/register')
152 config.add_route("register", "/register")
112 config.add_route('register_ajax', '/register_ajax')
153 config.add_route("register_ajax", "/register_ajax")
113 config.add_route('lost_password', '/lost_password')
154 config.add_route("lost_password", "/lost_password")
114 config.add_route('lost_password_generate', '/lost_password_generate')
155 config.add_route("lost_password_generate", "/lost_password_generate")
115 config.add_route('logs_no_id', '/logs')
156 config.add_route("logs_no_id", "/logs")
116 config.add_route('forbidden', '/forbidden')
157 config.add_route("forbidden", "/forbidden")
117 config.add_route('test', '/test/{action}')
158 config.add_route("test", "/test/{action}")
118 config.add_route('section_view', '/sections/{section}/{view}')
159 config.add_route("section_view", "/sections/{section}/{view}")
119
160
120 config.add_view('appenlight.views.forbidden_view',
161 config.add_view(
121 context='pyramid.exceptions.Forbidden',
162 "appenlight.views.forbidden_view",
122 renderer='appenlight:templates/forbidden.jinja2',
163 context="pyramid.exceptions.Forbidden",
123 permission='__no_permission_required__')
164 renderer="appenlight:templates/forbidden.jinja2",
124 config.add_view('appenlight.views.not_found_view',
165 permission="__no_permission_required__",
125 context='pyramid.exceptions.NotFound',
166 )
126 renderer='appenlight:templates/not_found.jinja2',
167 config.add_view(
127 permission='__no_permission_required__')
168 "appenlight.views.not_found_view",
128 config.add_view('appenlight.views.csrf_view',
169 context="pyramid.exceptions.NotFound",
129 context='appenlight.lib.request.CSRFException',
170 renderer="appenlight:templates/not_found.jinja2",
130 renderer='appenlight:templates/forbidden.jinja2',
171 permission="__no_permission_required__",
131 permission='__no_permission_required__')
172 )
132 config.add_view('appenlight.views.csrf_view',
173 config.add_view(
133 context='appenlight.forms.CSRFException',
174 "appenlight.views.csrf_view",
134 renderer='appenlight:templates/forbidden.jinja2',
175 context="appenlight.lib.request.CSRFException",
135 permission='__no_permission_required__')
176 renderer="appenlight:templates/forbidden.jinja2",
136 config.add_view('appenlight.views.colander_invalid_view',
177 permission="__no_permission_required__",
137 context='colander.Invalid',
178 )
138 renderer='json',
179 config.add_view(
139 permission='__no_permission_required__')
180 "appenlight.views.csrf_view",
140 config.add_view('appenlight.views.bad_json_view',
181 context="appenlight.forms.CSRFException",
141 context='appenlight.lib.request.JSONException',
182 renderer="appenlight:templates/forbidden.jinja2",
142 renderer='json',
183 permission="__no_permission_required__",
143 permission='__no_permission_required__')
184 )
185 config.add_view(
186 "appenlight.views.colander_invalid_view",
187 context="colander.Invalid",
188 renderer="json",
189 permission="__no_permission_required__",
190 )
191 config.add_view(
192 "appenlight.views.bad_json_view",
193 context="appenlight.lib.request.JSONException",
194 renderer="json",
195 permission="__no_permission_required__",
196 )
144
197
145 # handle authomatic
198 # handle authomatic
146 config.add_route('social_auth', '/social_auth/{provider}')
199 config.add_route("social_auth", "/social_auth/{provider}")
147 config.add_route('social_auth_abort', '/social_auth/{provider}/abort')
200 config.add_route("social_auth_abort", "/social_auth/{provider}/abort")
148
201
149 # only use in production
202 # only use in production
150 if (config.registry.settings.get('pyramid.reload_templates') is False
203 if (
151 and config.registry.settings.get('pyramid.debug_templates') is False):
204 config.registry.settings.get("pyramid.reload_templates") is False
152 config.add_view('appenlight.views.error_view',
205 and config.registry.settings.get("pyramid.debug_templates") is False
153 context=Exception,
206 ):
154 renderer='appenlight:templates/error.jinja2',
207 config.add_view(
155 permission='__no_permission_required__')
208 "appenlight.views.error_view",
209 context=Exception,
210 renderer="appenlight:templates/error.jinja2",
211 permission="__no_permission_required__",
212 )
156
213
157
214
158 def bad_json_view(exc, request):
215 def bad_json_view(exc, request):
159 request.environ['appenlight.ignore_error'] = 1
216 request.environ["appenlight.ignore_error"] = 1
160 request.response.headers.add('X-AppEnlight-Error', 'Incorrect JSON')
217 request.response.headers.add("X-AppEnlight-Error", "Incorrect JSON")
161 request.response.status_int = 400
218 request.response.status_int = 400
162 return "Incorrect JSON"
219 return "Incorrect JSON"
163
220
164
221
165 def colander_invalid_view(exc, request):
222 def colander_invalid_view(exc, request):
166 request.environ['appenlight.ignore_error'] = 1
223 request.environ["appenlight.ignore_error"] = 1
167 log.warning('API version %s, %s' % (
224 log.warning(
168 request.params.get('protocol_version'),
225 "API version %s, %s"
169 request.context.resource))
226 % (request.params.get("protocol_version"), request.context.resource)
170 log.warning('Invalid payload sent')
227 )
228 log.warning("Invalid payload sent")
171 errors = exc.asdict()
229 errors = exc.asdict()
172 request.response.headers.add('X-AppEnlight-Error', 'Invalid payload sent')
230 request.response.headers.add("X-AppEnlight-Error", "Invalid payload sent")
173 request.response.status_int = 422
231 request.response.status_int = 422
174 return errors
232 return errors
175
233
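For orientation on the `factory=` arguments used throughout `includeme`: Pyramid resolves the dotted name to a root factory and instantiates it per request to build the context object that permission checks run against. A minimal, self-contained sketch (the factory here is a stand-in, not AppEnlight's real `ResourceFactory`):

    from pyramid.config import Configurator

    class DummyFactory:
        # stand-in for appenlight.security.ResourceFactory
        def __init__(self, request):
            self.resource_id = request.matchdict.get("resource_id")

    config = Configurator()
    config.add_route("applications", "/applications/{resource_id}", factory=DummyFactory)
    app = config.make_wsgi_app()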
@@ -177,11 +235,12 b' def colander_invalid_view(exc, request):'
177 def csrf_view(exc, request):
235 def csrf_view(exc, request):
178 request.response.status = 403
236 request.response.status = 403
179 from ..models import DBSession
237 from ..models import DBSession
238
180 request.environ["appenlight.ignore_error"] = 1
239 request.environ["appenlight.ignore_error"] = 1
181 request.response.headers.add('X-AppEnlight-Error', str(exc))
240 request.response.headers.add("X-AppEnlight-Error", str(exc))
182 if request.user:
241 if request.user:
183 request.user = DBSession.merge(request.user)
242 request.user = DBSession.merge(request.user)
184 return {'forbidden_view': True, 'csrf': True}
243 return {"forbidden_view": True, "csrf": True}
185
244
186
245
187 def not_found_view(exc, request):
246 def not_found_view(exc, request):
@@ -192,9 +251,10 b' def not_found_view(exc, request):'
192 request.user = DBSession.merge(request.user)
251 request.user = DBSession.merge(request.user)
193
252
194 if request.user:
253 if request.user:
195 request.response.headers['x-appenlight-uid'] = '%s' % request.user.id
254 request.response.headers["x-appenlight-uid"] = "%s" % request.user.id
196 request.response.headers['x-appenlight-flash'] = json.dumps(
255 request.response.headers["x-appenlight-flash"] = json.dumps(
197 helpers.get_flash(request))
256 helpers.get_flash(request)
257 )
198
258
199 return {}
259 return {}
200
260
@@ -205,34 +265,34 b' def forbidden_view(exc, request):'
205
265
206 if request.user:
266 if request.user:
207 request.user = DBSession.merge(request.user)
267 request.user = DBSession.merge(request.user)
208 if request.path.startswith('/api'):
268 if request.path.startswith("/api"):
209 logging.warning('Wrong API Key sent')
269 logging.warning("Wrong API Key sent")
210 logging.info(request.url)
270 logging.info(request.url)
211 logging.info(
271 logging.info("\n".join(["%s:%s" % (k, v) for k, v in request.headers.items()]))
212 '\n'.join(
213 ['%s:%s' % (k, v) for k, v in request.headers.items()]))
214 resp = Response(
272 resp = Response(
215 "Wrong api key",
273 "Wrong api key", headers=(("X-AppEnlight-Error", "Incorrect API key"),)
216 headers=(('X-AppEnlight-Error', 'Incorrect API key',),))
274 )
217 resp.status_int = 403
275 resp.status_int = 403
218 return resp
276 return resp
219
277
220 if request.user:
278 if request.user:
221 request.response.headers['x-appenlight-uid'] = '%s' % request.user.id
279 request.response.headers["x-appenlight-uid"] = "%s" % request.user.id
222 request.response.headers['x-appenlight-flash'] = json.dumps(
280 request.response.headers["x-appenlight-flash"] = json.dumps(
223 helpers.get_flash(request))
281 helpers.get_flash(request)
282 )
224 request.response.status = 403
283 request.response.status = 403
225 return {'forbidden_view': True}
284 return {"forbidden_view": True}
226
285
227
286
228 def error_view(exc, request):
287 def error_view(exc, request):
229 from ..models import DBSession
288 from ..models import DBSession
289
230 if request.user:
290 if request.user:
231 request.user = DBSession.merge(request.user)
291 request.user = DBSession.merge(request.user)
232 if request.path.startswith('/api'):
292 if request.path.startswith("/api"):
233 resp = Response(
293 resp = Response(
234 "There was a problem handling your request please try again",
294 "There was a problem handling your request please try again",
235 headers=(('X-AppEnlight-Error', 'Problem handling request',),)
295 headers=(("X-AppEnlight-Error", "Problem handling request"),),
236 )
296 )
237 resp.status_int = 500
297 resp.status_int = 500
238 return resp
298 return resp
@@ -24,8 +24,8 b' log = logging.getLogger(__name__)'
24 def includeme(config):
24 def includeme(config):
25 """Add the application's view handlers.
25 """Add the application's view handlers.
26 """
26 """
27 config.add_route('admin_users_no_id', '/admin/users/')
27 config.add_route("admin_users_no_id", "/admin/users/")
28 config.add_route('admin_users', '/admin/users/{user_id}')
28 config.add_route("admin_users", "/admin/users/{user_id}")
29 config.add_route('admin', '/admin/{action}')
29 config.add_route("admin", "/admin/{action}")
30 config.add_route('admin_configs', '/configs')
30 config.add_route("admin_configs", "/configs")
31 config.add_route('admin_config', '/configs/{key}/{section}')
31 config.add_route("admin_config", "/configs/{key}/{section}")
@@ -34,31 +34,38 b' def bytes2human(total):'
34 mega = 1024.0 ** 2
34 mega = 1024.0 ** 2
35 kilo = 1024.0
35 kilo = 1024.0
36 if giga <= total:
36 if giga <= total:
37 return '{:0.1f}G'.format(total / giga)
37 return "{:0.1f}G".format(total / giga)
38 elif mega <= total:
38 elif mega <= total:
39 return '{:0.1f}M'.format(total / mega)
39 return "{:0.1f}M".format(total / mega)
40 else:
40 else:
41 return '{:0.1f}K'.format(total / kilo)
41 return "{:0.1f}K".format(total / kilo)
42
42
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 @view_config(route_name='section_view',
47 @view_config(
48 match_param=['section=admin_section', 'view=system'],
48 route_name="section_view",
49 renderer='json', permission='root_administration')
49 match_param=["section=admin_section", "view=system"],
50 renderer="json",
51 permission="root_administration",
52 )
50 def system(request):
53 def system(request):
51 current_time = datetime.utcnow(). \
54 current_time = datetime.utcnow().replace(second=0, microsecond=0) - timedelta(
52 replace(second=0, microsecond=0) - timedelta(minutes=1)
55 minutes=1
56 )
53 # global app counter
57 # global app counter
54 processed_reports = request.registry.redis_conn.get(
58 processed_reports = request.registry.redis_conn.get(
55 REDIS_KEYS['counters']['reports_per_minute'].format(current_time))
59 REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
60 )
56 processed_reports = int(processed_reports) if processed_reports else 0
61 processed_reports = int(processed_reports) if processed_reports else 0
57 processed_logs = request.registry.redis_conn.get(
62 processed_logs = request.registry.redis_conn.get(
58 REDIS_KEYS['counters']['logs_per_minute'].format(current_time))
63 REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
64 )
59 processed_logs = int(processed_logs) if processed_logs else 0
65 processed_logs = int(processed_logs) if processed_logs else 0
60 processed_metrics = request.registry.redis_conn.get(
66 processed_metrics = request.registry.redis_conn.get(
61 REDIS_KEYS['counters']['metrics_per_minute'].format(current_time))
67 REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
68 )
62 processed_metrics = int(processed_metrics) if processed_metrics else 0
69 processed_metrics = int(processed_metrics) if processed_metrics else 0
63
70
64 waiting_reports = 0
71 waiting_reports = 0
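Quick worked examples for the `bytes2human` helper above (assuming `giga = 1024.0 ** 3` on the line just above the shown hunk):

    bytes2human(5 * 1024 ** 3)  # '5.0G' (giga <= total)
    bytes2human(1536 * 1024)    # '1.5M' (mega <= total < giga)
    bytes2human(512)            # '0.5K' (everything smaller)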
@@ -66,61 +73,62 b' def system(request):'
66 waiting_metrics = 0
73 waiting_metrics = 0
67 waiting_other = 0
74 waiting_other = 0
68
75
69 if 'redis' in request.registry.settings['celery.broker_type']:
76 if "redis" in request.registry.settings["celery.broker_type"]:
70 redis_client = redis.StrictRedis.from_url(
77 redis_client = redis.StrictRedis.from_url(
71 request.registry.settings['celery.broker_url'])
78 request.registry.settings["celery.broker_url"]
72 waiting_reports = redis_client.llen('reports')
79 )
73 waiting_logs = redis_client.llen('logs')
80 waiting_reports = redis_client.llen("reports")
74 waiting_metrics = redis_client.llen('metrics')
81 waiting_logs = redis_client.llen("logs")
75 waiting_other = redis_client.llen('default')
82 waiting_metrics = redis_client.llen("metrics")
83 waiting_other = redis_client.llen("default")
76
84
77 # process
85 # process
78 def replace_inf(val):
86 def replace_inf(val):
79 return val if val != psutil.RLIM_INFINITY else 'unlimited'
87 return val if val != psutil.RLIM_INFINITY else "unlimited"
80
88
81 p = psutil.Process()
89 p = psutil.Process()
82 fd = p.rlimit(psutil.RLIMIT_NOFILE)
90 fd = p.rlimit(psutil.RLIMIT_NOFILE)
83 memlock = p.rlimit(psutil.RLIMIT_MEMLOCK)
91 memlock = p.rlimit(psutil.RLIMIT_MEMLOCK)
84 self_info = {
92 self_info = {
85 'fds': {'soft': replace_inf(fd[0]),
93 "fds": {"soft": replace_inf(fd[0]), "hard": replace_inf(fd[1])},
86 'hard': replace_inf(fd[1])},
94 "memlock": {"soft": replace_inf(memlock[0]), "hard": replace_inf(memlock[1])},
87 'memlock': {'soft': replace_inf(memlock[0]),
88 'hard': replace_inf(memlock[1])},
89 }
95 }
90
96
91 # disks
97 # disks
92 disks = []
98 disks = []
93 for part in psutil.disk_partitions(all=False):
99 for part in psutil.disk_partitions(all=False):
94 if os.name == 'nt':
100 if os.name == "nt":
95 if 'cdrom' in part.opts or part.fstype == '':
101 if "cdrom" in part.opts or part.fstype == "":
96 continue
102 continue
97 usage = psutil.disk_usage(part.mountpoint)
103 usage = psutil.disk_usage(part.mountpoint)
98 disks.append({
104 disks.append(
99 'device': part.device,
105 {
100 'total': bytes2human(usage.total),
106 "device": part.device,
101 'used': bytes2human(usage.used),
107 "total": bytes2human(usage.total),
102 'free': bytes2human(usage.free),
108 "used": bytes2human(usage.used),
103 'percentage': int(usage.percent),
109 "free": bytes2human(usage.free),
104 'mountpoint': part.mountpoint,
110 "percentage": int(usage.percent),
105 'fstype': part.fstype
111 "mountpoint": part.mountpoint,
106 })
112 "fstype": part.fstype,
113 }
114 )
107
115
108 # memory
116 # memory
109 memory_v = psutil.virtual_memory()
117 memory_v = psutil.virtual_memory()
110 memory_s = psutil.swap_memory()
118 memory_s = psutil.swap_memory()
111
119
112 memory = {
120 memory = {
113 'total': bytes2human(memory_v.total),
121 "total": bytes2human(memory_v.total),
114 'available': bytes2human(memory_v.available),
122 "available": bytes2human(memory_v.available),
115 'percentage': memory_v.percent,
123 "percentage": memory_v.percent,
116 'used': bytes2human(memory_v.used),
124 "used": bytes2human(memory_v.used),
117 'free': bytes2human(memory_v.free),
125 "free": bytes2human(memory_v.free),
118 'active': bytes2human(memory_v.active),
126 "active": bytes2human(memory_v.active),
119 'inactive': bytes2human(memory_v.inactive),
127 "inactive": bytes2human(memory_v.inactive),
120 'buffers': bytes2human(memory_v.buffers),
128 "buffers": bytes2human(memory_v.buffers),
121 'cached': bytes2human(memory_v.cached),
129 "cached": bytes2human(memory_v.cached),
122 'swap_total': bytes2human(memory_s.total),
130 "swap_total": bytes2human(memory_s.total),
123 'swap_used': bytes2human(memory_s.used)
131 "swap_used": bytes2human(memory_s.used),
124 }
132 }
125
133
126 # load
134 # load
@@ -133,60 +141,63 b' def system(request):'
133 mem_used = p.memory_info().rss
141 mem_used = p.memory_info().rss
134 if mem_used < min_mem:
142 if mem_used < min_mem:
135 continue
143 continue
136 process_info.append({'owner': p.username(),
144 process_info.append(
137 'pid': p.pid,
145 {
138 'cpu': round(p.cpu_percent(interval=0), 1),
146 "owner": p.username(),
139 'mem_percentage': round(p.memory_percent(),1),
147 "pid": p.pid,
140 'mem_usage': bytes2human(mem_used),
148 "cpu": round(p.cpu_percent(interval=0), 1),
141 'name': p.name(),
149 "mem_percentage": round(p.memory_percent(), 1),
142 'command': ' '.join(p.cmdline())
150 "mem_usage": bytes2human(mem_used),
143 })
151 "name": p.name(),
144 process_info = sorted(process_info, key=lambda x: x['mem_percentage'],
152 "command": " ".join(p.cmdline()),
145 reverse=True)
153 }
154 )
155 process_info = sorted(process_info, key=lambda x: x["mem_percentage"], reverse=True)
146
156
147 # pg tables
157 # pg tables
148
158
149 db_size_query = '''
159 db_size_query = """
150 SELECT tablename, pg_total_relation_size(tablename::text) size
160 SELECT tablename, pg_total_relation_size(tablename::text) size
151 FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
161 FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
152 tablename NOT LIKE 'sql_%' ORDER BY size DESC;'''
162 tablename NOT LIKE 'sql_%' ORDER BY size DESC;"""
153
163
154 db_tables = []
164 db_tables = []
155 for row in DBSession.execute(db_size_query):
165 for row in DBSession.execute(db_size_query):
156 db_tables.append({"size_human": bytes2human(row.size),
166 db_tables.append(
157 "table_name": row.tablename})
167 {"size_human": bytes2human(row.size), "table_name": row.tablename}
168 )
158
169
159 # es indices
170 # es indices
160 es_indices = []
171 es_indices = []
161 result = Datastores.es.indices.stats(metric=['store', 'docs'])
172 result = Datastores.es.indices.stats(metric=["store", "docs"])
162 for ix, stats in result['indices'].items():
173 for ix, stats in result["indices"].items():
163 size = stats['primaries']['store']['size_in_bytes']
174 size = stats["primaries"]["store"]["size_in_bytes"]
164 es_indices.append({'name': ix,
175 es_indices.append({"name": ix, "size": size, "size_human": bytes2human(size)})
165 'size': size,
166 'size_human': bytes2human(size)})
167
176
168 # packages
177 # packages
169
178
170 packages = ({'name': p.project_name, 'version': p.version}
179 packages = (
171 for p in pkg_resources.working_set)
180 {"name": p.project_name, "version": p.version}
172
181 for p in pkg_resources.working_set
173 return {'db_tables': db_tables,
182 )
174 'es_indices': sorted(es_indices,
183
175 key=lambda x: x['size'], reverse=True),
184 return {
176 'process_info': process_info,
185 "db_tables": db_tables,
177 'system_load': system_load,
186 "es_indices": sorted(es_indices, key=lambda x: x["size"], reverse=True),
178 'disks': disks,
187 "process_info": process_info,
179 'memory': memory,
188 "system_load": system_load,
180 'packages': sorted(packages, key=lambda x: x['name'].lower()),
189 "disks": disks,
181 'current_time': current_time,
190 "memory": memory,
182 'queue_stats': {
191 "packages": sorted(packages, key=lambda x: x["name"].lower()),
183 'processed_reports': processed_reports,
192 "current_time": current_time,
184 'processed_logs': processed_logs,
193 "queue_stats": {
185 'processed_metrics': processed_metrics,
194 "processed_reports": processed_reports,
186 'waiting_reports': waiting_reports,
195 "processed_logs": processed_logs,
187 'waiting_logs': waiting_logs,
196 "processed_metrics": processed_metrics,
188 'waiting_metrics': waiting_metrics,
197 "waiting_reports": waiting_reports,
189 'waiting_other': waiting_other
198 "waiting_logs": waiting_logs,
190 },
199 "waiting_metrics": waiting_metrics,
191 'self_info': self_info
200 "waiting_other": waiting_other,
192 }
201 },
202 "self_info": self_info,
203 }
@@ -23,25 +23,33 b' import logging'
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 @view_config(route_name='admin_configs', renderer='json',
26 @view_config(
27 permission='root_administration', request_method='GET')
27 route_name="admin_configs",
28 renderer="json",
29 permission="root_administration",
30 request_method="GET",
31 )
28 def query(request):
32 def query(request):
29 ConfigService.setup_default_values()
33 ConfigService.setup_default_values()
30 pairs = []
34 pairs = []
31 for value in request.GET.getall('filter'):
35 for value in request.GET.getall("filter"):
32 split = value.split(':', 1)
36 split = value.split(":", 1)
33 pairs.append({'key': split[0], 'section': split[1]})
37 pairs.append({"key": split[0], "section": split[1]})
34 return [c for c in ConfigService.filtered_key_and_section(pairs)]
38 return [c for c in ConfigService.filtered_key_and_section(pairs)]
35
39
36
40
37 @view_config(route_name='admin_config', renderer='json',
41 @view_config(
38 permission='root_administration', request_method='POST')
42 route_name="admin_config",
43 renderer="json",
44 permission="root_administration",
45 request_method="POST",
46 )
39 def post(request):
47 def post(request):
40 row = ConfigService.by_key_and_section(
48 row = ConfigService.by_key_and_section(
41 key=request.matchdict.get('key'),
49 key=request.matchdict.get("key"), section=request.matchdict.get("section")
42 section=request.matchdict.get('section'))
50 )
43 if not row:
51 if not row:
44 raise HTTPNotFound()
52 raise HTTPNotFound()
45 row.value = None
53 row.value = None
46 row.value = request.unsafe_json_body['value']
54 row.value = request.unsafe_json_body["value"]
47 return row
55 return row
@@ -48,101 +48,119 b' def get_partition_stats():'
48
48
49 def add_key(key, holder):
49 def add_key(key, holder):
50 if not ix_time in holder:
50 if not ix_time in holder:
51 holder[ix_time] = {'pg': [], 'elasticsearch': []}
51 holder[ix_time] = {"pg": [], "elasticsearch": []}
52
52
53 for partition in list(Datastores.es.indices.get_alias('rcae*')):
53 for partition in list(Datastores.es.indices.get_alias("rcae*")):
54 if not partition.startswith('rcae'):
54 if not partition.startswith("rcae"):
55 continue
55 continue
56 split_data = partition.split('_')
56 split_data = partition.split("_")
57 permanent = False
57 permanent = False
58 # if we don't have a day then treat it as a permanent partition
58 # if we don't have a day then treat it as a permanent partition
59 if False in list(map(is_int, split_data[-3:])):
59 if False in list(map(is_int, split_data[-3:])):
60 ix_time = datetime(year=int(split_data[-2]),
60 ix_time = datetime(
61 month=int(split_data[-1]),
61 year=int(split_data[-2]), month=int(split_data[-1]), day=1
62 day=1).date()
62 ).date()
63 permanent = True
63 permanent = True
64 else:
64 else:
65 ix_time = datetime(year=int(split_data[-3]),
65 ix_time = datetime(
66 month=int(split_data[-2]),
66 year=int(split_data[-3]),
67 day=int(split_data[-1])).date()
67 month=int(split_data[-2]),
68 day=int(split_data[-1]),
69 ).date()
68
70
69 ix_time = str(ix_time)
71 ix_time = str(ix_time)
70 if permanent:
72 if permanent:
71 add_key(ix_time, permanent_partitions)
73 add_key(ix_time, permanent_partitions)
72 if ix_time not in permanent_partitions:
74 if ix_time not in permanent_partitions:
73 permanent_partitions[ix_time]['elasticsearch'] = []
75 permanent_partitions[ix_time]["elasticsearch"] = []
74 permanent_partitions[ix_time]['elasticsearch'].append(partition)
76 permanent_partitions[ix_time]["elasticsearch"].append(partition)
75 else:
77 else:
76 add_key(ix_time, daily_partitions)
78 add_key(ix_time, daily_partitions)
77 if ix_time not in daily_partitions:
79 if ix_time not in daily_partitions:
78 daily_partitions[ix_time]['elasticsearch'] = []
80 daily_partitions[ix_time]["elasticsearch"] = []
79 daily_partitions[ix_time]['elasticsearch'].append(partition)
81 daily_partitions[ix_time]["elasticsearch"].append(partition)
80
82
81 for row in DBSession.execute(table_query):
83 for row in DBSession.execute(table_query):
82 splitted = row['table_name'].split('_')
84 splitted = row["table_name"].split("_")
83 if 'p' in splitted:
85 if "p" in splitted:
84 # dealing with partition
86 # dealing with partition
85 split_data = [int(x) for x in splitted[splitted.index('p') + 1:]]
87 split_data = [int(x) for x in splitted[splitted.index("p") + 1 :]]
86 if len(split_data) == 3:
88 if len(split_data) == 3:
87 ix_time = datetime(split_data[0], split_data[1],
89 ix_time = datetime(split_data[0], split_data[1], split_data[2]).date()
88 split_data[2]).date()
89 ix_time = str(ix_time)
90 ix_time = str(ix_time)
90 add_key(ix_time, daily_partitions)
91 add_key(ix_time, daily_partitions)
91 daily_partitions[ix_time]['pg'].append(row['table_name'])
92 daily_partitions[ix_time]["pg"].append(row["table_name"])
92 else:
93 else:
93 ix_time = datetime(split_data[0], split_data[1], 1).date()
94 ix_time = datetime(split_data[0], split_data[1], 1).date()
94 ix_time = str(ix_time)
95 ix_time = str(ix_time)
95 add_key(ix_time, permanent_partitions)
96 add_key(ix_time, permanent_partitions)
96 permanent_partitions[ix_time]['pg'].append(row['table_name'])
97 permanent_partitions[ix_time]["pg"].append(row["table_name"])
97
98
98 return permanent_partitions, daily_partitions
99 return permanent_partitions, daily_partitions
99
100
100
101
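
get_partition_stats above classifies Elasticsearch indices by the date suffix of their names: three trailing integer segments mean a daily index, two mean a "permanent" monthly one. A standalone sketch of that classification (the index names are illustrative, following the rcae* scheme):

    from datetime import date

    def parse_partition_name(name):
        parts = name.split("_")
        try:
            # daily indices end in _YYYY_MM_DD
            y, m, d = (int(p) for p in parts[-3:])
            return date(y, m, d), False
        except ValueError:
            # otherwise assume a permanent index ending in _YYYY_MM
            y, m = int(parts[-2]), int(parts[-1])
            return date(y, m, 1), True

    print(parse_partition_name("rcae_l_2018_07_04"))  # (datetime.date(2018, 7, 4), False)
    print(parse_partition_name("rcae_r_2018_07"))     # (datetime.date(2018, 7, 1), True)
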
101 @view_config(route_name='section_view', permission='root_administration',
102 @view_config(
102 match_param=['section=admin_section', 'view=partitions'],
103 route_name="section_view",
103 renderer='json', request_method='GET')
104 permission="root_administration",
105 match_param=["section=admin_section", "view=partitions"],
106 renderer="json",
107 request_method="GET",
108 )
104 def index(request):
109 def index(request):
105 permanent_partitions, daily_partitions = get_partition_stats()
110 permanent_partitions, daily_partitions = get_partition_stats()
106
111
107 return {"permanent_partitions": sorted(list(permanent_partitions.items()),
112 return {
108 key=lambda x: x[0], reverse=True),
113 "permanent_partitions": sorted(
109 "daily_partitions": sorted(list(daily_partitions.items()),
114 list(permanent_partitions.items()), key=lambda x: x[0], reverse=True
110 key=lambda x: x[0], reverse=True)}
115 ),
111
116 "daily_partitions": sorted(
112
117 list(daily_partitions.items()), key=lambda x: x[0], reverse=True
113 @view_config(route_name='section_view', request_method='POST',
118 ),
114 match_param=['section=admin_section', 'view=partitions_remove'],
119 }
115 renderer='json', permission='root_administration')
120
121
122 @view_config(
123 route_name="section_view",
124 request_method="POST",
125 match_param=["section=admin_section", "view=partitions_remove"],
126 renderer="json",
127 permission="root_administration",
128 )
116 def partitions_remove(request):
129 def partitions_remove(request):
117 permanent_partitions, daily_partitions = get_partition_stats()
130 permanent_partitions, daily_partitions = get_partition_stats()
118 pg_partitions = []
131 pg_partitions = []
119 es_partitions = []
132 es_partitions = []
120 for item in list(permanent_partitions.values()) + list(daily_partitions.values()):
133 for item in list(permanent_partitions.values()) + list(daily_partitions.values()):
121 es_partitions.extend(item['elasticsearch'])
134 es_partitions.extend(item["elasticsearch"])
122 pg_partitions.extend(item['pg'])
135 pg_partitions.extend(item["pg"])
123 FormCls = get_partition_deletion_form(es_partitions, pg_partitions)
136 FormCls = get_partition_deletion_form(es_partitions, pg_partitions)
124 form = FormCls(es_index=request.unsafe_json_body['es_indices'],
137 form = FormCls(
125 pg_index=request.unsafe_json_body['pg_indices'],
138 es_index=request.unsafe_json_body["es_indices"],
126 confirm=request.unsafe_json_body['confirm'],
139 pg_index=request.unsafe_json_body["pg_indices"],
127 csrf_context=request)
140 confirm=request.unsafe_json_body["confirm"],
141 csrf_context=request,
142 )
128 if form.validate():
143 if form.validate():
129 for ix in form.data['es_index']:
144 for ix in form.data["es_index"]:
130 log.warning('deleting ES partition: {}'.format(ix))
145 log.warning("deleting ES partition: {}".format(ix))
131 Datastores.es.indices.delete(ix)
146 Datastores.es.indices.delete(ix)
132 for ix in form.data['pg_index']:
147 for ix in form.data["pg_index"]:
133 log.warning('deleting PG partition: {}'.format(ix))
148 log.warning("deleting PG partition: {}".format(ix))
134 stmt = sa.text('DROP TABLE %s CASCADE' % sa.text(ix))
149 stmt = sa.text("DROP TABLE %s CASCADE" % sa.text(ix))
135 session = DBSession()
150 session = DBSession()
136 session.connection().execute(stmt)
151 session.connection().execute(stmt)
137 mark_changed(session)
152 mark_changed(session)
138
153
139 for field, error in form.errors.items():
154 for field, error in form.errors.items():
140 msg = '%s: %s' % (field, error[0])
155 msg = "%s: %s" % (field, error[0])
141 request.session.flash(msg, 'error')
156 request.session.flash(msg, "error")
142
157
143 permanent_partitions, daily_partitions = get_partition_stats()
158 permanent_partitions, daily_partitions = get_partition_stats()
144 return {
159 return {
145 "permanent_partitions": sorted(
160 "permanent_partitions": sorted(
146 list(permanent_partitions.items()), key=lambda x: x[0], reverse=True),
161 list(permanent_partitions.items()), key=lambda x: x[0], reverse=True
162 ),
147 "daily_partitions": sorted(
163 "daily_partitions": sorted(
148 list(daily_partitions.items()), key=lambda x: x[0], reverse=True)}
164 list(daily_partitions.items()), key=lambda x: x[0], reverse=True
165 ),
166 }
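
In partitions_remove above, the form class generated from the enumerated partitions is what makes the later raw DROP TABLE interpolation defensible: only names that already appear in the listing can validate. A minimal standalone sketch of that whitelist idea (the table names are illustrative):

    known_pg = {"logs_p_2018_07_04", "reports_p_2018_07_04"}

    def drop_statements(requested, known):
        # identifiers cannot be passed as bound SQL parameters, so the only
        # safe option is refusing anything outside the enumerated set
        unknown = set(requested) - known
        if unknown:
            raise ValueError("unknown partitions: %s" % sorted(unknown))
        return ["DROP TABLE %s CASCADE" % name for name in requested]

    print(drop_statements(["logs_p_2018_07_04"], known_pg))
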
@@ -24,13 +24,16 b' import logging'
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 @view_config(route_name='section_view', permission='root_administration',
27 @view_config(
28 match_param=['section=admin_section', 'view=relogin_user'],
28 route_name="section_view",
29 renderer='json', request_method='GET')
29 permission="root_administration",
30 match_param=["section=admin_section", "view=relogin_user"],
31 renderer="json",
32 request_method="GET",
33 )
30 def relogin_to_user(request):
34 def relogin_to_user(request):
31 user = UserService.by_id(request.GET.get('user_id'))
35 user = UserService.by_id(request.GET.get("user_id"))
32 if not user:
36 if not user:
33 return HTTPNotFound()
37 return HTTPNotFound()
34 headers = security.remember(request, user.id)
38 headers = security.remember(request, user.id)
35 return HTTPFound(location=request.route_url('/'),
39 return HTTPFound(location=request.route_url("/"), headers=headers)
36 headers=headers)
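
relogin_to_user above is the standard Pyramid impersonation pattern: issue the remember headers for the target user and attach them to a redirect. A condensed sketch of just that pattern (assuming a Pyramid request and the '/' route used in the code above):

    from pyramid import security
    from pyramid.httpexceptions import HTTPFound

    def impersonate(request, user_id):
        # write the auth cookie/headers for the target user id ...
        headers = security.remember(request, user_id)
        # ... and hand them back on the redirect response
        return HTTPFound(location=request.route_url("/"), headers=headers)
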
@@ -34,158 +34,167 b' from appenlight.lib.utils.airbrake import parse_airbrake_xml'
34 from appenlight.lib.utils.date_utils import convert_date
34 from appenlight.lib.utils.date_utils import convert_date
35 from appenlight.lib.utils.sentry import parse_sentry_event
35 from appenlight.lib.utils.sentry import parse_sentry_event
36 from appenlight.lib.request import JSONException
36 from appenlight.lib.request import JSONException
37 from appenlight.validators import (LogListSchema,
37 from appenlight.validators import (
38 MetricsListSchema,
38 LogListSchema,
39 GeneralMetricsListSchema,
39 MetricsListSchema,
40 GeneralMetricsPermanentListSchema,
40 GeneralMetricsListSchema,
41 GeneralMetricSchema,
41 GeneralMetricsPermanentListSchema,
42 GeneralMetricPermanentSchema,
42 GeneralMetricSchema,
43 LogListPermanentSchema,
43 GeneralMetricPermanentSchema,
44 ReportListSchema_0_5,
44 LogListPermanentSchema,
45 LogSchema,
45 ReportListSchema_0_5,
46 LogSchemaPermanent,
46 LogSchema,
47 ReportSchema_0_5)
47 LogSchemaPermanent,
48 ReportSchema_0_5,
49 )
48
50
49 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
50
52
51
53
52 @view_config(route_name='api_logs', renderer='string', permission='create',
54 @view_config(
53 require_csrf=False)
55 route_name="api_logs", renderer="string", permission="create", require_csrf=False
54 @view_config(route_name='api_log', renderer='string', permission='create',
56 )
55 require_csrf=False)
57 @view_config(
58 route_name="api_log", renderer="string", permission="create", require_csrf=False
59 )
56 def logs_create(request):
60 def logs_create(request):
57 """
61 """
58 Endpoint for log aggregation
62 Endpoint for log aggregation
59 """
63 """
60 application = request.context.resource
64 application = request.context.resource
61 if request.method.upper() == 'OPTIONS':
65 if request.method.upper() == "OPTIONS":
62 return check_cors(request, application)
66 return check_cors(request, application)
63 else:
67 else:
64 check_cors(request, application, should_return=False)
68 check_cors(request, application, should_return=False)
65
69
66 params = dict(request.params.copy())
70 params = dict(request.params.copy())
67 proto_version = parse_proto(params.get('protocol_version', ''))
71 proto_version = parse_proto(params.get("protocol_version", ""))
68 payload = request.unsafe_json_body
72 payload = request.unsafe_json_body
69 sequence_accepted = request.matched_route.name == 'api_logs'
73 sequence_accepted = request.matched_route.name == "api_logs"
70
74
71 if sequence_accepted:
75 if sequence_accepted:
72 if application.allow_permanent_storage:
76 if application.allow_permanent_storage:
73 schema = LogListPermanentSchema().bind(
77 schema = LogListPermanentSchema().bind(utcnow=datetime.datetime.utcnow())
74 utcnow=datetime.datetime.utcnow())
75 else:
78 else:
76 schema = LogListSchema().bind(
79 schema = LogListSchema().bind(utcnow=datetime.datetime.utcnow())
77 utcnow=datetime.datetime.utcnow())
78 else:
80 else:
79 if application.allow_permanent_storage:
81 if application.allow_permanent_storage:
80 schema = LogSchemaPermanent().bind(
82 schema = LogSchemaPermanent().bind(utcnow=datetime.datetime.utcnow())
81 utcnow=datetime.datetime.utcnow())
82 else:
83 else:
83 schema = LogSchema().bind(
84 schema = LogSchema().bind(utcnow=datetime.datetime.utcnow())
84 utcnow=datetime.datetime.utcnow())
85
85
86 deserialized_logs = schema.deserialize(payload)
86 deserialized_logs = schema.deserialize(payload)
87 if sequence_accepted is False:
87 if sequence_accepted is False:
88 deserialized_logs = [deserialized_logs]
88 deserialized_logs = [deserialized_logs]
89
89
90 rate_limiting(request, application, 'per_application_logs_rate_limit',
90 rate_limiting(
91 len(deserialized_logs))
91 request, application, "per_application_logs_rate_limit", len(deserialized_logs)
92 )
92
93
93 # pprint.pprint(deserialized_logs)
94 # pprint.pprint(deserialized_logs)
94
95
95 # we need to split those out so we can process the pkey ones one by one
96 # we need to split those out so we can process the pkey ones one by one
96 non_pkey_logs = [log_dict for log_dict in deserialized_logs
97 non_pkey_logs = [
97 if not log_dict['primary_key']]
98 log_dict for log_dict in deserialized_logs if not log_dict["primary_key"]
99 ]
98 pkey_dict = {}
100 pkey_dict = {}
99 # try to process the logs as best as we can and group together to reduce
101 # try to process the logs as best as we can and group together to reduce
100 # the amount of
102 # the amount of
101 for log_dict in deserialized_logs:
103 for log_dict in deserialized_logs:
102 if log_dict['primary_key']:
104 if log_dict["primary_key"]:
103 key = (log_dict['primary_key'], log_dict['namespace'],)
105 key = (log_dict["primary_key"], log_dict["namespace"])
104 if not key in pkey_dict:
106 if not key in pkey_dict:
105 pkey_dict[key] = []
107 pkey_dict[key] = []
106 pkey_dict[key].append(log_dict)
108 pkey_dict[key].append(log_dict)
107
109
108 if non_pkey_logs:
110 if non_pkey_logs:
109 log.debug('%s non-pkey logs received: %s' % (application,
111 log.debug("%s non-pkey logs received: %s" % (application, len(non_pkey_logs)))
110 len(non_pkey_logs)))
111 tasks.add_logs.delay(application.resource_id, params, non_pkey_logs)
112 tasks.add_logs.delay(application.resource_id, params, non_pkey_logs)
112 if pkey_dict:
113 if pkey_dict:
113 logs_to_insert = []
114 logs_to_insert = []
114 for primary_key_tuple, payload in pkey_dict.items():
115 for primary_key_tuple, payload in pkey_dict.items():
115 sorted_logs = sorted(payload, key=lambda x: x['date'])
116 sorted_logs = sorted(payload, key=lambda x: x["date"])
116 logs_to_insert.append(sorted_logs[-1])
117 logs_to_insert.append(sorted_logs[-1])
117 log.debug('%s pkey logs received: %s' % (application,
118 log.debug("%s pkey logs received: %s" % (application, len(logs_to_insert)))
118 len(logs_to_insert)))
119 tasks.add_logs.delay(application.resource_id, params, logs_to_insert)
119 tasks.add_logs.delay(application.resource_id, params, logs_to_insert)
120
120
121 log.info('LOG call %s %s client:%s' % (
121 log.info(
122 application, proto_version, request.headers.get('user_agent')))
122 "LOG call %s %s client:%s"
123 return 'OK: Logs accepted'
123 % (application, proto_version, request.headers.get("user_agent"))
124
124 )
125
125 return "OK: Logs accepted"
126 @view_config(route_name='api_request_stats', renderer='string',
126
127 permission='create', require_csrf=False)
127
128 @view_config(route_name='api_metrics', renderer='string',
128 @view_config(
129 permission='create', require_csrf=False)
129 route_name="api_request_stats",
130 renderer="string",
131 permission="create",
132 require_csrf=False,
133 )
134 @view_config(
135 route_name="api_metrics", renderer="string", permission="create", require_csrf=False
136 )
130 def request_metrics_create(request):
137 def request_metrics_create(request):
131 """
138 """
132 Endpoint for performance metrics, aggregates view performance stats
139 Endpoint for performance metrics, aggregates view performance stats
133 and converts them to general metric row
140 and converts them to general metric row
134 """
141 """
135 application = request.context.resource
142 application = request.context.resource
136 if request.method.upper() == 'OPTIONS':
143 if request.method.upper() == "OPTIONS":
137 return check_cors(request, application)
144 return check_cors(request, application)
138 else:
145 else:
139 check_cors(request, application, should_return=False)
146 check_cors(request, application, should_return=False)
140
147
141 params = dict(request.params.copy())
148 params = dict(request.params.copy())
142 proto_version = parse_proto(params.get('protocol_version', ''))
149 proto_version = parse_proto(params.get("protocol_version", ""))
143
150
144 payload = request.unsafe_json_body
151 payload = request.unsafe_json_body
145 schema = MetricsListSchema()
152 schema = MetricsListSchema()
146 dataset = schema.deserialize(payload)
153 dataset = schema.deserialize(payload)
147
154
148 rate_limiting(request, application, 'per_application_metrics_rate_limit',
155 rate_limiting(
149 len(dataset))
156 request, application, "per_application_metrics_rate_limit", len(dataset)
157 )
150
158
151 # looping report data
159 # looping report data
152 metrics = {}
160 metrics = {}
153 for metric in dataset:
161 for metric in dataset:
154 server_name = metric.get('server', '').lower() or 'unknown'
162 server_name = metric.get("server", "").lower() or "unknown"
155 start_interval = convert_date(metric['timestamp'])
163 start_interval = convert_date(metric["timestamp"])
156 start_interval = start_interval.replace(second=0, microsecond=0)
164 start_interval = start_interval.replace(second=0, microsecond=0)
157
165
158 for view_name, view_metrics in metric['metrics']:
166 for view_name, view_metrics in metric["metrics"]:
159 key = '%s%s%s' % (metric['server'], start_interval, view_name)
167 key = "%s%s%s" % (metric["server"], start_interval, view_name)
160 if start_interval not in metrics:
168 if start_interval not in metrics:
161 metrics[key] = {"requests": 0, "main": 0, "sql": 0,
169 metrics[key] = {
162 "nosql": 0, "remote": 0, "tmpl": 0,
170 "requests": 0,
163 "custom": 0, 'sql_calls': 0,
171 "main": 0,
164 'nosql_calls': 0,
172 "sql": 0,
165 'remote_calls': 0, 'tmpl_calls': 0,
173 "nosql": 0,
166 'custom_calls': 0,
174 "remote": 0,
167 "start_interval": start_interval,
175 "tmpl": 0,
168 "server_name": server_name,
176 "custom": 0,
169 "view_name": view_name
177 "sql_calls": 0,
170 }
178 "nosql_calls": 0,
171 metrics[key]["requests"] += int(view_metrics['requests'])
179 "remote_calls": 0,
172 metrics[key]["main"] += round(view_metrics['main'], 5)
180 "tmpl_calls": 0,
173 metrics[key]["sql"] += round(view_metrics['sql'], 5)
181 "custom_calls": 0,
174 metrics[key]["nosql"] += round(view_metrics['nosql'], 5)
182 "start_interval": start_interval,
175 metrics[key]["remote"] += round(view_metrics['remote'], 5)
183 "server_name": server_name,
176 metrics[key]["tmpl"] += round(view_metrics['tmpl'], 5)
184 "view_name": view_name,
177 metrics[key]["custom"] += round(view_metrics.get('custom', 0.0),
185 }
178 5)
186 metrics[key]["requests"] += int(view_metrics["requests"])
179 metrics[key]["sql_calls"] += int(
187 metrics[key]["main"] += round(view_metrics["main"], 5)
180 view_metrics.get('sql_calls', 0))
188 metrics[key]["sql"] += round(view_metrics["sql"], 5)
181 metrics[key]["nosql_calls"] += int(
189 metrics[key]["nosql"] += round(view_metrics["nosql"], 5)
182 view_metrics.get('nosql_calls', 0))
190 metrics[key]["remote"] += round(view_metrics["remote"], 5)
183 metrics[key]["remote_calls"] += int(
191 metrics[key]["tmpl"] += round(view_metrics["tmpl"], 5)
184 view_metrics.get('remote_calls', 0))
192 metrics[key]["custom"] += round(view_metrics.get("custom", 0.0), 5)
185 metrics[key]["tmpl_calls"] += int(
193 metrics[key]["sql_calls"] += int(view_metrics.get("sql_calls", 0))
186 view_metrics.get('tmpl_calls', 0))
194 metrics[key]["nosql_calls"] += int(view_metrics.get("nosql_calls", 0))
187 metrics[key]["custom_calls"] += int(
195 metrics[key]["remote_calls"] += int(view_metrics.get("remote_calls", 0))
188 view_metrics.get('custom_calls', 0))
196 metrics[key]["tmpl_calls"] += int(view_metrics.get("tmpl_calls", 0))
197 metrics[key]["custom_calls"] += int(view_metrics.get("custom_calls", 0))
189
198
190 if not metrics[key]["requests"]:
199 if not metrics[key]["requests"]:
191 # fix this here because validator can't
200 # fix this here because validator can't
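
One detail worth noting in the aggregation above: buckets are stored under key (server + interval + view name), but the membership test checks start_interval, which never appears as a key, so every row re-creates its bucket before accumulating. A sketch of the presumably intended guard:

    metrics = {}
    server, start_interval, view_name = "web01", "2018-07-04 10:00", "home"
    key = "%s%s%s" % (server, start_interval, view_name)
    # presumably intended: test the composite key, not start_interval
    if key not in metrics:
        metrics[key] = {"requests": 0, "main": 0.0}
    metrics[key]["requests"] += 10
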
@@ -197,126 +206,153 b' def request_metrics_create(request):'
197 normalized_metrics = []
206 normalized_metrics = []
198 for metric in metrics.values():
207 for metric in metrics.values():
199 new_metric = {
208 new_metric = {
200 'namespace': 'appenlight.request_metric',
209 "namespace": "appenlight.request_metric",
201 'timestamp': metric.pop('start_interval'),
210 "timestamp": metric.pop("start_interval"),
202 'server_name': metric['server_name'],
211 "server_name": metric["server_name"],
203 'tags': list(metric.items())
212 "tags": list(metric.items()),
204 }
213 }
205 normalized_metrics.append(new_metric)
214 normalized_metrics.append(new_metric)
206
215
207 tasks.add_metrics.delay(application.resource_id, params,
216 tasks.add_metrics.delay(
208 normalized_metrics, proto_version)
217 application.resource_id, params, normalized_metrics, proto_version
209
218 )
210 log.info('REQUEST METRICS call {} {} client:{}'.format(
219
211 application.resource_name, proto_version,
220 log.info(
212 request.headers.get('user_agent')))
221 "REQUEST METRICS call {} {} client:{}".format(
213 return 'OK: request metrics accepted'
222 application.resource_name, proto_version, request.headers.get("user_agent")
214
223 )
215
224 )
216 @view_config(route_name='api_general_metrics', renderer='string',
225 return "OK: request metrics accepted"
217 permission='create', require_csrf=False)
226
218 @view_config(route_name='api_general_metric', renderer='string',
227
219 permission='create', require_csrf=False)
228 @view_config(
229 route_name="api_general_metrics",
230 renderer="string",
231 permission="create",
232 require_csrf=False,
233 )
234 @view_config(
235 route_name="api_general_metric",
236 renderer="string",
237 permission="create",
238 require_csrf=False,
239 )
220 def general_metrics_create(request):
240 def general_metrics_create(request):
221 """
241 """
222 Endpoint for general metrics aggregation
242 Endpoint for general metrics aggregation
223 """
243 """
224 application = request.context.resource
244 application = request.context.resource
225 if request.method.upper() == 'OPTIONS':
245 if request.method.upper() == "OPTIONS":
226 return check_cors(request, application)
246 return check_cors(request, application)
227 else:
247 else:
228 check_cors(request, application, should_return=False)
248 check_cors(request, application, should_return=False)
229
249
230 params = dict(request.params.copy())
250 params = dict(request.params.copy())
231 proto_version = parse_proto(params.get('protocol_version', ''))
251 proto_version = parse_proto(params.get("protocol_version", ""))
232 payload = request.unsafe_json_body
252 payload = request.unsafe_json_body
233 sequence_accepted = request.matched_route.name == 'api_general_metrics'
253 sequence_accepted = request.matched_route.name == "api_general_metrics"
234 if sequence_accepted:
254 if sequence_accepted:
235 if application.allow_permanent_storage:
255 if application.allow_permanent_storage:
236 schema = GeneralMetricsPermanentListSchema().bind(
256 schema = GeneralMetricsPermanentListSchema().bind(
237 utcnow=datetime.datetime.utcnow())
257 utcnow=datetime.datetime.utcnow()
258 )
238 else:
259 else:
239 schema = GeneralMetricsListSchema().bind(
260 schema = GeneralMetricsListSchema().bind(utcnow=datetime.datetime.utcnow())
240 utcnow=datetime.datetime.utcnow())
241 else:
261 else:
242 if application.allow_permanent_storage:
262 if application.allow_permanent_storage:
243 schema = GeneralMetricPermanentSchema().bind(
263 schema = GeneralMetricPermanentSchema().bind(
244 utcnow=datetime.datetime.utcnow())
264 utcnow=datetime.datetime.utcnow()
265 )
245 else:
266 else:
246 schema = GeneralMetricSchema().bind(
267 schema = GeneralMetricSchema().bind(utcnow=datetime.datetime.utcnow())
247 utcnow=datetime.datetime.utcnow())
248
268
249 deserialized_metrics = schema.deserialize(payload)
269 deserialized_metrics = schema.deserialize(payload)
250 if sequence_accepted is False:
270 if sequence_accepted is False:
251 deserialized_metrics = [deserialized_metrics]
271 deserialized_metrics = [deserialized_metrics]
252
272
253 rate_limiting(request, application, 'per_application_metrics_rate_limit',
273 rate_limiting(
254 len(deserialized_metrics))
274 request,
255
275 application,
256 tasks.add_metrics.delay(application.resource_id, params,
276 "per_application_metrics_rate_limit",
257 deserialized_metrics, proto_version)
277 len(deserialized_metrics),
258
278 )
259 log.info('METRICS call {} {} client:{}'.format(
279
260 application.resource_name, proto_version,
280 tasks.add_metrics.delay(
261 request.headers.get('user_agent')))
281 application.resource_id, params, deserialized_metrics, proto_version
262 return 'OK: Metrics accepted'
282 )
263
283
264
284 log.info(
265 @view_config(route_name='api_reports', renderer='string', permission='create',
285 "METRICS call {} {} client:{}".format(
266 require_csrf=False)
286 application.resource_name, proto_version, request.headers.get("user_agent")
267 @view_config(route_name='api_slow_reports', renderer='string',
287 )
268 permission='create', require_csrf=False)
288 )
269 @view_config(route_name='api_report', renderer='string', permission='create',
289 return "OK: Metrics accepted"
270 require_csrf=False)
290
291
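
Both the log and metric endpoints above pick one of four colander schemas from the same two flags via nested ifs. An equivalent table-driven dispatch, sketched with the validator classes imported at the top of this file:

    import datetime

    from appenlight.validators import (
        GeneralMetricSchema,
        GeneralMetricPermanentSchema,
        GeneralMetricsListSchema,
        GeneralMetricsPermanentListSchema,
    )

    SCHEMA_BY_FLAGS = {
        # (sequence_accepted, allow_permanent_storage) -> schema class
        (True, True): GeneralMetricsPermanentListSchema,
        (True, False): GeneralMetricsListSchema,
        (False, True): GeneralMetricPermanentSchema,
        (False, False): GeneralMetricSchema,
    }

    def pick_schema(sequence_accepted, allow_permanent_storage):
        cls = SCHEMA_BY_FLAGS[(sequence_accepted, allow_permanent_storage)]
        return cls().bind(utcnow=datetime.datetime.utcnow())
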
292 @view_config(
293 route_name="api_reports", renderer="string", permission="create", require_csrf=False
294 )
295 @view_config(
296 route_name="api_slow_reports",
297 renderer="string",
298 permission="create",
299 require_csrf=False,
300 )
301 @view_config(
302 route_name="api_report", renderer="string", permission="create", require_csrf=False
303 )
271 def reports_create(request):
304 def reports_create(request):
272 """
305 """
273 Endpoint for exception and slowness reports
306 Endpoint for exception and slowness reports
274 """
307 """
275 # route_url('reports')
308 # route_url('reports')
276 application = request.context.resource
309 application = request.context.resource
277 if request.method.upper() == 'OPTIONS':
310 if request.method.upper() == "OPTIONS":
278 return check_cors(request, application)
311 return check_cors(request, application)
279 else:
312 else:
280 check_cors(request, application, should_return=False)
313 check_cors(request, application, should_return=False)
281 params = dict(request.params.copy())
314 params = dict(request.params.copy())
282 proto_version = parse_proto(params.get('protocol_version', ''))
315 proto_version = parse_proto(params.get("protocol_version", ""))
283 payload = request.unsafe_json_body
316 payload = request.unsafe_json_body
284 sequence_accepted = request.matched_route.name == 'api_reports'
317 sequence_accepted = request.matched_route.name == "api_reports"
285
318
286 if sequence_accepted:
319 if sequence_accepted:
287 schema = ReportListSchema_0_5().bind(
320 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
288 utcnow=datetime.datetime.utcnow())
289 else:
321 else:
290 schema = ReportSchema_0_5().bind(
322 schema = ReportSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
291 utcnow=datetime.datetime.utcnow())
292
323
293 deserialized_reports = schema.deserialize(payload)
324 deserialized_reports = schema.deserialize(payload)
294 if sequence_accepted is False:
325 if sequence_accepted is False:
295 deserialized_reports = [deserialized_reports]
326 deserialized_reports = [deserialized_reports]
296 if deserialized_reports:
327 if deserialized_reports:
297 rate_limiting(request, application,
328 rate_limiting(
298 'per_application_reports_rate_limit',
329 request,
299 len(deserialized_reports))
330 application,
331 "per_application_reports_rate_limit",
332 len(deserialized_reports),
333 )
300
334
301 # pprint.pprint(deserialized_reports)
335 # pprint.pprint(deserialized_reports)
302 tasks.add_reports.delay(application.resource_id, params,
336 tasks.add_reports.delay(application.resource_id, params, deserialized_reports)
303 deserialized_reports)
337 log.info(
304 log.info('REPORT call %s, %s client:%s' % (
338 "REPORT call %s, %s client:%s"
305 application,
339 % (application, proto_version, request.headers.get("user_agent"))
306 proto_version,
340 )
307 request.headers.get('user_agent'))
341 return "OK: Reports accepted"
308 )
342
309 return 'OK: Reports accepted'
343
310
344 @view_config(
311
345 route_name="api_airbrake",
312 @view_config(route_name='api_airbrake', renderer='string', permission='create',
346 renderer="string",
313 require_csrf=False)
347 permission="create",
348 require_csrf=False,
349 )
314 def airbrake_xml_compat(request):
350 def airbrake_xml_compat(request):
315 """
351 """
316 Airbrake-compatible endpoint for XML reports
352 Airbrake-compatible endpoint for XML reports
317 """
353 """
318 application = request.context.resource
354 application = request.context.resource
319 if request.method.upper() == 'OPTIONS':
355 if request.method.upper() == "OPTIONS":
320 return check_cors(request, application)
356 return check_cors(request, application)
321 else:
357 else:
322 check_cors(request, application, should_return=False)
358 check_cors(request, application, should_return=False)
@@ -326,18 +362,27 b' def airbrake_xml_compat(request):'
326 error_dict = parse_airbrake_xml(request)
362 error_dict = parse_airbrake_xml(request)
327 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
363 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
328 deserialized_reports = schema.deserialize([error_dict])
364 deserialized_reports = schema.deserialize([error_dict])
329 rate_limiting(request, application, 'per_application_reports_rate_limit',
365 rate_limiting(
330 len(deserialized_reports))
366 request,
331
367 application,
332 tasks.add_reports.delay(application.resource_id, params,
368 "per_application_reports_rate_limit",
333 deserialized_reports)
369 len(deserialized_reports),
334 log.info('%s AIRBRAKE call for application %s, api_ver:%s client:%s' % (
370 )
335 500, application.resource_name,
371
336 request.params.get('protocol_version', 'unknown'),
372 tasks.add_reports.delay(application.resource_id, params, deserialized_reports)
337 request.headers.get('user_agent'))
373 log.info(
338 )
374 "%s AIRBRAKE call for application %s, api_ver:%s client:%s"
339 return '<notice><id>no-id</id><url>%s</url></notice>' % \
375 % (
340 request.registry.settings['mailing.app_url']
376 500,
377 application.resource_name,
378 request.params.get("protocol_version", "unknown"),
379 request.headers.get("user_agent"),
380 )
381 )
382 return (
383 "<notice><id>no-id</id><url>%s</url></notice>"
384 % request.registry.settings["mailing.app_url"]
385 )
341
386
342
387
343 def decompress_gzip(data):
388 def decompress_gzip(data):
@@ -369,33 +414,38 b' def decode_b64(data):'
369 raise HTTPBadRequest()
414 raise HTTPBadRequest()
370
415
371
416
372 @view_config(route_name='api_sentry', renderer='string', permission='create',
417 @view_config(
373 require_csrf=False)
418 route_name="api_sentry", renderer="string", permission="create", require_csrf=False
374 @view_config(route_name='api_sentry_slash', renderer='string',
419 )
375 permission='create', require_csrf=False)
420 @view_config(
421 route_name="api_sentry_slash",
422 renderer="string",
423 permission="create",
424 require_csrf=False,
425 )
376 def sentry_compat(request):
426 def sentry_compat(request):
377 """
427 """
378 Sentry-compatible endpoint
428 Sentry-compatible endpoint
379 """
429 """
380 application = request.context.resource
430 application = request.context.resource
381 if request.method.upper() == 'OPTIONS':
431 if request.method.upper() == "OPTIONS":
382 return check_cors(request, application)
432 return check_cors(request, application)
383 else:
433 else:
384 check_cors(request, application, should_return=False)
434 check_cors(request, application, should_return=False)
385
435
386 # handle various report encoding
436 # handle various report encoding
387 content_encoding = request.headers.get('Content-Encoding')
437 content_encoding = request.headers.get("Content-Encoding")
388 content_type = request.headers.get('Content-Type')
438 content_type = request.headers.get("Content-Type")
389 if content_encoding == 'gzip':
439 if content_encoding == "gzip":
390 body = decompress_gzip(request.body)
440 body = decompress_gzip(request.body)
391 elif content_encoding == 'deflate':
441 elif content_encoding == "deflate":
392 body = decompress_zlib(request.body)
442 body = decompress_zlib(request.body)
393 else:
443 else:
394 body = request.body
444 body = request.body
395 # attempt to fix string before decoding for stupid clients
445 # attempt to fix string before decoding for stupid clients
396 if content_type == 'application/x-www-form-urlencoded':
446 if content_type == "application/x-www-form-urlencoded":
397 body = urllib.parse.unquote(body.decode('utf8'))
447 body = urllib.parse.unquote(body.decode("utf8"))
398 check_char = '{' if isinstance(body, str) else b'{'
448 check_char = "{" if isinstance(body, str) else b"{"
399 if not body.startswith(check_char):
449 if not body.startswith(check_char):
400 try:
450 try:
401 body = decode_b64(body)
451 body = decode_b64(body)
@@ -404,7 +454,7 b' def sentry_compat(request):'
404 log.info(exc)
454 log.info(exc)
405
455
406 try:
456 try:
407 json_body = json.loads(body.decode('utf8'))
457 json_body = json.loads(body.decode("utf8"))
408 except ValueError:
458 except ValueError:
409 raise JSONException("Incorrect JSON")
459 raise JSONException("Incorrect JSON")
410
460
@@ -412,24 +462,24 b' def sentry_compat(request):'
412
462
413 if event_type == ParsedSentryEventType.LOG:
463 if event_type == ParsedSentryEventType.LOG:
414 if application.allow_permanent_storage:
464 if application.allow_permanent_storage:
415 schema = LogSchemaPermanent().bind(
465 schema = LogSchemaPermanent().bind(utcnow=datetime.datetime.utcnow())
416 utcnow=datetime.datetime.utcnow())
417 else:
466 else:
418 schema = LogSchema().bind(
467 schema = LogSchema().bind(utcnow=datetime.datetime.utcnow())
419 utcnow=datetime.datetime.utcnow())
420 deserialized_logs = schema.deserialize(event)
468 deserialized_logs = schema.deserialize(event)
421 non_pkey_logs = [deserialized_logs]
469 non_pkey_logs = [deserialized_logs]
422 log.debug('%s non-pkey logs received: %s' % (application,
470 log.debug("%s non-pkey logs received: %s" % (application, len(non_pkey_logs)))
423 len(non_pkey_logs)))
424 tasks.add_logs.delay(application.resource_id, {}, non_pkey_logs)
471 tasks.add_logs.delay(application.resource_id, {}, non_pkey_logs)
425 if event_type == ParsedSentryEventType.ERROR_REPORT:
472 if event_type == ParsedSentryEventType.ERROR_REPORT:
426 schema = ReportSchema_0_5().bind(
473 schema = ReportSchema_0_5().bind(
427 utcnow=datetime.datetime.utcnow(),
474 utcnow=datetime.datetime.utcnow(),
428 allow_permanent_storage=application.allow_permanent_storage)
475 allow_permanent_storage=application.allow_permanent_storage,
476 )
429 deserialized_reports = [schema.deserialize(event)]
477 deserialized_reports = [schema.deserialize(event)]
430 rate_limiting(request, application,
478 rate_limiting(
431 'per_application_reports_rate_limit',
479 request,
432 len(deserialized_reports))
480 application,
433 tasks.add_reports.delay(application.resource_id, {},
481 "per_application_reports_rate_limit",
434 deserialized_reports)
482 len(deserialized_reports),
435 return 'OK: Events accepted'
483 )
484 tasks.add_reports.delay(application.resource_id, {}, deserialized_reports)
485 return "OK: Events accepted"
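
sentry_compat above has to normalize several client encodings before it can parse JSON: gzip or deflate transport compression, percent-encoded form bodies, and base64-wrapped payloads detected by a missing leading brace. A standalone sketch of that cascade using only the standard library (decompress_gzip, decompress_zlib, and decode_b64 from the file are replaced by stdlib stand-ins here):

    import base64, gzip, json, urllib.parse, zlib

    def decode_sentry_body(body, content_encoding=None, content_type=None):
        if content_encoding == "gzip":
            body = gzip.decompress(body)
        elif content_encoding == "deflate":
            body = zlib.decompress(body)
        if content_type == "application/x-www-form-urlencoded":
            body = urllib.parse.unquote(body.decode("utf8"))
        # payloads that don't start with "{" are assumed base64-wrapped
        check_char = "{" if isinstance(body, str) else b"{"
        if not body.startswith(check_char):
            body = base64.b64decode(body)
        return json.loads(body if isinstance(body, str) else body.decode("utf8"))

    print(decode_sentry_body(b'{"event_id": "abc"}'))
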
This diff has been collapsed as it changes many lines (679 lines changed).
@@ -31,19 +31,22 b' import appenlight.forms as forms'
31 from appenlight.models import DBSession
31 from appenlight.models import DBSession
32 from appenlight.models.resource import Resource
32 from appenlight.models.resource import Resource
33 from appenlight.models.application import Application
33 from appenlight.models.application import Application
34 from appenlight.models.application_postprocess_conf import \
34 from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
35 ApplicationPostprocessConf
36 from ziggurat_foundations.models.services.user import UserService
35 from ziggurat_foundations.models.services.user import UserService
37 from ziggurat_foundations.models.services.resource import ResourceService
36 from ziggurat_foundations.models.services.resource import ResourceService
38 from ziggurat_foundations.models.services.user_resource_permission import UserResourcePermissionService
37 from ziggurat_foundations.models.services.user_resource_permission import (
38 UserResourcePermissionService,
39 )
39 from appenlight.models.user_resource_permission import UserResourcePermission
40 from appenlight.models.user_resource_permission import UserResourcePermission
40 from appenlight.models.group_resource_permission import GroupResourcePermission
41 from appenlight.models.group_resource_permission import GroupResourcePermission
41 from appenlight.models.services.application import ApplicationService
42 from appenlight.models.services.application import ApplicationService
42 from appenlight.models.services.application_postprocess_conf import \
43 from appenlight.models.services.application_postprocess_conf import (
43 ApplicationPostprocessConfService
44 ApplicationPostprocessConfService,
45 )
44 from appenlight.models.services.group import GroupService
46 from appenlight.models.services.group import GroupService
45 from appenlight.models.services.group_resource_permission import \
47 from appenlight.models.services.group_resource_permission import (
46 GroupResourcePermissionService
48 GroupResourcePermissionService,
49 )
47 from appenlight.models.services.request_metric import RequestMetricService
50 from appenlight.models.services.request_metric import RequestMetricService
48 from appenlight.models.services.report_group import ReportGroupService
51 from appenlight.models.services.report_group import ReportGroupService
49 from appenlight.models.services.slow_call import SlowCallService
52 from appenlight.models.services.slow_call import SlowCallService
@@ -62,13 +65,16 b' def app_not_found(request, id):'
62 """
65 """
63 Redirects when the application is not found and sets a flash message
66 Redirects when the application is not found and sets a flash message
64 """
67 """
65 request.session.flash(_('Application not found'), 'warning')
68 request.session.flash(_("Application not found"), "warning")
66 return HTTPFound(
69 return HTTPFound(location=request.route_url("applications", action="index"))
67 location=request.route_url('applications', action='index'))
68
70
69
71
70 @view_config(route_name='applications_no_id',
72 @view_config(
71 renderer='json', request_method="GET", permission='authenticated')
73 route_name="applications_no_id",
74 renderer="json",
75 request_method="GET",
76 permission="authenticated",
77 )
72 def applications_list(request):
78 def applications_list(request):
73 """
79 """
74 Applications list
80 Applications list
@@ -82,55 +88,68 b' def applications_list(request):'
82 applications in the system
88 applications in the system
83
89
84 """
90 """
85 is_root = request.has_permission('root_administration',
91 is_root = request.has_permission("root_administration", RootFactory(request))
86 RootFactory(request))
92 if is_root and request.GET.get("root_list"):
87 if is_root and request.GET.get('root_list'):
88 resources = Resource.all().order_by(Resource.resource_name)
93 resources = Resource.all().order_by(Resource.resource_name)
89 resource_type = request.GET.get('resource_type', 'application')
94 resource_type = request.GET.get("resource_type", "application")
90 if resource_type:
95 if resource_type:
91 resources = resources.filter(
96 resources = resources.filter(Resource.resource_type == resource_type)
92 Resource.resource_type == resource_type)
93 else:
97 else:
94 permissions = request.params.getall('permission')
98 permissions = request.params.getall("permission")
95 if permissions:
99 if permissions:
96 resources = UserService.resources_with_perms(
100 resources = UserService.resources_with_perms(
97 request.user,
101 request.user,
98 permissions,
102 permissions,
99 resource_types=[request.GET.get('resource_type',
103 resource_types=[request.GET.get("resource_type", "application")],
100 'application')])
104 )
101 else:
105 else:
102 resources = request.user.resources.filter(
106 resources = request.user.resources.filter(
103 Application.resource_type == request.GET.get(
107 Application.resource_type
104 'resource_type',
108 == request.GET.get("resource_type", "application")
105 'application'))
109 )
106 return [r.get_dict(include_keys=['resource_id', 'resource_name', 'domains',
110 return [
107 'owner_user_name', 'owner_group_name'])
111 r.get_dict(
108 for
112 include_keys=[
109 r in resources]
113 "resource_id",
110
114 "resource_name",
111
115 "domains",
112 @view_config(route_name='applications', renderer='json',
116 "owner_user_name",
113 request_method="GET", permission='view')
117 "owner_group_name",
118 ]
119 )
120 for r in resources
121 ]
122
123
124 @view_config(
125 route_name="applications", renderer="json", request_method="GET", permission="view"
126 )
114 def application_GET(request):
127 def application_GET(request):
115 resource = request.context.resource
128 resource = request.context.resource
116 include_sensitive_info = False
129 include_sensitive_info = False
117 if request.has_permission('edit'):
130 if request.has_permission("edit"):
118 include_sensitive_info = True
131 include_sensitive_info = True
119 resource_dict = resource.get_dict(
132 resource_dict = resource.get_dict(
120 include_perms=include_sensitive_info,
133 include_perms=include_sensitive_info,
121 include_processing_rules=include_sensitive_info)
134 include_processing_rules=include_sensitive_info,
135 )
122 return resource_dict
136 return resource_dict
123
137
124
138
125 @view_config(route_name='applications_no_id', request_method="POST",
139 @view_config(
126 renderer='json', permission='create_resources')
140 route_name="applications_no_id",
141 request_method="POST",
142 renderer="json",
143 permission="create_resources",
144 )
127 def application_create(request):
145 def application_create(request):
128 """
146 """
129 Creates new application instances
147 Creates new application instances
130 """
148 """
131 user = request.user
149 user = request.user
132 form = forms.ApplicationCreateForm(MultiDict(request.unsafe_json_body),
150 form = forms.ApplicationCreateForm(
133 csrf_context=request)
151 MultiDict(request.unsafe_json_body), csrf_context=request
152 )
134 if form.validate():
153 if form.validate():
135 session = DBSession()
154 session = DBSession()
136 resource = Application()
155 resource = Application()
@@ -138,7 +157,7 b' def application_create(request):'
138 form.populate_obj(resource)
157 form.populate_obj(resource)
139 resource.api_key = resource.generate_api_key()
158 resource.api_key = resource.generate_api_key()
140 user.resources.append(resource)
159 user.resources.append(resource)
141 request.session.flash(_('Application created'))
160 request.session.flash(_("Application created"))
142 DBSession.flush()
161 DBSession.flush()
143 mark_changed(session)
162 mark_changed(session)
144 else:
163 else:
@@ -147,8 +166,12 b' def application_create(request):'
147 return resource.get_dict()
166 return resource.get_dict()
148
167
149
168
150 @view_config(route_name='applications', request_method="PATCH",
169 @view_config(
151 renderer='json', permission='edit')
170 route_name="applications",
171 request_method="PATCH",
172 renderer="json",
173 permission="edit",
174 )
152 def application_update(request):
175 def application_update(request):
153 """
176 """
154 Updates main application configuration
177 Updates main application configuration
@@ -160,60 +183,72 b' def application_update(request):'
160 # disallow setting permanent storage by non-admins
183 # disallow setting permanent storage by non-admins
161 # use default/non-resource based context for this check
184 # use default/non-resource based context for this check
162 req_dict = copy.copy(request.unsafe_json_body)
185 req_dict = copy.copy(request.unsafe_json_body)
163 if not request.has_permission('root_administration', RootFactory(request)):
186 if not request.has_permission("root_administration", RootFactory(request)):
164 req_dict['allow_permanent_storage'] = ''
187 req_dict["allow_permanent_storage"] = ""
165 if not req_dict.get('uptime_url'):
188 if not req_dict.get("uptime_url"):
166 # needed because the validator is still triggered by default
189 # needed because the validator is still triggered by default
167 req_dict.pop('uptime_url', '')
190 req_dict.pop("uptime_url", "")
168 application_form = forms.ApplicationUpdateForm(MultiDict(req_dict),
191 application_form = forms.ApplicationUpdateForm(
169 csrf_context=request)
192 MultiDict(req_dict), csrf_context=request
193 )
170 if application_form.validate():
194 if application_form.validate():
171 application_form.populate_obj(resource)
195 application_form.populate_obj(resource)
172 request.session.flash(_('Application updated'))
196 request.session.flash(_("Application updated"))
173 else:
197 else:
174 return HTTPUnprocessableEntity(body=application_form.errors_json)
198 return HTTPUnprocessableEntity(body=application_form.errors_json)
175
199
176 include_sensitive_info = False
200 include_sensitive_info = False
177 if request.has_permission('edit'):
201 if request.has_permission("edit"):
178 include_sensitive_info = True
202 include_sensitive_info = True
179 resource_dict = resource.get_dict(
203 resource_dict = resource.get_dict(
180 include_perms=include_sensitive_info,
204 include_perms=include_sensitive_info,
181 include_processing_rules=include_sensitive_info)
205 include_processing_rules=include_sensitive_info,
206 )
182 return resource_dict
207 return resource_dict
183
208
184
209
185 @view_config(route_name='applications_property', match_param='key=api_key',
210 @view_config(
186 request_method="POST", renderer='json',
211 route_name="applications_property",
187 permission='delete')
212 match_param="key=api_key",
213 request_method="POST",
214 renderer="json",
215 permission="delete",
216 )
188 def application_regenerate_key(request):
217 def application_regenerate_key(request):
189 """
218 """
190 Regenerates API keys for application
219 Regenerates API keys for application
191 """
220 """
192 resource = request.context.resource
221 resource = request.context.resource
193
222
194 form = forms.CheckPasswordForm(MultiDict(request.unsafe_json_body),
223 form = forms.CheckPasswordForm(
195 csrf_context=request)
224 MultiDict(request.unsafe_json_body), csrf_context=request
225 )
196 form.password.user = request.user
226 form.password.user = request.user
197
227
198 if form.validate():
228 if form.validate():
199 resource.api_key = resource.generate_api_key()
229 resource.api_key = resource.generate_api_key()
200 resource.public_key = resource.generate_api_key()
230 resource.public_key = resource.generate_api_key()
201 msg = 'API keys regenerated - please update your application config.'
231 msg = "API keys regenerated - please update your application config."
202 request.session.flash(_(msg))
232 request.session.flash(_(msg))
203 else:
233 else:
204 return HTTPUnprocessableEntity(body=form.errors_json)
234 return HTTPUnprocessableEntity(body=form.errors_json)
205
235
206 if request.has_permission('edit'):
236 if request.has_permission("edit"):
207 include_sensitive_info = True
237 include_sensitive_info = True
208 resource_dict = resource.get_dict(
238 resource_dict = resource.get_dict(
209 include_perms=include_sensitive_info,
239 include_perms=include_sensitive_info,
210 include_processing_rules=include_sensitive_info)
240 include_processing_rules=include_sensitive_info,
241 )
211 return resource_dict
242 return resource_dict
212
243
213
244
214 @view_config(route_name='applications_property',
245 @view_config(
215 match_param='key=delete_resource',
246 route_name="applications_property",
216 request_method="PATCH", renderer='json', permission='delete')
247 match_param="key=delete_resource",
248 request_method="PATCH",
249 renderer="json",
250 permission="delete",
251 )
217 def application_remove(request):
252 def application_remove(request):
218 """
253 """
219 Removes application resources
254 Removes application resources
@@ -221,27 +256,34 b' def application_remove(request):'
221 resource = request.context.resource
256 resource = request.context.resource
222 # we need a polymorphic object here to properly launch sqlalchemy events
257 # we need a polymorphic object here to properly launch sqlalchemy events
223 resource = ApplicationService.by_id(resource.resource_id)
258 resource = ApplicationService.by_id(resource.resource_id)
224 form = forms.CheckPasswordForm(MultiDict(request.safe_json_body or {}),
259 form = forms.CheckPasswordForm(
225 csrf_context=request)
260 MultiDict(request.safe_json_body or {}), csrf_context=request
261 )
226 form.password.user = request.user
262 form.password.user = request.user
227 if form.validate():
263 if form.validate():
228 DBSession.delete(resource)
264 DBSession.delete(resource)
229 request.session.flash(_('Application removed'))
265 request.session.flash(_("Application removed"))
230 else:
266 else:
231 return HTTPUnprocessableEntity(body=form.errors_json)
267 return HTTPUnprocessableEntity(body=form.errors_json)
232
268
233 return True
269 return True
234
270
235
271
236 @view_config(route_name='applications_property', match_param='key=owner',
272 @view_config(
237 request_method="PATCH", renderer='json', permission='delete')
273 route_name="applications_property",
274 match_param="key=owner",
275 request_method="PATCH",
276 renderer="json",
277 permission="delete",
278 )
238 def application_ownership_transfer(request):
279 def application_ownership_transfer(request):
239 """
280 """
240 Allows the application owner to transfer application ownership to another user
281 Allows the application owner to transfer application ownership to another user
241 """
282 """
242 resource = request.context.resource
283 resource = request.context.resource
243 form = forms.ChangeApplicationOwnerForm(
284 form = forms.ChangeApplicationOwnerForm(
244 MultiDict(request.safe_json_body or {}), csrf_context=request)
285 MultiDict(request.safe_json_body or {}), csrf_context=request
286 )
245 form.password.user = request.user
287 form.password.user = request.user
246 if form.validate():
288 if form.validate():
247 user = UserService.by_user_name(form.user_name.data)
289 user = UserService.by_user_name(form.user_name.data)
@@ -249,55 +291,68 b' def application_ownership_transfer(request):'
249 # remove integrations to not leak security data of external applications
291 # remove integrations to not leak security data of external applications
250 for integration in resource.integrations[:]:
292 for integration in resource.integrations[:]:
251 resource.integrations.remove(integration)
293 resource.integrations.remove(integration)
252 request.session.flash(_('Application transferred'))
294 request.session.flash(_("Application transferred"))
253 else:
295 else:
254 return HTTPUnprocessableEntity(body=form.errors_json)
296 return HTTPUnprocessableEntity(body=form.errors_json)
255 return True
297 return True
256
298
257
299
258 @view_config(route_name='applications_property',
300 @view_config(
259 match_param='key=postprocessing_rules', renderer='json',
301 route_name="applications_property",
260 request_method='POST', permission='edit')
302 match_param="key=postprocessing_rules",
303 renderer="json",
304 request_method="POST",
305 permission="edit",
306 )
261 def applications_postprocess_POST(request):
307 def applications_postprocess_POST(request):
262 """
308 """
263 Creates new postprocessing rules for applications
309 Creates new postprocessing rules for applications
264 """
310 """
265 resource = request.context.resource
311 resource = request.context.resource
266 conf = ApplicationPostprocessConf()
312 conf = ApplicationPostprocessConf()
267 conf.do = 'postprocess'
313 conf.do = "postprocess"
268 conf.new_value = '1'
314 conf.new_value = "1"
269 resource.postprocess_conf.append(conf)
315 resource.postprocess_conf.append(conf)
270 DBSession.flush()
316 DBSession.flush()
271 return conf.get_dict()
317 return conf.get_dict()
272
318
273
319
274 @view_config(route_name='applications_property',
320 @view_config(
275 match_param='key=postprocessing_rules', renderer='json',
321 route_name="applications_property",
276 request_method='PATCH', permission='edit')
322 match_param="key=postprocessing_rules",
323 renderer="json",
324 request_method="PATCH",
325 permission="edit",
326 )
277 def applications_postprocess_PATCH(request):
327 def applications_postprocess_PATCH(request):
278 """
328 """
279 Updates postprocessing rules for applications
329 Updates postprocessing rules for applications
280 """
330 """
281 json_body = request.unsafe_json_body
331 json_body = request.unsafe_json_body
282
332
283 schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX)
333 schema = build_rule_schema(json_body["rule"], REPORT_TYPE_MATRIX)
284 try:
334 try:
285 schema.deserialize(json_body['rule'])
335 schema.deserialize(json_body["rule"])
286 except colander.Invalid as exc:
336 except colander.Invalid as exc:
287 return HTTPUnprocessableEntity(body=json.dumps(exc.asdict()))
337 return HTTPUnprocessableEntity(body=json.dumps(exc.asdict()))
288
338
289 resource = request.context.resource
339 resource = request.context.resource
290 conf = ApplicationPostprocessConfService.by_pkey_and_resource_id(
340 conf = ApplicationPostprocessConfService.by_pkey_and_resource_id(
291 json_body['pkey'], resource.resource_id)
341 json_body["pkey"], resource.resource_id
292 conf.rule = request.unsafe_json_body['rule']
342 )
343 conf.rule = request.unsafe_json_body["rule"]
293 # for now hardcode int since we don't support anything else so far
344 # for now hardcode int since we don't support anything else so far
294 conf.new_value = int(request.unsafe_json_body['new_value'])
345 conf.new_value = int(request.unsafe_json_body["new_value"])
295 return conf.get_dict()
346 return conf.get_dict()
296
347
297
348
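
applications_postprocess_PATCH above validates the submitted rule with a colander schema and turns colander.Invalid into a 422 body. A self-contained sketch of that pattern with a stub schema (build_rule_schema and REPORT_TYPE_MATRIX produce the real one):

    import colander

    class RuleSchema(colander.MappingSchema):
        # stand-in for the schema built by build_rule_schema(...)
        field = colander.SchemaNode(colander.String())
        op = colander.SchemaNode(colander.String())

    def validate_rule(payload):
        try:
            return RuleSchema().deserialize(payload), None
        except colander.Invalid as exc:
            # exc.asdict() is the field -> message mapping returned in the 422 body
            return None, exc.asdict()

    print(validate_rule({"field": "http_status", "op": "eq"}))
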
298 @view_config(route_name='applications_property',
349 @view_config(
299 match_param='key=postprocessing_rules', renderer='json',
350 route_name="applications_property",
300 request_method='DELETE', permission='edit')
351 match_param="key=postprocessing_rules",
352 renderer="json",
353 request_method="DELETE",
354 permission="edit",
355 )
301 def applications_postprocess_DELETE(request):
356 def applications_postprocess_DELETE(request):
302 """
357 """
303 Removes application postprocessing rules
358 Removes application postprocessing rules
@@ -306,89 +361,99 b' def applications_postprocess_DELETE(request):'
306 resource = request.context.resource
361 resource = request.context.resource
307 if form.validate():
362 if form.validate():
308 for postprocess_conf in resource.postprocess_conf:
363 for postprocess_conf in resource.postprocess_conf:
309 if postprocess_conf.pkey == int(request.GET['pkey']):
364 if postprocess_conf.pkey == int(request.GET["pkey"]):
310 # remove rule
365 # remove rule
311 DBSession.delete(postprocess_conf)
366 DBSession.delete(postprocess_conf)
312 return True
367 return True
313
368
314
369
315 @view_config(route_name='applications_property',
370 @view_config(
316 match_param='key=report_graphs', renderer='json',
371 route_name="applications_property",
317 permission='view')
372 match_param="key=report_graphs",
318 @view_config(route_name='applications_property',
373 renderer="json",
319 match_param='key=slow_report_graphs', renderer='json',
374 permission="view",
320 permission='view')
375 )
376 @view_config(
377 route_name="applications_property",
378 match_param="key=slow_report_graphs",
379 renderer="json",
380 permission="view",
381 )
321 def get_application_report_stats(request):
382 def get_application_report_stats(request):
322 query_params = request.GET.mixed()
383 query_params = request.GET.mixed()
323 query_params['resource'] = (request.context.resource.resource_id,)
384 query_params["resource"] = (request.context.resource.resource_id,)
324
385
325 filter_settings = build_filter_settings_from_query_dict(request,
386 filter_settings = build_filter_settings_from_query_dict(request, query_params)
326 query_params)
387 if not filter_settings.get("end_date"):
327 if not filter_settings.get('end_date'):
328 end_date = datetime.utcnow().replace(microsecond=0, second=0)
388 end_date = datetime.utcnow().replace(microsecond=0, second=0)
329 filter_settings['end_date'] = end_date
389 filter_settings["end_date"] = end_date
330
390
331 if not filter_settings.get('start_date'):
391 if not filter_settings.get("start_date"):
332 delta = timedelta(hours=1)
392 delta = timedelta(hours=1)
333 filter_settings['start_date'] = filter_settings['end_date'] - delta
393 filter_settings["start_date"] = filter_settings["end_date"] - delta
334
394
335 result = ReportGroupService.get_report_stats(request, filter_settings)
395 result = ReportGroupService.get_report_stats(request, filter_settings)
336 return result
396 return result
337
397
338
398
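
get_application_report_stats above (and metrics_graphs below) default the query window the same way: the end date is "now" truncated to the minute and the start date trails it by one hour. A standalone sketch of that defaulting:

    from datetime import datetime, timedelta

    def apply_default_window(filter_settings):
        if not filter_settings.get("end_date"):
            filter_settings["end_date"] = datetime.utcnow().replace(
                second=0, microsecond=0)
        if not filter_settings.get("start_date"):
            filter_settings["start_date"] = (
                filter_settings["end_date"] - timedelta(hours=1))
        return filter_settings

    print(apply_default_window({}))
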
339 @view_config(route_name='applications_property',
399 @view_config(
340 match_param='key=metrics_graphs', renderer='json',
400 route_name="applications_property",
341 permission='view')
401 match_param="key=metrics_graphs",
402 renderer="json",
403 permission="view",
404 )
342 def metrics_graphs(request):
405 def metrics_graphs(request):
343 """
406 """
344 Handles metric dashboard graphs
407 Handles metric dashboard graphs
345 Returns information for time/tier breakdown
408 Returns information for time/tier breakdown
346 """
409 """
347 query_params = request.GET.mixed()
410 query_params = request.GET.mixed()
348 query_params['resource'] = (request.context.resource.resource_id,)
411 query_params["resource"] = (request.context.resource.resource_id,)
349
412
350 filter_settings = build_filter_settings_from_query_dict(request,
413 filter_settings = build_filter_settings_from_query_dict(request, query_params)
351 query_params)
352
414
353 if not filter_settings.get('end_date'):
415 if not filter_settings.get("end_date"):
354 end_date = datetime.utcnow().replace(microsecond=0, second=0)
416 end_date = datetime.utcnow().replace(microsecond=0, second=0)
355 filter_settings['end_date'] = end_date
417 filter_settings["end_date"] = end_date
356
418
357 delta = timedelta(hours=1)
419 delta = timedelta(hours=1)
358 if not filter_settings.get('start_date'):
420 if not filter_settings.get("start_date"):
359 filter_settings['start_date'] = filter_settings['end_date'] - delta
421 filter_settings["start_date"] = filter_settings["end_date"] - delta
360 if filter_settings['end_date'] <= filter_settings['start_date']:
422 if filter_settings["end_date"] <= filter_settings["start_date"]:
361 filter_settings['end_date'] = filter_settings['start_date']
423 filter_settings["end_date"] = filter_settings["start_date"]
362
424
363 delta = filter_settings['end_date'] - filter_settings['start_date']
425 delta = filter_settings["end_date"] - filter_settings["start_date"]
364 if delta < h.time_deltas.get('12h')['delta']:
426 if delta < h.time_deltas.get("12h")["delta"]:
365 divide_by_min = 1
427 divide_by_min = 1
366 elif delta <= h.time_deltas.get('3d')['delta']:
428 elif delta <= h.time_deltas.get("3d")["delta"]:
367 divide_by_min = 5.0
429 divide_by_min = 5.0
368 elif delta >= h.time_deltas.get('2w')['delta']:
430 elif delta >= h.time_deltas.get("2w")["delta"]:
369 divide_by_min = 60.0 * 24
431 divide_by_min = 60.0 * 24
370 else:
432 else:
371 divide_by_min = 60.0
433 divide_by_min = 60.0
372
434
373 results = RequestMetricService.get_metrics_stats(
435 results = RequestMetricService.get_metrics_stats(request, filter_settings)
374 request, filter_settings)
375     # requests are PER SECOND, so 1-minute stats are divided by 60
436     # requests are PER SECOND, so 1-minute stats are divided by 60
376     # request counts are normalized to a 1-minute average
437     # request counts are normalized to a 1-minute average
377     # results are the average seconds spent per request in each area
438     # results are the average seconds spent per request in each area
378 for point in results:
439 for point in results:
379 if point['requests']:
440 if point["requests"]:
380 point['main'] = (point['main'] - point['sql'] -
441 point["main"] = (
381 point['nosql'] - point['remote'] -
442 point["main"]
382 point['tmpl'] -
443 - point["sql"]
383 point['custom']) / point['requests']
444 - point["nosql"]
384 point['sql'] = point['sql'] / point['requests']
445 - point["remote"]
385 point['nosql'] = point['nosql'] / point['requests']
446 - point["tmpl"]
386 point['remote'] = point['remote'] / point['requests']
447 - point["custom"]
387 point['tmpl'] = point['tmpl'] / point['requests']
448 ) / point["requests"]
388 point['custom'] = point['custom'] / point['requests']
449 point["sql"] = point["sql"] / point["requests"]
389 point['requests_2'] = point['requests'] / 60.0 / divide_by_min
450 point["nosql"] = point["nosql"] / point["requests"]
390
451 point["remote"] = point["remote"] / point["requests"]
391 selected_types = ['main', 'sql', 'nosql', 'remote', 'tmpl', 'custom']
452 point["tmpl"] = point["tmpl"] / point["requests"]
453 point["custom"] = point["custom"] / point["requests"]
454 point["requests_2"] = point["requests"] / 60.0 / divide_by_min
455
456 selected_types = ["main", "sql", "nosql", "remote", "tmpl", "custom"]
392
457
393 for point in results:
458 for point in results:
394 for stat_type in selected_types:
459 for stat_type in selected_types:
@@ -397,228 +462,231 b' def metrics_graphs(request):'
397 return results
462 return results
398
463
399
464
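The normalization block above is dense enough to deserve a worked example. A minimal, self-contained sketch of the same arithmetic follows; the input bucket and its values are hypothetical, only the field names mirror the view:

# hypothetical stats bucket; field names mirror the view code
point = {"main": 120.0, "sql": 30.0, "nosql": 6.0, "remote": 12.0,
         "tmpl": 18.0, "custom": 6.0, "requests": 60}
divide_by_min = 1  # 1-minute buckets, per the < 12h branch above

if point["requests"]:
    # "main" is total wall time; subtracting the tracked sub-timers
    # leaves "everything else", then each field becomes seconds/request
    point["main"] = (point["main"] - point["sql"] - point["nosql"]
                     - point["remote"] - point["tmpl"]
                     - point["custom"]) / point["requests"]
    for key in ("sql", "nosql", "remote", "tmpl", "custom"):
        point[key] /= point["requests"]
    # bucket total -> average requests per second for the bucket
    point["requests_2"] = point["requests"] / 60.0 / divide_by_min

assert point["main"] == 0.8  # (120 - 72) / 60 seconds per request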
400 @view_config(route_name='applications_property',
465 @view_config(
401 match_param='key=response_graphs', renderer='json',
466 route_name="applications_property",
402 permission='view')
467 match_param="key=response_graphs",
468 renderer="json",
469 permission="view",
470 )
403 def response_graphs(request):
471 def response_graphs(request):
404 """
472 """
405     Handles dashboard information for avg. response time split by today,
473     Handles dashboard information for avg. response time split by today,
406     2 days ago and a week ago
474     2 days ago and a week ago
407 """
475 """
408 query_params = request.GET.mixed()
476 query_params = request.GET.mixed()
409 query_params['resource'] = (request.context.resource.resource_id,)
477 query_params["resource"] = (request.context.resource.resource_id,)
410
478
411 filter_settings = build_filter_settings_from_query_dict(request,
479 filter_settings = build_filter_settings_from_query_dict(request, query_params)
412 query_params)
413
480
414 if not filter_settings.get('end_date'):
481 if not filter_settings.get("end_date"):
415 end_date = datetime.utcnow().replace(microsecond=0, second=0)
482 end_date = datetime.utcnow().replace(microsecond=0, second=0)
416 filter_settings['end_date'] = end_date
483 filter_settings["end_date"] = end_date
417
484
418 delta = timedelta(hours=1)
485 delta = timedelta(hours=1)
419 if not filter_settings.get('start_date'):
486 if not filter_settings.get("start_date"):
420 filter_settings['start_date'] = filter_settings['end_date'] - delta
487 filter_settings["start_date"] = filter_settings["end_date"] - delta
421
488
422 result_now = RequestMetricService.get_metrics_stats(
489 result_now = RequestMetricService.get_metrics_stats(request, filter_settings)
423 request, filter_settings)
424
490
425 filter_settings_2d = filter_settings.copy()
491 filter_settings_2d = filter_settings.copy()
426 filter_settings_2d['start_date'] = filter_settings['start_date'] - \
492 filter_settings_2d["start_date"] = filter_settings["start_date"] - timedelta(days=2)
427 timedelta(days=2)
493 filter_settings_2d["end_date"] = filter_settings["end_date"] - timedelta(days=2)
428 filter_settings_2d['end_date'] = filter_settings['end_date'] - \
494 result_2d = RequestMetricService.get_metrics_stats(request, filter_settings_2d)
429 timedelta(days=2)
430 result_2d = RequestMetricService.get_metrics_stats(
431 request, filter_settings_2d)
432
495
433 filter_settings_7d = filter_settings.copy()
496 filter_settings_7d = filter_settings.copy()
434 filter_settings_7d['start_date'] = filter_settings['start_date'] - \
497 filter_settings_7d["start_date"] = filter_settings["start_date"] - timedelta(days=7)
435 timedelta(days=7)
498 filter_settings_7d["end_date"] = filter_settings["end_date"] - timedelta(days=7)
436 filter_settings_7d['end_date'] = filter_settings['end_date'] - \
499 result_7d = RequestMetricService.get_metrics_stats(request, filter_settings_7d)
437 timedelta(days=7)
438 result_7d = RequestMetricService.get_metrics_stats(
439 request, filter_settings_7d)
440
500
441 plot_data = []
501 plot_data = []
442
502
443 for item in result_now:
503 for item in result_now:
444 point = {'x': item['x'], 'today': 0, 'days_ago_2': 0,
504 point = {"x": item["x"], "today": 0, "days_ago_2": 0, "days_ago_7": 0}
445 'days_ago_7': 0}
505 if item["requests"]:
446 if item['requests']:
506 point["today"] = round(item["main"] / item["requests"], 3)
447 point['today'] = round(item['main'] / item['requests'], 3)
448 plot_data.append(point)
507 plot_data.append(point)
449
508
450 for i, item in enumerate(result_2d[:len(plot_data)]):
509 for i, item in enumerate(result_2d[: len(plot_data)]):
451 plot_data[i]['days_ago_2'] = 0
510 plot_data[i]["days_ago_2"] = 0
452 point = result_2d[i]
511 point = result_2d[i]
453 if point['requests']:
512 if point["requests"]:
454 plot_data[i]['days_ago_2'] = round(point['main'] /
513 plot_data[i]["days_ago_2"] = round(point["main"] / point["requests"], 3)
455 point['requests'], 3)
456
514
457 for i, item in enumerate(result_7d[:len(plot_data)]):
515 for i, item in enumerate(result_7d[: len(plot_data)]):
458 plot_data[i]['days_ago_7'] = 0
516 plot_data[i]["days_ago_7"] = 0
459 point = result_7d[i]
517 point = result_7d[i]
460 if point['requests']:
518 if point["requests"]:
461 plot_data[i]['days_ago_7'] = round(point['main'] /
519 plot_data[i]["days_ago_7"] = round(point["main"] / point["requests"], 3)
462 point['requests'], 3)
463
520
464 return plot_data
521 return plot_data
465
522
466
523
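What this view does, in short: fetch the same one-hour window three times, shifted back by zero, two, and seven days, then merge the series positionally. A small sketch of the window arithmetic (the dates are hypothetical; the real window comes from filter_settings):

from datetime import datetime, timedelta

end = datetime(2018, 6, 8, 12, 0)
start = end - timedelta(hours=1)

windows = {
    "today": (start, end),
    "days_ago_2": (start - timedelta(days=2), end - timedelta(days=2)),
    "days_ago_7": (start - timedelta(days=7), end - timedelta(days=7)),
}
# each series is fetched separately and merged by position, so the i-th
# bucket of a shifted series lines up with the i-th bucket of "today"
for name, (s, e) in windows.items():
    assert e - s == timedelta(hours=1)  # same width, different offset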
467 @view_config(route_name='applications_property',
524 @view_config(
468 match_param='key=requests_graphs', renderer='json',
525 route_name="applications_property",
469 permission='view')
526 match_param="key=requests_graphs",
527 renderer="json",
528 permission="view",
529 )
470 def requests_graphs(request):
530 def requests_graphs(request):
471 """
531 """
472     Handles dashboard information for the request rate (requests
532     Handles dashboard information for the request rate (requests
473     per second) in a given time interval
533     per second) in a given time interval
474 """
534 """
475 query_params = request.GET.mixed()
535 query_params = request.GET.mixed()
476 query_params['resource'] = (request.context.resource.resource_id,)
536 query_params["resource"] = (request.context.resource.resource_id,)
477
537
478 filter_settings = build_filter_settings_from_query_dict(request,
538 filter_settings = build_filter_settings_from_query_dict(request, query_params)
479 query_params)
480
539
481 if not filter_settings.get('end_date'):
540 if not filter_settings.get("end_date"):
482 end_date = datetime.utcnow().replace(microsecond=0, second=0)
541 end_date = datetime.utcnow().replace(microsecond=0, second=0)
483 filter_settings['end_date'] = end_date
542 filter_settings["end_date"] = end_date
484
543
485 delta = timedelta(hours=1)
544 delta = timedelta(hours=1)
486 if not filter_settings.get('start_date'):
545 if not filter_settings.get("start_date"):
487 filter_settings['start_date'] = filter_settings['end_date'] - delta
546 filter_settings["start_date"] = filter_settings["end_date"] - delta
488
547
489 result_now = RequestMetricService.get_metrics_stats(
548 result_now = RequestMetricService.get_metrics_stats(request, filter_settings)
490 request, filter_settings)
549
491
550 delta = filter_settings["end_date"] - filter_settings["start_date"]
492 delta = filter_settings['end_date'] - filter_settings['start_date']
551 if delta < h.time_deltas.get("12h")["delta"]:
493 if delta < h.time_deltas.get('12h')['delta']:
552 seconds = h.time_deltas["1m"]["minutes"] * 60.0
494 seconds = h.time_deltas['1m']['minutes'] * 60.0
553 elif delta <= h.time_deltas.get("3d")["delta"]:
495 elif delta <= h.time_deltas.get('3d')['delta']:
554 seconds = h.time_deltas["5m"]["minutes"] * 60.0
496 seconds = h.time_deltas['5m']['minutes'] * 60.0
555 elif delta >= h.time_deltas.get("2w")["delta"]:
497 elif delta >= h.time_deltas.get('2w')['delta']:
556 seconds = h.time_deltas["24h"]["minutes"] * 60.0
498 seconds = h.time_deltas['24h']['minutes'] * 60.0
499 else:
557 else:
500 seconds = h.time_deltas['1h']['minutes'] * 60.0
558 seconds = h.time_deltas["1h"]["minutes"] * 60.0
501
559
502 for item in result_now:
560 for item in result_now:
503 if item['requests']:
561 if item["requests"]:
504 item['requests'] = round(item['requests'] / seconds, 3)
562 item["requests"] = round(item["requests"] / seconds, 3)
505 return result_now
563 return result_now
506
564
507
565
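The branch above picks an aggregation bucket from the overall span and then divides the raw per-bucket counts by the bucket length in seconds. A standalone sketch of that mapping, with an assumed minimal shape for h.time_deltas (only the keys used here are modeled):

from datetime import timedelta

# assumed shape of h.time_deltas, reduced to the keys the view touches
time_deltas = {
    "1m": {"minutes": 1},
    "5m": {"minutes": 5},
    "1h": {"minutes": 60},
    "24h": {"minutes": 60 * 24},
    "12h": {"delta": timedelta(hours=12)},
    "3d": {"delta": timedelta(days=3)},
    "2w": {"delta": timedelta(weeks=2)},
}

def bucket_seconds(span):
    # mirrors the branch order in the view above
    if span < time_deltas["12h"]["delta"]:
        minutes = time_deltas["1m"]["minutes"]
    elif span <= time_deltas["3d"]["delta"]:
        minutes = time_deltas["5m"]["minutes"]
    elif span >= time_deltas["2w"]["delta"]:
        minutes = time_deltas["24h"]["minutes"]
    else:
        minutes = time_deltas["1h"]["minutes"]
    return minutes * 60.0

assert bucket_seconds(timedelta(hours=2)) == 60.0  # 1-minute buckets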
508 @view_config(route_name='applications_property',
566 @view_config(
509 match_param='key=apdex_stats', renderer='json',
567 route_name="applications_property",
510 permission='view')
568 match_param="key=apdex_stats",
569 renderer="json",
570 permission="view",
571 )
511 def get_apdex_stats(request):
572 def get_apdex_stats(request):
512 """
573 """
513     Calculates and returns the APDEX score per server for the dashboard
574     Calculates and returns the APDEX score per server for the dashboard
514     server stats boxes (upper right)
575     server stats boxes (upper right)
515 """
576 """
516 query_params = request.GET.mixed()
577 query_params = request.GET.mixed()
517 query_params['resource'] = (request.context.resource.resource_id,)
578 query_params["resource"] = (request.context.resource.resource_id,)
518
579
519 filter_settings = build_filter_settings_from_query_dict(request,
580 filter_settings = build_filter_settings_from_query_dict(request, query_params)
520 query_params)
521     # make sure we have only one resource here so we don't produce
581     # make sure we have only one resource here so we don't produce
522     # weird results when the wrong app is picked in the app selector
582     # weird results when the wrong app is picked in the app selector
523 filter_settings['resource'] = [filter_settings['resource'][0]]
583 filter_settings["resource"] = [filter_settings["resource"][0]]
524
584
525 if not filter_settings.get('end_date'):
585 if not filter_settings.get("end_date"):
526 end_date = datetime.utcnow().replace(microsecond=0, second=0)
586 end_date = datetime.utcnow().replace(microsecond=0, second=0)
527 filter_settings['end_date'] = end_date
587 filter_settings["end_date"] = end_date
528
588
529 delta = timedelta(hours=1)
589 delta = timedelta(hours=1)
530 if not filter_settings.get('start_date'):
590 if not filter_settings.get("start_date"):
531 filter_settings['start_date'] = filter_settings['end_date'] - delta
591 filter_settings["start_date"] = filter_settings["end_date"] - delta
532
592
533 return RequestMetricService.get_apdex_stats(request, filter_settings)
593 return RequestMetricService.get_apdex_stats(request, filter_settings)
534
594
535
595
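The scoring itself happens inside RequestMetricService and is not part of this diff. For reference, the standard Apdex formula is (satisfied + tolerating / 2) / total; a minimal sketch (the zero-traffic fallback is an assumption, not taken from the service):

def apdex(satisfied, tolerating, total):
    """Standard Apdex score in the 0..1 range."""
    if not total:
        return 1.0  # assumption: no traffic counts as fully satisfied
    return (satisfied + tolerating / 2.0) / total

# e.g. 600 fast, 300 tolerable, 100 frustrated out of 1000 requests
assert apdex(600, 300, 1000) == 0.75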
536 @view_config(route_name='applications_property', match_param='key=slow_calls',
596 @view_config(
537 renderer='json', permission='view')
597 route_name="applications_property",
598 match_param="key=slow_calls",
599 renderer="json",
600 permission="view",
601 )
538 def get_slow_calls(request):
602 def get_slow_calls(request):
539 """
603 """
540     Returns information for time-consuming calls in a specific time interval
604     Returns information for time-consuming calls in a specific time interval
541 """
605 """
542 query_params = request.GET.mixed()
606 query_params = request.GET.mixed()
543 query_params['resource'] = (request.context.resource.resource_id,)
607 query_params["resource"] = (request.context.resource.resource_id,)
544
608
545 filter_settings = build_filter_settings_from_query_dict(request,
609 filter_settings = build_filter_settings_from_query_dict(request, query_params)
546 query_params)
547
610
548 if not filter_settings.get('end_date'):
611 if not filter_settings.get("end_date"):
549 end_date = datetime.utcnow().replace(microsecond=0, second=0)
612 end_date = datetime.utcnow().replace(microsecond=0, second=0)
550 filter_settings['end_date'] = end_date
613 filter_settings["end_date"] = end_date
551
614
552 delta = timedelta(hours=1)
615 delta = timedelta(hours=1)
553 if not filter_settings.get('start_date'):
616 if not filter_settings.get("start_date"):
554 filter_settings['start_date'] = filter_settings['end_date'] - delta
617 filter_settings["start_date"] = filter_settings["end_date"] - delta
555
618
556 return SlowCallService.get_time_consuming_calls(request, filter_settings)
619 return SlowCallService.get_time_consuming_calls(request, filter_settings)
557
620
558
621
559 @view_config(route_name='applications_property',
622 @view_config(
560 match_param='key=requests_breakdown',
623 route_name="applications_property",
561 renderer='json', permission='view')
624 match_param="key=requests_breakdown",
625 renderer="json",
626 permission="view",
627 )
562 def get_requests_breakdown(request):
628 def get_requests_breakdown(request):
563 """
629 """
564 Used on dashboard to get information which views are most used in
630 Used on dashboard to get information which views are most used in
565 a time interval
631 a time interval
566 """
632 """
567 query_params = request.GET.mixed()
633 query_params = request.GET.mixed()
568 query_params['resource'] = (request.context.resource.resource_id,)
634 query_params["resource"] = (request.context.resource.resource_id,)
569
635
570 filter_settings = build_filter_settings_from_query_dict(request,
636 filter_settings = build_filter_settings_from_query_dict(request, query_params)
571 query_params)
637 if not filter_settings.get("end_date"):
572 if not filter_settings.get('end_date'):
573 end_date = datetime.utcnow().replace(microsecond=0, second=0)
638 end_date = datetime.utcnow().replace(microsecond=0, second=0)
574 filter_settings['end_date'] = end_date
639 filter_settings["end_date"] = end_date
575
640
576 if not filter_settings.get('start_date'):
641 if not filter_settings.get("start_date"):
577 delta = timedelta(hours=1)
642 delta = timedelta(hours=1)
578 filter_settings['start_date'] = filter_settings['end_date'] - delta
643 filter_settings["start_date"] = filter_settings["end_date"] - delta
579
644
580 series = RequestMetricService.get_requests_breakdown(
645 series = RequestMetricService.get_requests_breakdown(request, filter_settings)
581 request, filter_settings)
582
646
583 results = []
647 results = []
584 for row in series:
648 for row in series:
585 d_row = {'avg_response': round(row['main'] / row['requests'], 3),
649 d_row = {
586 'requests': row['requests'],
650 "avg_response": round(row["main"] / row["requests"], 3),
587 'main': row['main'],
651 "requests": row["requests"],
588 'view_name': row['key'],
652 "main": row["main"],
589 'latest_details': row['latest_details'],
653 "view_name": row["key"],
590 'percentage': round(row['percentage'] * 100, 1)}
654 "latest_details": row["latest_details"],
655 "percentage": round(row["percentage"] * 100, 1),
656 }
591
657
592 results.append(d_row)
658 results.append(d_row)
593
659
594 return results
660 return results
595
661
596
662
597 @view_config(route_name='applications_property',
663 @view_config(
598 match_param='key=trending_reports', renderer='json',
664 route_name="applications_property",
599 permission='view')
665 match_param="key=trending_reports",
666 renderer="json",
667 permission="view",
668 )
600 def trending_reports(request):
669 def trending_reports(request):
601 """
670 """
602     Returns exception/slow report trends for a specific time interval
671     Returns exception/slow report trends for a specific time interval
603 """
672 """
604 query_params = request.GET.mixed().copy()
673 query_params = request.GET.mixed().copy()
605 # pop report type to rewrite it to tag later
674 # pop report type to rewrite it to tag later
606 report_type = query_params.pop('report_type', None)
675 report_type = query_params.pop("report_type", None)
607 if report_type:
676 if report_type:
608 query_params['type'] = report_type
677 query_params["type"] = report_type
609
678
610 query_params['resource'] = (request.context.resource.resource_id,)
679 query_params["resource"] = (request.context.resource.resource_id,)
611
680
612 filter_settings = build_filter_settings_from_query_dict(request,
681 filter_settings = build_filter_settings_from_query_dict(request, query_params)
613 query_params)
614
682
615 if not filter_settings.get('end_date'):
683 if not filter_settings.get("end_date"):
616 end_date = datetime.utcnow().replace(microsecond=0, second=0)
684 end_date = datetime.utcnow().replace(microsecond=0, second=0)
617 filter_settings['end_date'] = end_date
685 filter_settings["end_date"] = end_date
618
686
619 if not filter_settings.get('start_date'):
687 if not filter_settings.get("start_date"):
620 delta = timedelta(hours=1)
688 delta = timedelta(hours=1)
621 filter_settings['start_date'] = filter_settings['end_date'] - delta
689 filter_settings["start_date"] = filter_settings["end_date"] - delta
622
690
623 results = ReportGroupService.get_trending(request, filter_settings)
691 results = ReportGroupService.get_trending(request, filter_settings)
624
692
@@ -626,112 +694,133 b' def trending_reports(request):'
626 for occurences, group in results:
694 for occurences, group in results:
627 report_group = group.get_dict(request)
695 report_group = group.get_dict(request)
628 # show the occurences in time range instead of global ones
696 # show the occurences in time range instead of global ones
629 report_group['occurences'] = occurences
697 report_group["occurences"] = occurences
630 trending.append(report_group)
698 trending.append(report_group)
631
699
632 return trending
700 return trending
633
701
634
702
635 @view_config(route_name='applications_property',
703 @view_config(
636 match_param='key=integrations',
704 route_name="applications_property",
637 renderer='json', permission='view')
705 match_param="key=integrations",
706 renderer="json",
707 permission="view",
708 )
638 def integrations(request):
709 def integrations(request):
639 """
710 """
640 Integration list for given application
711 Integration list for given application
641 """
712 """
642 application = request.context.resource
713 application = request.context.resource
643 return {'resource': application}
714 return {"resource": application}
644
715
645
716
646 @view_config(route_name='applications_property',
717 @view_config(
647 match_param='key=user_permissions', renderer='json',
718 route_name="applications_property",
648 permission='owner', request_method='POST')
719 match_param="key=user_permissions",
720 renderer="json",
721 permission="owner",
722 request_method="POST",
723 )
649 def user_resource_permission_create(request):
724 def user_resource_permission_create(request):
650 """
725 """
651 Set new permissions for user for a resource
726 Set new permissions for user for a resource
652 """
727 """
653 resource = request.context.resource
728 resource = request.context.resource
654 user_name = request.unsafe_json_body.get('user_name')
729 user_name = request.unsafe_json_body.get("user_name")
655 user = UserService.by_user_name(user_name)
730 user = UserService.by_user_name(user_name)
656 if not user:
731 if not user:
657 user = UserService.by_email(user_name)
732 user = UserService.by_email(user_name)
658 if not user:
733 if not user:
659 return False
734 return False
660
735
661 for perm_name in request.unsafe_json_body.get('permissions', []):
736 for perm_name in request.unsafe_json_body.get("permissions", []):
662 permission = UserResourcePermissionService.by_resource_user_and_perm(
737 permission = UserResourcePermissionService.by_resource_user_and_perm(
663 user.id, perm_name, resource.resource_id)
738 user.id, perm_name, resource.resource_id
739 )
664 if not permission:
740 if not permission:
665 permission = UserResourcePermission(perm_name=perm_name,
741 permission = UserResourcePermission(perm_name=perm_name, user_id=user.id)
666 user_id=user.id)
667 resource.user_permissions.append(permission)
742 resource.user_permissions.append(permission)
668 DBSession.flush()
743 DBSession.flush()
669 perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user)
744 perms = [
670 if p.type == 'user']
745 p.perm_name
671 result = {'user_name': user.user_name,
746 for p in ResourceService.perms_for_user(resource, user)
672 'permissions': list(set(perms))}
747 if p.type == "user"
748 ]
749 result = {"user_name": user.user_name, "permissions": list(set(perms))}
673 return result
750 return result
674
751
675
752
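The POST handler above is deliberately idempotent: it looks up an existing (user, permission, resource) row and only appends a new UserResourcePermission when none exists, then re-reads the effective permissions so the response reflects stored state. The bare get-or-create pattern, reduced to plain data (everything here is illustrative, no ORM involved):

def grant(user_permissions, user_id, perm_name):
    # the list stands in for the DB lookup done by the service above
    if (user_id, perm_name) in user_permissions:
        return user_permissions  # already granted, nothing to do
    user_permissions.append((user_id, perm_name))
    return user_permissions

perms = [(1, "view")]
grant(perms, 1, "view")  # no-op
grant(perms, 1, "edit")  # appended once
assert perms == [(1, "view"), (1, "edit")]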
676 @view_config(route_name='applications_property',
753 @view_config(
677 match_param='key=user_permissions', renderer='json',
754 route_name="applications_property",
678 permission='owner', request_method='DELETE')
755 match_param="key=user_permissions",
756 renderer="json",
757 permission="owner",
758 request_method="DELETE",
759 )
679 def user_resource_permission_delete(request):
760 def user_resource_permission_delete(request):
680 """
761 """
681     Removes a user's permission from a specific resource
762     Removes a user's permission from a specific resource
682 """
763 """
683 resource = request.context.resource
764 resource = request.context.resource
684
765
685 user = UserService.by_user_name(request.GET.get('user_name'))
766 user = UserService.by_user_name(request.GET.get("user_name"))
686 if not user:
767 if not user:
687 return False
768 return False
688
769
689 for perm_name in request.GET.getall('permissions'):
770 for perm_name in request.GET.getall("permissions"):
690 permission = UserResourcePermissionService.by_resource_user_and_perm(
771 permission = UserResourcePermissionService.by_resource_user_and_perm(
691 user.id, perm_name, resource.resource_id)
772 user.id, perm_name, resource.resource_id
773 )
692 resource.user_permissions.remove(permission)
774 resource.user_permissions.remove(permission)
693 DBSession.flush()
775 DBSession.flush()
694 perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user)
776 perms = [
695 if p.type == 'user']
777 p.perm_name
696 result = {'user_name': user.user_name,
778 for p in ResourceService.perms_for_user(resource, user)
697 'permissions': list(set(perms))}
779 if p.type == "user"
780 ]
781 result = {"user_name": user.user_name, "permissions": list(set(perms))}
698 return result
782 return result
699
783
700
784
701 @view_config(route_name='applications_property',
785 @view_config(
702 match_param='key=group_permissions', renderer='json',
786 route_name="applications_property",
703 permission='owner', request_method='POST')
787 match_param="key=group_permissions",
788 renderer="json",
789 permission="owner",
790 request_method="POST",
791 )
704 def group_resource_permission_create(request):
792 def group_resource_permission_create(request):
705 """
793 """
706 Set new permissions for group for a resource
794 Set new permissions for group for a resource
707 """
795 """
708 resource = request.context.resource
796 resource = request.context.resource
709 group = GroupService.by_id(request.unsafe_json_body.get('group_id'))
797 group = GroupService.by_id(request.unsafe_json_body.get("group_id"))
710 if not group:
798 if not group:
711 return False
799 return False
712
800
713 for perm_name in request.unsafe_json_body.get('permissions', []):
801 for perm_name in request.unsafe_json_body.get("permissions", []):
714 permission = GroupResourcePermissionService.by_resource_group_and_perm(
802 permission = GroupResourcePermissionService.by_resource_group_and_perm(
715 group.id, perm_name, resource.resource_id)
803 group.id, perm_name, resource.resource_id
804 )
716 if not permission:
805 if not permission:
717 permission = GroupResourcePermission(perm_name=perm_name,
806 permission = GroupResourcePermission(perm_name=perm_name, group_id=group.id)
718 group_id=group.id)
719 resource.group_permissions.append(permission)
807 resource.group_permissions.append(permission)
720 DBSession.flush()
808 DBSession.flush()
721 perm_tuples = ResourceService.groups_for_perm(
809 perm_tuples = ResourceService.groups_for_perm(
722 resource,
810 resource, ANY_PERMISSION, limit_group_permissions=True, group_ids=[group.id]
723 ANY_PERMISSION,
811 )
724 limit_group_permissions=True,
812 perms = [p.perm_name for p in perm_tuples if p.type == "group"]
725 group_ids=[group.id])
813 result = {"group": group.get_dict(), "permissions": list(set(perms))}
726 perms = [p.perm_name for p in perm_tuples if p.type == 'group']
727 result = {'group': group.get_dict(),
728 'permissions': list(set(perms))}
729 return result
814 return result
730
815
731
816
732 @view_config(route_name='applications_property',
817 @view_config(
733 match_param='key=group_permissions', renderer='json',
818 route_name="applications_property",
734 permission='owner', request_method='DELETE')
819 match_param="key=group_permissions",
820 renderer="json",
821 permission="owner",
822 request_method="DELETE",
823 )
735 def group_resource_permission_delete(request):
824 def group_resource_permission_delete(request):
736 """
825 """
737     Removes a group's permission from a specific resource
826     Removes a group's permission from a specific resource
@@ -739,21 +828,19 b' def group_resource_permission_delete(request):'
739 form = forms.ReactorForm(request.POST, csrf_context=request)
828 form = forms.ReactorForm(request.POST, csrf_context=request)
740 form.validate()
829 form.validate()
741 resource = request.context.resource
830 resource = request.context.resource
742 group = GroupService.by_id(request.GET.get('group_id'))
831 group = GroupService.by_id(request.GET.get("group_id"))
743 if not group:
832 if not group:
744 return False
833 return False
745
834
746 for perm_name in request.GET.getall('permissions'):
835 for perm_name in request.GET.getall("permissions"):
747 permission = GroupResourcePermissionService.by_resource_group_and_perm(
836 permission = GroupResourcePermissionService.by_resource_group_and_perm(
748 group.id, perm_name, resource.resource_id)
837 group.id, perm_name, resource.resource_id
838 )
749 resource.group_permissions.remove(permission)
839 resource.group_permissions.remove(permission)
750 DBSession.flush()
840 DBSession.flush()
751 perm_tuples = ResourceService.groups_for_perm(
841 perm_tuples = ResourceService.groups_for_perm(
752 resource,
842 resource, ANY_PERMISSION, limit_group_permissions=True, group_ids=[group.id]
753 ANY_PERMISSION,
843 )
754 limit_group_permissions=True,
844 perms = [p.perm_name for p in perm_tuples if p.type == "group"]
755 group_ids=[group.id])
845 result = {"group": group.get_dict(), "permissions": list(set(perms))}
756 perms = [p.perm_name for p in perm_tuples if p.type == 'group']
757 result = {'group': group.get_dict(),
758 'permissions': list(set(perms))}
759 return result
846 return result
@@ -21,16 +21,13 b' from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound'
21 from ziggurat_foundations.models.services.user import UserService
21 from ziggurat_foundations.models.services.user import UserService
22
22
23
23
24 @view_config(route_name='events_no_id',
24 @view_config(route_name="events_no_id", renderer="json", permission="authenticated")
25 renderer='json', permission='authenticated')
26 def fetch_events(request):
25 def fetch_events(request):
27 """
26 """
28     Returns list of events for the current user
27     Returns list of events for the current user
29 """
28 """
30 event_paginator = EventService.get_paginator(
29 event_paginator = EventService.get_paginator(
31 user=request.user,
30 user=request.user, page=1, items_per_page=100
32 page=1,
33 items_per_page=100
34 )
31 )
35 headers = gen_pagination_headers(request, event_paginator)
32 headers = gen_pagination_headers(request, event_paginator)
36 request.response.headers.update(headers)
33 request.response.headers.update(headers)
@@ -38,20 +35,25 b' def fetch_events(request):'
38 return [ev.get_dict() for ev in event_paginator.items]
35 return [ev.get_dict() for ev in event_paginator.items]
39
36
40
37
41 @view_config(route_name='events', renderer='json', request_method='PATCH',
38 @view_config(
42 permission='authenticated')
39 route_name="events",
40 renderer="json",
41 request_method="PATCH",
42 permission="authenticated",
43 )
43 def event_PATCH(request):
44 def event_PATCH(request):
44 resources = UserService.resources_with_perms(
45 resources = UserService.resources_with_perms(
45 request.user, ['view'], resource_types=request.registry.resource_types)
46 request.user, ["view"], resource_types=request.registry.resource_types
47 )
46 event = EventService.for_resource(
48 event = EventService.for_resource(
47 [r.resource_id for r in resources],
49 [r.resource_id for r in resources], event_id=request.matchdict["event_id"]
48 event_id=request.matchdict['event_id']).first()
50 ).first()
49 if not event:
51 if not event:
50 return HTTPNotFound()
52 return HTTPNotFound()
51 allowed_keys = ['status']
53 allowed_keys = ["status"]
52 for k, v in request.unsafe_json_body.items():
54 for k, v in request.unsafe_json_body.items():
53 if k in allowed_keys:
55 if k in allowed_keys:
54 if k == 'status':
56 if k == "status":
55 event.close()
57 event.close()
56 else:
58 else:
57 setattr(event, k, v)
59 setattr(event, k, v)
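event_PATCH only applies keys from an explicit allow-list and routes "status" through event.close() rather than a raw setattr, so the domain transition stays in one place. A minimal standalone version of that guard (the class and payload are made up for illustration):

ALLOWED_KEYS = ["status"]

class Event:
    status = "open"

    def close(self):  # domain-specific transition instead of raw setattr
        self.status = "closed"

def patch_event(event, payload):
    for key, value in payload.items():
        if key not in ALLOWED_KEYS:
            continue  # silently drop anything not allow-listed
        if key == "status":
            event.close()
        else:
            setattr(event, key, value)

ev = Event()
patch_event(ev, {"status": "closed", "resource_id": 999})
assert ev.status == "closed"  # resource_id was ignored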
@@ -34,76 +34,99 b' log = logging.getLogger(__name__)'
34 _ = str
34 _ = str
35
35
36
36
37 @view_config(route_name='groups_no_id', renderer='json',
37 @view_config(
38 request_method="GET", permission='authenticated')
38 route_name="groups_no_id",
39 renderer="json",
40 request_method="GET",
41 permission="authenticated",
42 )
39 def groups_list(request):
43 def groups_list(request):
40 """
44 """
41 Returns groups list
45 Returns groups list
42 """
46 """
43 groups = Group.all().order_by(Group.group_name)
47 groups = Group.all().order_by(Group.group_name)
44 list_groups = ConfigService.by_key_and_section(
48 list_groups = ConfigService.by_key_and_section(
45 'list_groups_to_non_admins', 'global')
49 "list_groups_to_non_admins", "global"
46 if list_groups.value or request.has_permission('root_administration'):
50 )
51 if list_groups.value or request.has_permission("root_administration"):
47 return [g.get_dict() for g in groups]
52 return [g.get_dict() for g in groups]
48 else:
53 else:
49 return []
54 return []
50
55
51
56
52 @view_config(route_name='groups_no_id', renderer='json',
57 @view_config(
53 request_method="POST", permission='root_administration')
58 route_name="groups_no_id",
59 renderer="json",
60 request_method="POST",
61 permission="root_administration",
62 )
54 def groups_create(request):
63 def groups_create(request):
55 """
64 """
56     Creates a new group
65     Creates a new group
57 """
66 """
58 form = forms.GroupCreateForm(
67 form = forms.GroupCreateForm(
59 MultiDict(request.safe_json_body or {}), csrf_context=request)
68 MultiDict(request.safe_json_body or {}), csrf_context=request
69 )
60 if form.validate():
70 if form.validate():
61 log.info('registering group')
71 log.info("registering group")
62 group = Group()
72 group = Group()
63 # insert new group here
73 # insert new group here
64 DBSession.add(group)
74 DBSession.add(group)
65 form.populate_obj(group)
75 form.populate_obj(group)
66 request.session.flash(_('Group created'))
76 request.session.flash(_("Group created"))
67 DBSession.flush()
77 DBSession.flush()
68 return group.get_dict(include_perms=True)
78 return group.get_dict(include_perms=True)
69 else:
79 else:
70 return HTTPUnprocessableEntity(body=form.errors_json)
80 return HTTPUnprocessableEntity(body=form.errors_json)
71
81
72
82
73 @view_config(route_name='groups', renderer='json',
83 @view_config(
74 request_method="DELETE", permission='root_administration')
84 route_name="groups",
85 renderer="json",
86 request_method="DELETE",
87 permission="root_administration",
88 )
75 def groups_DELETE(request):
89 def groups_DELETE(request):
76 """
90 """
77     Removes a group permanently from the db
91     Removes a group permanently from the db
78 """
92 """
79 msg = _('You cannot remove administrator group from the system')
93 msg = _("You cannot remove administrator group from the system")
80 group = GroupService.by_id(request.matchdict.get('group_id'))
94 group = GroupService.by_id(request.matchdict.get("group_id"))
81 if group:
95 if group:
82 if group.id == 1:
96 if group.id == 1:
83 request.session.flash(msg, 'warning')
97 request.session.flash(msg, "warning")
84 else:
98 else:
85 DBSession.delete(group)
99 DBSession.delete(group)
86 request.session.flash(_('Group removed'))
100 request.session.flash(_("Group removed"))
87 return True
101 return True
88 request.response.status = 422
102 request.response.status = 422
89 return False
103 return False
90
104
91
105
92 @view_config(route_name='groups', renderer='json',
106 @view_config(
93 request_method="GET", permission='root_administration')
107 route_name="groups",
94 @view_config(route_name='groups', renderer='json',
108 renderer="json",
95 request_method="PATCH", permission='root_administration')
109 request_method="GET",
110 permission="root_administration",
111 )
112 @view_config(
113 route_name="groups",
114 renderer="json",
115 request_method="PATCH",
116 permission="root_administration",
117 )
96 def group_update(request):
118 def group_update(request):
97 """
119 """
98 Updates group object
120 Updates group object
99 """
121 """
100 group = GroupService.by_id(request.matchdict.get('group_id'))
122 group = GroupService.by_id(request.matchdict.get("group_id"))
101 if not group:
123 if not group:
102 return HTTPNotFound()
124 return HTTPNotFound()
103
125
104 if request.method == 'PATCH':
126 if request.method == "PATCH":
105 form = forms.GroupCreateForm(
127 form = forms.GroupCreateForm(
106 MultiDict(request.unsafe_json_body), csrf_context=request)
128 MultiDict(request.unsafe_json_body), csrf_context=request
129 )
107 form._modified_group = group
130 form._modified_group = group
108 if form.validate():
131 if form.validate():
109 form.populate_obj(group)
132 form.populate_obj(group)
@@ -112,49 +135,69 b' def group_update(request):'
112 return group.get_dict(include_perms=True)
135 return group.get_dict(include_perms=True)
113
136
114
137
115 @view_config(route_name='groups_property',
138 @view_config(
116 match_param='key=resource_permissions',
139 route_name="groups_property",
117 renderer='json', permission='root_administration')
140 match_param="key=resource_permissions",
141 renderer="json",
142 permission="root_administration",
143 )
118 def groups_resource_permissions_list(request):
144 def groups_resource_permissions_list(request):
119 """
145 """
120 Get list of permissions assigned to specific resources
146 Get list of permissions assigned to specific resources
121 """
147 """
122 group = GroupService.by_id(request.matchdict.get('group_id'))
148 group = GroupService.by_id(request.matchdict.get("group_id"))
123 if not group:
149 if not group:
124 return HTTPNotFound()
150 return HTTPNotFound()
125 return [permission_tuple_to_dict(perm) for perm in
151 return [
126 GroupService.resources_with_possible_perms(group)]
152 permission_tuple_to_dict(perm)
127
153 for perm in GroupService.resources_with_possible_perms(group)
128
154 ]
129 @view_config(route_name='groups_property',
155
130 match_param='key=users', request_method="GET",
156
131 renderer='json', permission='root_administration')
157 @view_config(
158 route_name="groups_property",
159 match_param="key=users",
160 request_method="GET",
161 renderer="json",
162 permission="root_administration",
163 )
132 def groups_users_list(request):
164 def groups_users_list(request):
133 """
165 """
134     Get list of users assigned to a specific group
166     Get list of users assigned to a specific group
135 """
167 """
136 group = GroupService.by_id(request.matchdict.get('group_id'))
168 group = GroupService.by_id(request.matchdict.get("group_id"))
137 if not group:
169 if not group:
138 return HTTPNotFound()
170 return HTTPNotFound()
139 props = ['user_name', 'id', 'first_name', 'last_name', 'email',
171 props = [
140 'last_login_date', 'status']
172 "user_name",
173 "id",
174 "first_name",
175 "last_name",
176 "email",
177 "last_login_date",
178 "status",
179 ]
141 users_dicts = []
180 users_dicts = []
142 for user in group.users:
181 for user in group.users:
143 u_dict = user.get_dict(include_keys=props)
182 u_dict = user.get_dict(include_keys=props)
144 u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20)
183 u_dict["gravatar_url"] = UserService.gravatar_url(user, s=20)
145 users_dicts.append(u_dict)
184 users_dicts.append(u_dict)
146 return users_dicts
185 return users_dicts
147
186
148
187
149 @view_config(route_name='groups_property',
188 @view_config(
150 match_param='key=users', request_method="DELETE",
189 route_name="groups_property",
151 renderer='json', permission='root_administration')
190 match_param="key=users",
191 request_method="DELETE",
192 renderer="json",
193 permission="root_administration",
194 )
152 def groups_users_remove(request):
195 def groups_users_remove(request):
153 """
196 """
154     Removes a user from a group
197     Removes a user from a group
155 """
198 """
156 group = GroupService.by_id(request.matchdict.get('group_id'))
199 group = GroupService.by_id(request.matchdict.get("group_id"))
157 user = UserService.by_user_name(request.GET.get('user_name'))
200 user = UserService.by_user_name(request.GET.get("user_name"))
158 if not group or not user:
201 if not group or not user:
159 return HTTPNotFound()
202 return HTTPNotFound()
160 if len(group.users) > 1:
203 if len(group.users) > 1:
@@ -164,29 +207,40 b' def groups_users_remove(request):'
164 group.member_count = group.users_dynamic.count()
207 group.member_count = group.users_dynamic.count()
165 return True
208 return True
166 msg = "Administrator group needs to contain at least one user"
209 msg = "Administrator group needs to contain at least one user"
167 request.session.flash(msg, 'warning')
210 request.session.flash(msg, "warning")
168 return False
211 return False
169
212
170
213
171 @view_config(route_name='groups_property',
214 @view_config(
172 match_param='key=users', request_method="POST",
215 route_name="groups_property",
173 renderer='json', permission='root_administration')
216 match_param="key=users",
217 request_method="POST",
218 renderer="json",
219 permission="root_administration",
220 )
174 def groups_users_add(request):
221 def groups_users_add(request):
175 """
222 """
176     Adds a user to a group
223     Adds a user to a group
177 """
224 """
178 group = GroupService.by_id(request.matchdict.get('group_id'))
225 group = GroupService.by_id(request.matchdict.get("group_id"))
179 user = UserService.by_user_name(request.unsafe_json_body.get('user_name'))
226 user = UserService.by_user_name(request.unsafe_json_body.get("user_name"))
180 if not user:
227 if not user:
181 user = UserService.by_email(request.unsafe_json_body.get('user_name'))
228 user = UserService.by_email(request.unsafe_json_body.get("user_name"))
182
229
183 if not group or not user:
230 if not group or not user:
184 return HTTPNotFound()
231 return HTTPNotFound()
185 if user not in group.users:
232 if user not in group.users:
186 group.users.append(user)
233 group.users.append(user)
187 group.member_count = group.users_dynamic.count()
234 group.member_count = group.users_dynamic.count()
188 props = ['user_name', 'id', 'first_name', 'last_name', 'email',
235 props = [
189 'last_login_date', 'status']
236 "user_name",
237 "id",
238 "first_name",
239 "last_name",
240 "email",
241 "last_login_date",
242 "status",
243 ]
190 u_dict = user.get_dict(include_keys=props)
244 u_dict = user.get_dict(include_keys=props)
191 u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20)
245 u_dict["gravatar_url"] = UserService.gravatar_url(user, s=20)
192 return u_dict
246 return u_dict
@@ -50,18 +50,20 b' def sign_in(request):'
50 if user.status == 1:
50 if user.status == 1:
51 request.session.new_csrf_token()
51 request.session.new_csrf_token()
52 user.last_login_date = datetime.datetime.utcnow()
52 user.last_login_date = datetime.datetime.utcnow()
53 social_data = request.session.get('zigg.social_auth')
53 social_data = request.session.get("zigg.social_auth")
54 if social_data:
54 if social_data:
55 handle_social_data(request, user, social_data)
55 handle_social_data(request, user, social_data)
56 else:
56 else:
57             request.session.flash(_('Account has been disabled'))
58             request.session.flash(_("Account has been disabled"))
58
58
59 if request.context.came_from != '/':
59 if request.context.came_from != "/":
60 return HTTPFound(location=request.context.came_from,
60 return HTTPFound(
61 headers=request.context.headers)
61 location=request.context.came_from, headers=request.context.headers
62 )
62 else:
63 else:
63 return HTTPFound(location=request.route_url('/'),
64 return HTTPFound(
64 headers=request.context.headers)
65 location=request.route_url("/"), headers=request.context.headers
66 )
65
67
66
68
67 @view_config(context=ZigguratSignInBadAuth, permission=NO_PERMISSION_REQUIRED)
69 @view_config(context=ZigguratSignInBadAuth, permission=NO_PERMISSION_REQUIRED)
@@ -69,9 +71,10 b' def bad_auth(request):'
69 """
71 """
70 Handles incorrect login flow
72 Handles incorrect login flow
71 """
73 """
72 request.session.flash(_('Incorrect username or password'), 'warning')
74 request.session.flash(_("Incorrect username or password"), "warning")
73 return HTTPFound(location=request.route_url('register'),
75 return HTTPFound(
74 headers=request.context.headers)
76 location=request.route_url("register"), headers=request.context.headers
77 )
75
78
76
79
77 @view_config(context=ZigguratSignOut, permission=NO_PERMISSION_REQUIRED)
80 @view_config(context=ZigguratSignOut, permission=NO_PERMISSION_REQUIRED)
@@ -79,13 +82,16 b' def sign_out(request):'
79 """
82 """
80 Removes user identification cookie
83 Removes user identification cookie
81 """
84 """
82 return HTTPFound(location=request.route_url('register'),
85 return HTTPFound(
83 headers=request.context.headers)
86 location=request.route_url("register"), headers=request.context.headers
87 )
84
88
85
89
86 @view_config(route_name='lost_password',
90 @view_config(
87 renderer='appenlight:templates/user/lost_password.jinja2',
91 route_name="lost_password",
88 permission=NO_PERMISSION_REQUIRED)
92 renderer="appenlight:templates/user/lost_password.jinja2",
93 permission=NO_PERMISSION_REQUIRED,
94 )
89 def lost_password(request):
95 def lost_password(request):
90 """
96 """
91 Presents lost password page - sends password reset link to
97 Presents lost password page - sends password reset link to
@@ -93,36 +99,43 b' def lost_password(request):'
93 This link is valid only for 10 minutes
99 This link is valid only for 10 minutes
94 """
100 """
95 form = forms.LostPasswordForm(request.POST, csrf_context=request)
101 form = forms.LostPasswordForm(request.POST, csrf_context=request)
96 if request.method == 'POST' and form.validate():
102 if request.method == "POST" and form.validate():
97 user = UserService.by_email(form.email.data)
103 user = UserService.by_email(form.email.data)
98 if user:
104 if user:
99 UserService.regenerate_security_code(user)
105 UserService.regenerate_security_code(user)
100 user.security_code_date = datetime.datetime.utcnow()
106 user.security_code_date = datetime.datetime.utcnow()
101 email_vars = {
107 email_vars = {
102 'user': user,
108 "user": user,
103 'request': request,
109 "request": request,
104 'email_title': "AppEnlight :: New password request"
110 "email_title": "AppEnlight :: New password request",
105 }
111 }
106 UserService.send_email(
112 UserService.send_email(
107 request, recipients=[user.email],
113 request,
114 recipients=[user.email],
108 variables=email_vars,
115 variables=email_vars,
109 template='/email_templates/lost_password.jinja2')
116 template="/email_templates/lost_password.jinja2",
110             msg = 'Password reset email has been sent. ' \
117 )
111 'Please check your mailbox for further instructions.'
118 msg = (
119                 "Password reset email has been sent. "
120 "Please check your mailbox for further instructions."
121 )
112 request.session.flash(_(msg))
122 request.session.flash(_(msg))
113 return HTTPFound(location=request.route_url('lost_password'))
123 return HTTPFound(location=request.route_url("lost_password"))
114 return {"form": form}
124 return {"form": form}
115
125
116
126
117 @view_config(route_name='lost_password_generate',
127 @view_config(
118 permission=NO_PERMISSION_REQUIRED,
128 route_name="lost_password_generate",
119 renderer='appenlight:templates/user/lost_password_generate.jinja2')
129 permission=NO_PERMISSION_REQUIRED,
130 renderer="appenlight:templates/user/lost_password_generate.jinja2",
131 )
120 def lost_password_generate(request):
132 def lost_password_generate(request):
121 """
133 """
122 Shows new password form - perform time check and set new password for user
134 Shows new password form - perform time check and set new password for user
123 """
135 """
124 user = UserService.by_user_name_and_security_code(
136 user = UserService.by_user_name_and_security_code(
125 request.GET.get('user_name'), request.GET.get('security_code'))
137 request.GET.get("user_name"), request.GET.get("security_code")
138 )
126 if user:
139 if user:
127 delta = datetime.datetime.utcnow() - user.security_code_date
140 delta = datetime.datetime.utcnow() - user.security_code_date
128
141
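The docstring above promises a 10-minute validity window; the comparison itself is collapsed out of this hunk, but given delta = utcnow() - user.security_code_date it is presumably a check along these lines (the threshold is an assumption taken from the docstring, not from the elided code):

import datetime

SECURITY_CODE_TTL = datetime.timedelta(minutes=10)  # per the docstring

def code_still_valid(security_code_date, now=None):
    now = now or datetime.datetime.utcnow()
    return now - security_code_date < SECURITY_CODE_TTL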
@@ -130,56 +143,54 b' def lost_password_generate(request):'
130 form = forms.NewPasswordForm(request.POST, csrf_context=request)
143 form = forms.NewPasswordForm(request.POST, csrf_context=request)
131 if request.method == "POST" and form.validate():
144 if request.method == "POST" and form.validate():
132 UserService.set_password(user, form.new_password.data)
145 UserService.set_password(user, form.new_password.data)
133 request.session.flash(_('You can sign in with your new password.'))
146 request.session.flash(_("You can sign in with your new password."))
134 return HTTPFound(location=request.route_url('register'))
147 return HTTPFound(location=request.route_url("register"))
135 else:
148 else:
136 return {"form": form}
149 return {"form": form}
137 else:
150 else:
138 return Response('Security code expired')
151 return Response("Security code expired")
139
152
140
153
141 @view_config(route_name='register',
154 @view_config(
142 renderer='appenlight:templates/user/register.jinja2',
155 route_name="register",
143 permission=NO_PERMISSION_REQUIRED)
156 renderer="appenlight:templates/user/register.jinja2",
157 permission=NO_PERMISSION_REQUIRED,
158 )
144 def register(request):
159 def register(request):
145 """
160 """
146 Render register page with form
161 Render register page with form
147 Also handles oAuth flow for registration
162 Also handles oAuth flow for registration
148 """
163 """
149 login_url = request.route_url('ziggurat.routes.sign_in')
164 login_url = request.route_url("ziggurat.routes.sign_in")
150 if request.query_string:
165 if request.query_string:
151 query_string = '?%s' % request.query_string
166 query_string = "?%s" % request.query_string
152 else:
167 else:
153 query_string = ''
168 query_string = ""
154 referrer = '%s%s' % (request.path, query_string)
169 referrer = "%s%s" % (request.path, query_string)
155
170
156 if referrer in [login_url, '/register', '/register?sign_in=1']:
171 if referrer in [login_url, "/register", "/register?sign_in=1"]:
157 referrer = '/' # never use the login form itself as came_from
172 referrer = "/" # never use the login form itself as came_from
158 sign_in_form = forms.SignInForm(
173 sign_in_form = forms.SignInForm(
159 came_from=request.params.get('came_from', referrer),
174 came_from=request.params.get("came_from", referrer), csrf_context=request
160 csrf_context=request)
175 )
161
176
162 # populate form from oAuth session data returned by authomatic
177 # populate form from oAuth session data returned by authomatic
163 social_data = request.session.get('zigg.social_auth')
178 social_data = request.session.get("zigg.social_auth")
164 if request.method != 'POST' and social_data:
179 if request.method != "POST" and social_data:
165 log.debug(social_data)
180 log.debug(social_data)
166 user_name = social_data['user'].get('user_name', '').split('@')[0]
181 user_name = social_data["user"].get("user_name", "").split("@")[0]
167 form_data = {
182 form_data = {"user_name": user_name, "email": social_data["user"].get("email")}
168 'user_name': user_name,
183 form_data["user_password"] = str(uuid.uuid4())
169 'email': social_data['user'].get('email')
184 form = forms.UserRegisterForm(MultiDict(form_data), csrf_context=request)
170 }
171 form_data['user_password'] = str(uuid.uuid4())
172 form = forms.UserRegisterForm(MultiDict(form_data),
173 csrf_context=request)
174 form.user_password.widget.hide_value = False
185 form.user_password.widget.hide_value = False
175 else:
186 else:
176 form = forms.UserRegisterForm(request.POST, csrf_context=request)
187 form = forms.UserRegisterForm(request.POST, csrf_context=request)
177 if request.method == 'POST' and form.validate():
188 if request.method == "POST" and form.validate():
178 log.info('registering user')
189 log.info("registering user")
179 # insert new user here
190 # insert new user here
180 if request.registry.settings['appenlight.disable_registration']:
191 if request.registry.settings["appenlight.disable_registration"]:
181 request.session.flash(_('Registration is currently disabled.'))
192 request.session.flash(_("Registration is currently disabled."))
182 return HTTPFound(location=request.route_url('/'))
193 return HTTPFound(location=request.route_url("/"))
183
194
184 new_user = User()
195 new_user = User()
185 DBSession.add(new_user)
196 DBSession.add(new_user)
@@ -187,49 +198,59 b' def register(request):'
187 UserService.regenerate_security_code(new_user)
198 UserService.regenerate_security_code(new_user)
188 new_user.status = 1
199 new_user.status = 1
189 UserService.set_password(new_user, new_user.user_password)
200 UserService.set_password(new_user, new_user.user_password)
190 new_user.registration_ip = request.environ.get('REMOTE_ADDR')
201 new_user.registration_ip = request.environ.get("REMOTE_ADDR")
191
202
192 if social_data:
203 if social_data:
193 handle_social_data(request, new_user, social_data)
204 handle_social_data(request, new_user, social_data)
194
205
195 email_vars = {'user': new_user,
206 email_vars = {
196 'request': request,
207 "user": new_user,
197 'email_title': "AppEnlight :: Start information"}
208 "request": request,
209 "email_title": "AppEnlight :: Start information",
210 }
198 UserService.send_email(
211 UserService.send_email(
199 request, recipients=[new_user.email], variables=email_vars,
212 request,
200 template='/email_templates/registered.jinja2')
213 recipients=[new_user.email],
201 request.session.flash(_('You have successfully registered.'))
214 variables=email_vars,
215 template="/email_templates/registered.jinja2",
216 )
217 request.session.flash(_("You have successfully registered."))
202 DBSession.flush()
218 DBSession.flush()
203 headers = security.remember(request, new_user.id)
219 headers = security.remember(request, new_user.id)
204 return HTTPFound(location=request.route_url('/'),
220 return HTTPFound(location=request.route_url("/"), headers=headers)
205 headers=headers)
206 settings = request.registry.settings
221 settings = request.registry.settings
207 social_plugins = {}
222 social_plugins = {}
208 if settings.get('authomatic.pr.twitter.key', ''):
223 if settings.get("authomatic.pr.twitter.key", ""):
209 social_plugins['twitter'] = True
224 social_plugins["twitter"] = True
210 if settings.get('authomatic.pr.google.key', ''):
225 if settings.get("authomatic.pr.google.key", ""):
211 social_plugins['google'] = True
226 social_plugins["google"] = True
212 if settings.get('authomatic.pr.github.key', ''):
227 if settings.get("authomatic.pr.github.key", ""):
213 social_plugins['github'] = True
228 social_plugins["github"] = True
214 if settings.get('authomatic.pr.bitbucket.key', ''):
229 if settings.get("authomatic.pr.bitbucket.key", ""):
215 social_plugins['bitbucket'] = True
230 social_plugins["bitbucket"] = True
216
231
217 return {
232 return {
218 "form": form,
233 "form": form,
219 "sign_in_form": sign_in_form,
234 "sign_in_form": sign_in_form,
220 "social_plugins": social_plugins
235 "social_plugins": social_plugins,
221 }
236 }
222
237
223
238
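One detail of the oAuth branch above worth noting: users arriving via a provider get the register form prefilled from session data plus a random throwaway password, since the provider handles authentication; hide_value = False merely lets the generated value survive re-rendering. A sketch of the prefill step (the session payload shape is assumed from the code above):

import uuid

# assumed shape of request.session["zigg.social_auth"]
social_data = {"user": {"user_name": "jdoe@example.com",
                        "email": "jdoe@example.com"}}

user_name = social_data["user"].get("user_name", "").split("@")[0]
form_data = {
    "user_name": user_name,                  # "jdoe"
    "email": social_data["user"].get("email"),
    "user_password": str(uuid.uuid4()),      # throwaway; oAuth signs the user in
}
assert form_data["user_name"] == "jdoe"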
224 @view_config(route_name='/',
239 @view_config(
225 renderer='appenlight:templates/app.jinja2',
240 route_name="/",
226 permission=NO_PERMISSION_REQUIRED)
241 renderer="appenlight:templates/app.jinja2",
227 @view_config(route_name='angular_app_ui',
242 permission=NO_PERMISSION_REQUIRED,
228 renderer='appenlight:templates/app.jinja2',
243 )
229 permission=NO_PERMISSION_REQUIRED)
244 @view_config(
230 @view_config(route_name='angular_app_ui_ix',
245 route_name="angular_app_ui",
231 renderer='appenlight:templates/app.jinja2',
246 renderer="appenlight:templates/app.jinja2",
232 permission=NO_PERMISSION_REQUIRED)
247 permission=NO_PERMISSION_REQUIRED,
248 )
249 @view_config(
250 route_name="angular_app_ui_ix",
251 renderer="appenlight:templates/app.jinja2",
252 permission=NO_PERMISSION_REQUIRED,
253 )
233 def app_main_index(request):
254 def app_main_index(request):
234 """
255 """
235     Render dashboard/report browser page along with:
256     Render dashboard/report browser page along with:
@@ -32,14 +32,16 b' from appenlight.lib import generate_random_string'
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34 dummy_report = ReportGroup()
34 dummy_report = ReportGroup()
35 dummy_report.error = "ProtocolError: ('Connection aborted.', " \
35 dummy_report.error = (
36 "error(111, 'Connection refused'))"
36 "ProtocolError: ('Connection aborted.', " "error(111, 'Connection refused'))"
37 )
37 dummy_report.total_reports = 4
38 dummy_report.total_reports = 4
38 dummy_report.occurences = 4
39 dummy_report.occurences = 4
39
40
40 dummy_report2 = ReportGroup()
41 dummy_report2 = ReportGroup()
41 dummy_report2.error = "UnboundLocalError: local variable " \
42 dummy_report2.error = (
42 "'hits' referenced before assignment"
43 "UnboundLocalError: local variable " "'hits' referenced before assignment"
44 )
43 dummy_report2.total_reports = 8
45 dummy_report2.total_reports = 8
44 dummy_report2.occurences = 8
46 dummy_report2.occurences = 8
45
47
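The hunk above shows how black folds backslash-continued strings: it wraps the pieces in parentheses and leaves them as adjacent literals, relying on Python's compile-time string concatenation. The value is unchanged, as a quick check confirms:

# Adjacent string literals concatenate at compile time, so the parenthesized
# form black emits is byte-for-byte identical to the old backslash version.
error = (
    "ProtocolError: ('Connection aborted.', " "error(111, 'Connection refused'))"
)
assert error == "ProtocolError: ('Connection aborted.', error(111, 'Connection refused'))"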
@@ -54,9 +56,10 b' class IntegrationView(object):'
54 def __init__(self, request):
56 def __init__(self, request):
55 self.request = request
57 self.request = request
56 resource = self.request.context.resource
58 resource = self.request.context.resource
57 integration_name = request.matchdict['integration']
59 integration_name = request.matchdict["integration"]
58 integration = IntegrationBase.by_app_id_and_integration_name(
60 integration = IntegrationBase.by_app_id_and_integration_name(
59 resource.resource_id, integration_name)
61 resource.resource_id, integration_name
62 )
60 if integration:
63 if integration:
61 dict_config = integration.config
64 dict_config = integration.config
62 else:
65 else:
@@ -64,25 +67,28 b' class IntegrationView(object):'
64 self.integration = integration
67 self.integration = integration
65 self.integration_config = dict_config
68 self.integration_config = dict_config
66
69
67 @view_config(route_name='integrations_id',
70 @view_config(
68 request_method="DELETE",
71 route_name="integrations_id",
69 renderer='json',
72 request_method="DELETE",
70 permission='edit')
73 renderer="json",
74 permission="edit",
75 )
71 def remove_integration(self):
76 def remove_integration(self):
72 if self.integration:
77 if self.integration:
73 DBSession.delete(self.integration)
78 DBSession.delete(self.integration)
74 self.request.session.flash('Integration removed')
79 self.request.session.flash("Integration removed")
75 return ''
80 return ""
76
81
77 @view_config(route_name='integrations_id',
82 @view_config(
78 request_method="POST",
83 route_name="integrations_id",
79 match_param=['action=test_report_notification'],
84 request_method="POST",
80 renderer='json',
85 match_param=["action=test_report_notification"],
81 permission='edit')
86 renderer="json",
87 permission="edit",
88 )
82 def test_report_notification(self):
89 def test_report_notification(self):
83 if not self.integration:
90 if not self.integration:
84 self.request.session.flash('Integration needs to be configured',
91 self.request.session.flash("Integration needs to be configured", "warning")
85 'warning')
86 return False
92 return False
87
93
88 resource = self.integration.resource
94 resource = self.integration.resource
@@ -94,138 +100,154 b' class IntegrationView(object):'
94 else:
100 else:
95 confirmed_reports = [random.choice(dummy_reports)]
101 confirmed_reports = [random.choice(dummy_reports)]
96
102
97 channel.notify_reports(resource=resource,
103 channel.notify_reports(
98 user=self.request.user,
104 resource=resource,
99 request=self.request,
105 user=self.request.user,
100 since_when=datetime.utcnow(),
106 request=self.request,
101 reports=confirmed_reports)
107 since_when=datetime.utcnow(),
102 self.request.session.flash('Report notification sent')
108 reports=confirmed_reports,
109 )
110 self.request.session.flash("Report notification sent")
103 return True
111 return True
104
112
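All of the test_* endpoints that follow hang off the single `integrations_id` route and are told apart by `request_method` plus `match_param` predicates against the matchdict. A hedged sketch of that dispatch; the route pattern here is illustrative, the real one lives in AppEnlight's route setup:

from pyramid.config import Configurator
from pyramid.view import view_config

@view_config(route_name="integrations_id", request_method="POST",
             match_param=["action=test_ping"], renderer="json")
def test_ping(request):
    # Runs only when the URL's {action} segment equals "test_ping".
    return {"ok": True}

@view_config(route_name="integrations_id", request_method="DELETE",
             renderer="json")
def remove(request):
    return {"removed": request.matchdict["integration"]}

def make_app():
    config = Configurator()
    config.add_route(
        "integrations_id",
        "/applications/{resource_id}/integrations/{integration}/{action}",
    )
    config.scan()
    return config.make_wsgi_app()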
105 @view_config(route_name='integrations_id',
113 @view_config(
106 request_method="POST",
114 route_name="integrations_id",
107 match_param=['action=test_error_alert'],
115 request_method="POST",
108 renderer='json',
116 match_param=["action=test_error_alert"],
109 permission='edit')
117 renderer="json",
118 permission="edit",
119 )
110 def test_error_alert(self):
120 def test_error_alert(self):
111 if not self.integration:
121 if not self.integration:
112 self.request.session.flash('Integration needs to be configured',
122 self.request.session.flash("Integration needs to be configured", "warning")
113 'warning')
114 return False
123 return False
115
124
116 resource = self.integration.resource
125 resource = self.integration.resource
117
126
118 event_name = random.choice(('error_report_alert',
127 event_name = random.choice(("error_report_alert", "slow_report_alert"))
119 'slow_report_alert',))
128 new_event = Event(
120 new_event = Event(resource_id=resource.resource_id,
129 resource_id=resource.resource_id,
121 event_type=Event.types[event_name],
130 event_type=Event.types[event_name],
122 start_date=datetime.utcnow(),
131 start_date=datetime.utcnow(),
123 status=Event.statuses['active'],
132 status=Event.statuses["active"],
124 values={'reports': random.randint(11, 99),
133 values={"reports": random.randint(11, 99), "threshold": 10},
125 'threshold': 10}
134 )
126 )
127
135
128 channel = AlertChannelService.by_integration_id(self.integration.id)
136 channel = AlertChannelService.by_integration_id(self.integration.id)
129
137
130 channel.notify_alert(resource=resource,
138 channel.notify_alert(
131 event=new_event,
139 resource=resource,
132 user=self.request.user,
140 event=new_event,
133 request=self.request)
141 user=self.request.user,
134 self.request.session.flash('Notification sent')
142 request=self.request,
143 )
144 self.request.session.flash("Notification sent")
135 return True
145 return True
136
146
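The Event constructor calls above index into `Event.types` and `Event.statuses`, which act as name-to-code lookup tables on the model. A hedged sketch of the shape this code assumes; the numeric codes below are made up, not AppEnlight's real values:

from datetime import datetime

class EventSketch:
    # Illustrative lookup tables; the real appenlight.models.event.Event
    # defines its own codes.
    types = {
        "error_report_alert": 1,
        "slow_report_alert": 3,
        "uptime_alert": 7,
        "chart_alert": 9,
    }
    statuses = {"active": 1, "closed": 0}

    def __init__(self, resource_id, event_type, status, values,
                 start_date=None, target_uuid=None):
        self.resource_id = resource_id
        self.event_type = event_type
        self.status = status
        self.values = values
        self.start_date = start_date or datetime.utcnow()
        self.target_uuid = target_uuid

event = EventSketch(
    resource_id=1,
    event_type=EventSketch.types["uptime_alert"],
    status=EventSketch.statuses["active"],
    values={"status_code": 500, "tries": 2, "response_time": 0},
)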
137 @view_config(route_name='integrations_id',
147 @view_config(
138 request_method="POST",
148 route_name="integrations_id",
139 match_param=['action=test_daily_digest'],
149 request_method="POST",
140 renderer='json',
150 match_param=["action=test_daily_digest"],
141 permission='edit')
151 renderer="json",
152 permission="edit",
153 )
142 def test_daily_digest(self):
154 def test_daily_digest(self):
143 if not self.integration:
155 if not self.integration:
144 self.request.session.flash('Integration needs to be configured',
156 self.request.session.flash("Integration needs to be configured", "warning")
145 'warning')
146 return False
157 return False
147
158
148 resource = self.integration.resource
159 resource = self.integration.resource
149 channel = AlertChannelService.by_integration_id(self.integration.id)
160 channel = AlertChannelService.by_integration_id(self.integration.id)
150
161
151 channel.send_digest(resource=resource,
162 channel.send_digest(
152 user=self.request.user,
163 resource=resource,
153 request=self.request,
164 user=self.request.user,
154 since_when=datetime.utcnow(),
165 request=self.request,
155 reports=dummy_reports)
166 since_when=datetime.utcnow(),
156 self.request.session.flash('Notification sent')
167 reports=dummy_reports,
168 )
169 self.request.session.flash("Notification sent")
157 return True
170 return True
158
171
159 @view_config(route_name='integrations_id',
172 @view_config(
160 request_method="POST",
173 route_name="integrations_id",
161 match_param=['action=test_uptime_alert'],
174 request_method="POST",
162 renderer='json',
175 match_param=["action=test_uptime_alert"],
163 permission='edit')
176 renderer="json",
177 permission="edit",
178 )
164 def test_uptime_alert(self):
179 def test_uptime_alert(self):
165 if not self.integration:
180 if not self.integration:
166 self.request.session.flash('Integration needs to be configured',
181 self.request.session.flash("Integration needs to be configured", "warning")
167 'warning')
168 return False
182 return False
169
183
170 resource = self.integration.resource
184 resource = self.integration.resource
171
185
172 new_event = Event(resource_id=resource.resource_id,
186 new_event = Event(
173 event_type=Event.types['uptime_alert'],
187 resource_id=resource.resource_id,
174 start_date=datetime.utcnow(),
188 event_type=Event.types["uptime_alert"],
175 status=Event.statuses['active'],
189 start_date=datetime.utcnow(),
176 values={"status_code": 500,
190 status=Event.statuses["active"],
177 "tries": 2,
191 values={"status_code": 500, "tries": 2, "response_time": 0},
178 "response_time": 0})
192 )
179
193
180 channel = AlertChannelService.by_integration_id(self.integration.id)
194 channel = AlertChannelService.by_integration_id(self.integration.id)
181 channel.notify_uptime_alert(resource=resource,
195 channel.notify_uptime_alert(
182 event=new_event,
196 resource=resource,
183 user=self.request.user,
197 event=new_event,
184 request=self.request)
198 user=self.request.user,
185
199 request=self.request,
186 self.request.session.flash('Notification sent')
200 )
201
202 self.request.session.flash("Notification sent")
187 return True
203 return True
188
204
189 @view_config(route_name='integrations_id',
205 @view_config(
190 request_method="POST",
206 route_name="integrations_id",
191 match_param=['action=test_chart_alert'],
207 request_method="POST",
192 renderer='json',
208 match_param=["action=test_chart_alert"],
193 permission='edit')
209 renderer="json",
210 permission="edit",
211 )
194 def test_chart_alert(self):
212 def test_chart_alert(self):
195 if not self.integration:
213 if not self.integration:
196 self.request.session.flash('Integration needs to be configured',
214 self.request.session.flash("Integration needs to be configured", "warning")
197 'warning')
198 return False
215 return False
199
216
200 resource = self.integration.resource
217 resource = self.integration.resource
201
218
202 chart_values = {
219 chart_values = {
203 "matched_rule": {'name': 'Fraud attempt limit'},
220 "matched_rule": {"name": "Fraud attempt limit"},
204 "matched_step_values": {"labels": {
221 "matched_step_values": {
205 "0_1": {"human_label": "Attempts sum"}},
222 "labels": {"0_1": {"human_label": "Attempts sum"}},
206 "values": {"0_1": random.randint(11, 55),
223 "values": {"0_1": random.randint(11, 55), "key": "2015-12-16T15:49:00"},
207 "key": "2015-12-16T15:49:00"}},
224 },
208 "start_interval": datetime.utcnow(),
225 "start_interval": datetime.utcnow(),
209 "resource": 1,
226 "resource": 1,
210 "chart_name": "Fraud attempts per day",
227 "chart_name": "Fraud attempts per day",
211 "chart_uuid": "some_uuid",
228 "chart_uuid": "some_uuid",
212 "step_size": 3600,
229 "step_size": 3600,
213 "action_name": "Notify excessive fraud attempts"}
230 "action_name": "Notify excessive fraud attempts",
214
231 }
215 new_event = Event(resource_id=resource.resource_id,
232
216 event_type=Event.types['chart_alert'],
233 new_event = Event(
217 status=Event.statuses['active'],
234 resource_id=resource.resource_id,
218 values=chart_values,
235 event_type=Event.types["chart_alert"],
219 target_uuid="some_uuid",
236 status=Event.statuses["active"],
220 start_date=datetime.utcnow())
237 values=chart_values,
238 target_uuid="some_uuid",
239 start_date=datetime.utcnow(),
240 )
221
241
222 channel = AlertChannelService.by_integration_id(self.integration.id)
242 channel = AlertChannelService.by_integration_id(self.integration.id)
223 channel.notify_chart_alert(resource=resource,
243 channel.notify_chart_alert(
224 event=new_event,
244 resource=resource,
225 user=self.request.user,
245 event=new_event,
226 request=self.request)
246 user=self.request.user,
227
247 request=self.request,
228 self.request.session.flash('Notification sent')
248 )
249
250 self.request.session.flash("Notification sent")
229 return True
251 return True
230
252
231 def create_missing_channel(self, resource, channel_name):
253 def create_missing_channel(self, resource, channel_name):
@@ -240,5 +262,5 b' class IntegrationView(object):'
240 channel.channel_value = resource.resource_id
262 channel.channel_value = resource.resource_id
241 channel.integration_id = self.integration.id
263 channel.integration_id = self.integration.id
242 security_code = generate_random_string(10)
264 security_code = generate_random_string(10)
243 channel.channel_json_conf = {'security_code': security_code}
265 channel.channel_json_conf = {"security_code": security_code}
244 resource.owner.alert_channels.append(channel)
266 resource.owner.alert_channels.append(channel)
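`create_missing_channel` above pairs an AlertChannel row with the integration and seeds it with a random security code. The same idea in isolation, with stand-in classes; only the length argument mirrors the call above, everything else is illustrative:

import secrets
import string

def generate_random_string_sketch(length=10):
    # Stand-in for appenlight.lib.generate_random_string.
    alphabet = string.ascii_letters + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(length))

class ChannelSketch:
    channel_name = None
    channel_value = None
    integration_id = None
    channel_json_conf = None

def create_missing_channel_sketch(resource, integration, channel_name):
    channel = ChannelSketch()
    channel.channel_name = channel_name
    channel.channel_value = resource.resource_id
    channel.integration_id = integration.id
    # The code lets callers prove the channel was configured by this user.
    channel.channel_json_conf = {"security_code": generate_random_string_sketch(10)}
    resource.owner.alert_channels.append(channel)
    return channel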
@@ -14,8 +14,10 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from appenlight.models.integrations.bitbucket import BitbucketIntegration, \
17 from appenlight.models.integrations.bitbucket import (
18 IntegrationException
18 BitbucketIntegration,
19 IntegrationException,
20 )
19 from appenlight.models.report_comment import ReportComment
21 from appenlight.models.report_comment import ReportComment
20 from appenlight.models.services.report_group import ReportGroupService
22 from appenlight.models.services.report_group import ReportGroupService
21 from pyramid.view import view_config
23 from pyramid.view import view_config
@@ -31,9 +33,11 b' from . import IntegrationView'
31
33
32
34
33 class BitbucketView(IntegrationView):
35 class BitbucketView(IntegrationView):
34 @view_config(route_name='integrations_id',
36 @view_config(
35 match_param=['action=info', 'integration=bitbucket'],
37 route_name="integrations_id",
36 renderer='json')
38 match_param=["action=info", "integration=bitbucket"],
39 renderer="json",
40 )
37 def get_bitbucket_info(self):
41 def get_bitbucket_info(self):
38 """
42 """
39 Grab information about possible priority levels and assignable users
43 Grab information about possible priority levels and assignable users
@@ -41,56 +45,60 b' class BitbucketView(IntegrationView):'
41 try:
45 try:
42 client = BitbucketIntegration.create_client(
46 client = BitbucketIntegration.create_client(
43 self.request,
47 self.request,
44 self.integration.config['user_name'],
48 self.integration.config["user_name"],
45 self.integration.config['repo_name'])
49 self.integration.config["repo_name"],
50 )
46 except IntegrationException as e:
51 except IntegrationException as e:
47 self.request.response.status_code = 503
52 self.request.response.status_code = 503
48 return {'error_messages': [str(e)]}
53 return {"error_messages": [str(e)]}
49 assignees = client.get_assignees()
54 assignees = client.get_assignees()
50 priorities = client.get_priorities()
55 priorities = client.get_priorities()
51 return {'assignees': assignees,
56 return {"assignees": assignees, "priorities": priorities}
52 'priorities': priorities}
53
57
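A pattern repeated across every integration view is visible here: catch IntegrationException, set a 503 status on the response, and return an `error_messages` list the frontend can render. Distilled into a hypothetical decorator; the real code simply inlines the try/except in each method:

class IntegrationException(Exception):
    pass

def guard_integration_errors(view):
    # Hypothetical helper capturing the error-handling shape of these views.
    def wrapper(self, *args, **kwargs):
        try:
            return view(self, *args, **kwargs)
        except IntegrationException as e:
            self.request.response.status_code = 503
            return {"error_messages": [str(e)]}
    return wrapper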
54 @view_config(route_name='integrations_id',
58 @view_config(
55 match_param=['action=create-issue',
59 route_name="integrations_id",
56 'integration=bitbucket'],
60 match_param=["action=create-issue", "integration=bitbucket"],
57 renderer='json')
61 renderer="json",
62 )
58 def create_issue(self):
63 def create_issue(self):
59 """
64 """
60 Creates a new issue in bitbucket issue tracker from report group
65 Creates a new issue in bitbucket issue tracker from report group
61 """
66 """
62 report = ReportGroupService.by_id(
67 report = ReportGroupService.by_id(self.request.unsafe_json_body["group_id"])
63 self.request.unsafe_json_body['group_id'])
64 form_data = {
68 form_data = {
65 'title': self.request.unsafe_json_body.get('title',
69 "title": self.request.unsafe_json_body.get("title", "Unknown Title"),
66 'Unknown Title'),
70 "content": self.request.unsafe_json_body.get("content", ""),
67 'content': self.request.unsafe_json_body.get('content', ''),
71 "kind": "bug",
68 'kind': 'bug',
72 "priority": self.request.unsafe_json_body["priority"],
69 'priority': self.request.unsafe_json_body['priority'],
73 "responsible": self.request.unsafe_json_body["responsible"]["user"],
70 'responsible': self.request.unsafe_json_body['responsible']['user']
71 }
74 }
72
75
73 try:
76 try:
74 client = BitbucketIntegration.create_client(
77 client = BitbucketIntegration.create_client(
75 self.request,
78 self.request,
76 self.integration.config['user_name'],
79 self.integration.config["user_name"],
77 self.integration.config['repo_name'])
80 self.integration.config["repo_name"],
81 )
78 issue = client.create_issue(form_data)
82 issue = client.create_issue(form_data)
79 except IntegrationException as e:
83 except IntegrationException as e:
80 self.request.response.status_code = 503
84 self.request.response.status_code = 503
81 return {'error_messages': [str(e)]}
85 return {"error_messages": [str(e)]}
82
86
83 comment_body = 'Bitbucket issue created: %s ' % issue['web_url']
87 comment_body = "Bitbucket issue created: %s " % issue["web_url"]
84 comment = ReportComment(owner_id=self.request.user.id,
88 comment = ReportComment(
85 report_time=report.first_timestamp,
89 owner_id=self.request.user.id,
86 body=comment_body)
90 report_time=report.first_timestamp,
91 body=comment_body,
92 )
87 report.comments.append(comment)
93 report.comments.append(comment)
88 return True
94 return True
89
95
90 @view_config(route_name='integrations_id',
96 @view_config(
91 match_param=['action=setup', 'integration=bitbucket'],
97 route_name="integrations_id",
92 renderer='json',
98 match_param=["action=setup", "integration=bitbucket"],
93 permission='edit')
99 renderer="json",
100 permission="edit",
101 )
94 def setup(self):
102 def setup(self):
95 """
103 """
96 Validates and creates integration between application and bitbucket
104 Validates and creates integration between application and bitbucket
@@ -98,24 +106,24 b' class BitbucketView(IntegrationView):'
98 resource = self.request.context.resource
106 resource = self.request.context.resource
99 form = forms.IntegrationBitbucketForm(
107 form = forms.IntegrationBitbucketForm(
100 MultiDict(self.request.safe_json_body or {}),
108 MultiDict(self.request.safe_json_body or {}),
101 csrf_context=self.request, **self.integration_config)
109 csrf_context=self.request,
102 if self.request.method == 'POST' and form.validate():
110 **self.integration_config
111 )
112 if self.request.method == "POST" and form.validate():
103 integration_config = {
113 integration_config = {
104 'repo_name': form.repo_name.data,
114 "repo_name": form.repo_name.data,
105 'user_name': form.user_name.data,
115 "user_name": form.user_name.data,
106 'host_name': 'https://bitbucket.org'
116 "host_name": "https://bitbucket.org",
107 }
117 }
108 if not self.integration:
118 if not self.integration:
109 # add new integration
119 # add new integration
110 self.integration = BitbucketIntegration(
120 self.integration = BitbucketIntegration(modified_date=datetime.utcnow())
111 modified_date=datetime.utcnow(),
121 self.request.session.flash("Integration added")
112 )
113 self.request.session.flash('Integration added')
114 resource.integrations.append(self.integration)
122 resource.integrations.append(self.integration)
115 else:
123 else:
116 self.request.session.flash('Integration updated')
124 self.request.session.flash("Integration updated")
117 self.integration.config = integration_config
125 self.integration.config = integration_config
118 return integration_config
126 return integration_config
119 elif self.request.method == 'POST':
127 elif self.request.method == "POST":
120 return HTTPUnprocessableEntity(body=form.errors_json)
128 return HTTPUnprocessableEntity(body=form.errors_json)
121 return self.integration_config
129 return self.integration_config
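Every setup view binds its form the same way: the JSON body goes in as a WebOb MultiDict (WTForms expects formdata with a getlist interface), while the stored config is splatted in as keyword defaults so a GET renders current values. A hedged sketch with a stand-in form class:

from webob.multidict import MultiDict
from wtforms import Form, StringField, validators

class IntegrationFormSketch(Form):
    # Illustrative stand-in for forms.IntegrationBitbucketForm; the real
    # form also takes csrf_context for CSRF validation.
    repo_name = StringField(validators=[validators.DataRequired()])
    user_name = StringField(validators=[validators.DataRequired()])

def bind_form(json_body, stored_config):
    # formdata (first positional) wins on POST; **stored_config provides
    # the defaults shown back to the client otherwise.
    return IntegrationFormSketch(MultiDict(json_body or {}), **stored_config)

form = bind_form({"repo_name": "docs"}, {"user_name": "admin"})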
@@ -15,8 +15,7 b''
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from ...models import DBSession
17 from ...models import DBSession
18 from ...models.integrations.campfire import CampfireIntegration, \
18 from ...models.integrations.campfire import CampfireIntegration, IntegrationException
19 IntegrationException
20 from ...models.alert_channel import AlertChannel
19 from ...models.alert_channel import AlertChannel
21 from ...lib import generate_random_string
20 from ...lib import generate_random_string
22 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
21 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
@@ -32,47 +31,50 b' from . import IntegrationView'
32
31
33
32
34 class CampfireView(IntegrationView):
33 class CampfireView(IntegrationView):
35 @view_config(route_name='integrations_id',
34 @view_config(
36 match_param=['action=info', 'integration=campfire'],
35 route_name="integrations_id",
37 renderer='json')
36 match_param=["action=info", "integration=campfire"],
37 renderer="json",
38 )
38 def get_info(self):
39 def get_info(self):
39 pass
40 pass
40
41
41 @view_config(route_name='integrations_id',
42 @view_config(
42 match_param=['action=setup', 'integration=campfire'],
43 route_name="integrations_id",
43 renderer='json',
44 match_param=["action=setup", "integration=campfire"],
44 permission='edit')
45 renderer="json",
46 permission="edit",
47 )
45 def setup(self):
48 def setup(self):
46 """
49 """
47 Validates and creates integration between application and campfire
50 Validates and creates integration between application and campfire
48 """
51 """
49 resource = self.request.context.resource
52 resource = self.request.context.resource
50 self.create_missing_channel(resource, 'campfire')
53 self.create_missing_channel(resource, "campfire")
51
54
52 form = forms.IntegrationCampfireForm(
55 form = forms.IntegrationCampfireForm(
53 MultiDict(self.request.safe_json_body or {}),
56 MultiDict(self.request.safe_json_body or {}),
54 csrf_context=self.request,
57 csrf_context=self.request,
55 **self.integration_config)
58 **self.integration_config
59 )
56
60
57 if self.request.method == 'POST' and form.validate():
61 if self.request.method == "POST" and form.validate():
58 integration_config = {
62 integration_config = {
59 'account': form.account.data,
63 "account": form.account.data,
60 'api_token': form.api_token.data,
64 "api_token": form.api_token.data,
61 'rooms': form.rooms.data,
65 "rooms": form.rooms.data,
62 }
66 }
63 if not self.integration:
67 if not self.integration:
64 # add new integration
68 # add new integration
65 self.integration = CampfireIntegration(
69 self.integration = CampfireIntegration(modified_date=datetime.utcnow())
66 modified_date=datetime.utcnow(),
70 self.request.session.flash("Integration added")
67 )
68 self.request.session.flash('Integration added')
69 resource.integrations.append(self.integration)
71 resource.integrations.append(self.integration)
70 else:
72 else:
71 self.request.session.flash('Integration updated')
73 self.request.session.flash("Integration updated")
72 self.integration.config = integration_config
74 self.integration.config = integration_config
73 DBSession.flush()
75 DBSession.flush()
74 self.create_missing_channel(resource, 'campfire')
76 self.create_missing_channel(resource, "campfire")
75 return integration_config
77 return integration_config
76 elif self.request.method == 'POST':
78 elif self.request.method == "POST":
77 return HTTPUnprocessableEntity(body=form.errors_json)
79 return HTTPUnprocessableEntity(body=form.errors_json)
78 return self.integration_config
80 return self.integration_config
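The Campfire view (like Flowdock, Hipchat, Slack, and Webhooks below) calls `create_missing_channel` twice: once up front, and again after `DBSession.flush()`, which is what assigns a freshly added integration the primary key the channel row points at. A runnable SQLAlchemy sketch of why the flush matters:

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class IntegrationRow(Base):
    __tablename__ = "integrations"
    id = Column(Integer, primary_key=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    row = IntegrationRow()
    session.add(row)
    assert row.id is None       # pending: no primary key yet
    session.flush()             # emits the INSERT without committing
    assert row.id is not None   # now usable as integration_id elsewhere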
@@ -29,43 +29,45 b' from . import IntegrationView'
29
29
30
30
31 class FlowdockView(IntegrationView):
31 class FlowdockView(IntegrationView):
32 @view_config(route_name='integrations_id',
32 @view_config(
33 match_param=['action=info', 'integration=flowdock'],
33 route_name="integrations_id",
34 renderer='json')
34 match_param=["action=info", "integration=flowdock"],
35 renderer="json",
36 )
35 def get_info(self):
37 def get_info(self):
36 pass
38 pass
37
39
38 @view_config(route_name='integrations_id',
40 @view_config(
39 match_param=['action=setup', 'integration=flowdock'],
41 route_name="integrations_id",
40 renderer='json',
42 match_param=["action=setup", "integration=flowdock"],
41 permission='edit')
43 renderer="json",
44 permission="edit",
45 )
42 def setup(self):
46 def setup(self):
43 """
47 """
44 Validates and creates integration between application and flowdock
48 Validates and creates integration between application and flowdock
45 """
49 """
46 resource = self.request.context.resource
50 resource = self.request.context.resource
47 self.create_missing_channel(resource, 'flowdock')
51 self.create_missing_channel(resource, "flowdock")
48
52
49 form = forms.IntegrationFlowdockForm(
53 form = forms.IntegrationFlowdockForm(
50 MultiDict(self.request.safe_json_body or {}),
54 MultiDict(self.request.safe_json_body or {}),
51 csrf_context=self.request, **self.integration_config)
55 csrf_context=self.request,
52 if self.request.method == 'POST' and form.validate():
56 **self.integration_config
53 integration_config = {
57 )
54 'api_token': form.api_token.data,
58 if self.request.method == "POST" and form.validate():
55 }
59 integration_config = {"api_token": form.api_token.data}
56 if not self.integration:
60 if not self.integration:
57 # add new integration
61 # add new integration
58 self.integration = FlowdockIntegration(
62 self.integration = FlowdockIntegration(modified_date=datetime.utcnow())
59 modified_date=datetime.utcnow(),
63 self.request.session.flash("Integration added")
60 )
61 self.request.session.flash('Integration added')
62 resource.integrations.append(self.integration)
64 resource.integrations.append(self.integration)
63 else:
65 else:
64 self.request.session.flash('Integration updated')
66 self.request.session.flash("Integration updated")
65 self.integration.config = integration_config
67 self.integration.config = integration_config
66 DBSession.flush()
68 DBSession.flush()
67 self.create_missing_channel(resource, 'flowdock')
69 self.create_missing_channel(resource, "flowdock")
68 return integration_config
70 return integration_config
69 elif self.request.method == 'POST':
71 elif self.request.method == "POST":
70 return HTTPUnprocessableEntity(body=form.errors_json)
72 return HTTPUnprocessableEntity(body=form.errors_json)
71 return self.integration_config
73 return self.integration_config
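The one-key dict here shows black's general collapsing rule: a literal that fits within the default 88-character line length goes onto a single line, while in recent black releases a trailing comma left in the source (the "magic trailing comma") forces the exploded form to stay:

# Fits in 88 columns, so black collapses it.
integration_config = {"api_token": "token"}

# A trailing comma in the source tells recent black versions to keep
# the exploded, one-item-per-line form.
integration_config = {
    "api_token": "token",
}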
@@ -15,8 +15,10 b''
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from appenlight.models import DBSession
17 from appenlight.models import DBSession
18 from appenlight.models.integrations.github import GithubIntegration, \
18 from appenlight.models.integrations.github import (
19 IntegrationException
19 GithubIntegration,
20 IntegrationException,
21 )
20 from appenlight.models.report_comment import ReportComment
22 from appenlight.models.report_comment import ReportComment
21 from appenlight.models.services.report_group import ReportGroupService
23 from appenlight.models.services.report_group import ReportGroupService
22 from pyramid.view import view_config
24 from pyramid.view import view_config
@@ -32,9 +34,11 b' from . import IntegrationView'
32
34
33
35
34 class GithubView(IntegrationView):
36 class GithubView(IntegrationView):
35 @view_config(route_name='integrations_id',
37 @view_config(
36 match_param=['action=info', 'integration=github'],
38 route_name="integrations_id",
37 renderer='json')
39 match_param=["action=info", "integration=github"],
40 renderer="json",
41 )
38 def get_github_info(self):
42 def get_github_info(self):
39 """
43 """
40 Grab information about possible priority statuses and assignable users
44 Grab information about possible priority statuses and assignable users
@@ -42,57 +46,62 b' class GithubView(IntegrationView):'
42 try:
46 try:
43 client = GithubIntegration.create_client(
47 client = GithubIntegration.create_client(
44 self.request,
48 self.request,
45 self.integration.config['user_name'],
49 self.integration.config["user_name"],
46 self.integration.config['repo_name'])
50 self.integration.config["repo_name"],
51 )
47 except IntegrationException as e:
52 except IntegrationException as e:
48 self.request.response.status_code = 503
53 self.request.response.status_code = 503
49 return {'error_messages': [str(e)]}
54 return {"error_messages": [str(e)]}
50 try:
55 try:
51 assignees = client.get_assignees()
56 assignees = client.get_assignees()
52 statuses = client.get_statuses()
57 statuses = client.get_statuses()
53 except IntegrationException as e:
58 except IntegrationException as e:
54 return {'error_messages': [str(e)]}
59 return {"error_messages": [str(e)]}
55 return {'assignees': assignees,
60 return {"assignees": assignees, "statuses": statuses}
56 'statuses': statuses}
57
61
58 @view_config(route_name='integrations_id',
62 @view_config(
59 match_param=['action=create-issue', 'integration=github'],
63 route_name="integrations_id",
60 renderer='json')
64 match_param=["action=create-issue", "integration=github"],
65 renderer="json",
66 )
61 def create_issue(self):
67 def create_issue(self):
62 """
68 """
63 Creates a new issue in github issue tracker from report group
69 Creates a new issue in github issue tracker from report group
64 """
70 """
65 report = ReportGroupService.by_id(
71 report = ReportGroupService.by_id(self.request.unsafe_json_body["group_id"])
66 self.request.unsafe_json_body['group_id'])
67 form_data = {
72 form_data = {
68 'title': self.request.unsafe_json_body.get('title',
73 "title": self.request.unsafe_json_body.get("title", "Unknown Title"),
69 'Unknown Title'),
74 "content": self.request.unsafe_json_body.get("content"),
70 'content': self.request.unsafe_json_body.get('content'),
75 "kind": [self.request.unsafe_json_body["status"]],
71 'kind': [self.request.unsafe_json_body['status']],
76 "responsible": self.request.unsafe_json_body["responsible"]["user"],
72 'responsible': self.request.unsafe_json_body['responsible']['user']
73 }
77 }
74
78
75 try:
79 try:
76 client = GithubIntegration.create_client(
80 client = GithubIntegration.create_client(
77 self.request,
81 self.request,
78 self.integration.config['user_name'],
82 self.integration.config["user_name"],
79 self.integration.config['repo_name'])
83 self.integration.config["repo_name"],
84 )
80 issue = client.create_issue(form_data)
85 issue = client.create_issue(form_data)
81 except IntegrationException as e:
86 except IntegrationException as e:
82 self.request.response.status_code = 503
87 self.request.response.status_code = 503
83 return {'error_messages': [str(e)]}
88 return {"error_messages": [str(e)]}
84
89
85 comment_body = 'Github issue created: %s ' % issue['web_url']
90 comment_body = "Github issue created: %s " % issue["web_url"]
86 comment = ReportComment(owner_id=self.request.user.id,
91 comment = ReportComment(
87 report_time=report.first_timestamp,
92 owner_id=self.request.user.id,
88 body=comment_body)
93 report_time=report.first_timestamp,
94 body=comment_body,
95 )
89 report.comments.append(comment)
96 report.comments.append(comment)
90 return True
97 return True
91
98
92 @view_config(route_name='integrations_id',
99 @view_config(
93 match_param=['action=setup', 'integration=github'],
100 route_name="integrations_id",
94 renderer='json',
101 match_param=["action=setup", "integration=github"],
95 permission='edit')
102 renderer="json",
103 permission="edit",
104 )
96 def setup(self):
105 def setup(self):
97 """
106 """
98 Validates and creates integration between application and github
107 Validates and creates integration between application and github
@@ -101,22 +110,20 b' class GithubView(IntegrationView):'
101 form = forms.IntegrationGithubForm(
110 form = forms.IntegrationGithubForm(
102 MultiDict(self.request.safe_json_body or {}),
111 MultiDict(self.request.safe_json_body or {}),
103 csrf_context=self.request,
112 csrf_context=self.request,
104 **self.integration_config)
113 **self.integration_config
105 if self.request.method == 'POST' and form.validate():
114 )
115 if self.request.method == "POST" and form.validate():
106 integration_config = {
116 integration_config = {
107 'repo_name': form.repo_name.data,
117 "repo_name": form.repo_name.data,
108 'user_name': form.user_name.data,
118 "user_name": form.user_name.data,
109 'host_name': 'https://api.github.com'
119 "host_name": "https://api.github.com",
110 }
120 }
111 if not self.integration:
121 if not self.integration:
112 self.integration = GithubIntegration(
122 self.integration = GithubIntegration(modified_date=datetime.utcnow())
113 modified_date=datetime.utcnow(),
114
115 )
116 self.integration.config = integration_config
123 self.integration.config = integration_config
117 resource.integrations.append(self.integration)
124 resource.integrations.append(self.integration)
118 self.request.session.flash('Integration updated')
125 self.request.session.flash("Integration updated")
119 return integration_config
126 return integration_config
120 elif self.request.method == 'POST':
127 elif self.request.method == "POST":
121 return HTTPUnprocessableEntity(body=form.errors_json)
128 return HTTPUnprocessableEntity(body=form.errors_json)
122 return self.integration_config
129 return self.integration_config
@@ -31,43 +31,47 b' from . import IntegrationView'
31
31
32
32
33 class HipchatView(IntegrationView):
33 class HipchatView(IntegrationView):
34 @view_config(route_name='integrations_id',
34 @view_config(
35 match_param=['action=info', 'integration=hipchat'],
35 route_name="integrations_id",
36 renderer='json')
36 match_param=["action=info", "integration=hipchat"],
37 renderer="json",
38 )
37 def get_info(self):
39 def get_info(self):
38 pass
40 pass
39
41
40 @view_config(route_name='integrations_id',
42 @view_config(
41 match_param=['action=setup', 'integration=hipchat'],
43 route_name="integrations_id",
42 renderer='json',
44 match_param=["action=setup", "integration=hipchat"],
43 permission='edit')
45 renderer="json",
46 permission="edit",
47 )
44 def setup(self):
48 def setup(self):
45 """
49 """
46 Validates and creates integration between application and hipchat
50 Validates and creates integration between application and hipchat
47 """
51 """
48 resource = self.request.context.resource
52 resource = self.request.context.resource
49 self.create_missing_channel(resource, 'hipchat')
53 self.create_missing_channel(resource, "hipchat")
50 form = forms.IntegrationHipchatForm(
54 form = forms.IntegrationHipchatForm(
51 MultiDict(self.request.safe_json_body or {}),
55 MultiDict(self.request.safe_json_body or {}),
52 csrf_context=self.request, **self.integration_config)
56 csrf_context=self.request,
53 if self.request.method == 'POST' and form.validate():
57 **self.integration_config
58 )
59 if self.request.method == "POST" and form.validate():
54 integration_config = {
60 integration_config = {
55 'api_token': form.api_token.data,
61 "api_token": form.api_token.data,
56 'rooms': form.rooms.data,
62 "rooms": form.rooms.data,
57 }
63 }
58 if not self.integration:
64 if not self.integration:
59 # add new integration
65 # add new integration
60 self.integration = HipchatIntegration(
66 self.integration = HipchatIntegration(modified_date=datetime.utcnow())
61 modified_date=datetime.utcnow(),
67 self.request.session.flash("Integration added")
62 )
63 self.request.session.flash('Integration added')
64 resource.integrations.append(self.integration)
68 resource.integrations.append(self.integration)
65 else:
69 else:
66 self.request.session.flash('Integration updated')
70 self.request.session.flash("Integration updated")
67 self.integration.config = integration_config
71 self.integration.config = integration_config
68 DBSession.flush()
72 DBSession.flush()
69 self.create_missing_channel(resource, 'hipchat')
73 self.create_missing_channel(resource, "hipchat")
70 return integration_config
74 return integration_config
71 elif self.request.method == 'POST':
75 elif self.request.method == "POST":
72 return HTTPUnprocessableEntity(body=form.errors_json)
76 return HTTPUnprocessableEntity(body=form.errors_json)
73 return self.integration_config
77 return self.integration_config
@@ -16,8 +16,11 b''
16
16
17 import copy
17 import copy
18 import logging
18 import logging
19 from appenlight.models.integrations.jira import JiraIntegration, \
19 from appenlight.models.integrations.jira import (
20 JiraClient, IntegrationException
20 JiraIntegration,
21 JiraClient,
22 IntegrationException,
23 )
21 from appenlight.models.report_comment import ReportComment
24 from appenlight.models.report_comment import ReportComment
22 from appenlight.models.services.report_group import ReportGroupService
25 from appenlight.models.services.report_group import ReportGroupService
23 from pyramid.view import view_config
26 from pyramid.view import view_config
@@ -37,21 +40,24 b' class JiraView(IntegrationView):'
37 Creates a client that can make authenticated requests to jira
40 Creates a client that can make authenticated requests to jira
38 """
41 """
39 if self.integration and not form:
42 if self.integration and not form:
40 user_name = self.integration.config['user_name']
43 user_name = self.integration.config["user_name"]
41 password = self.integration.config['password']
44 password = self.integration.config["password"]
42 host_name = self.integration.config['host_name']
45 host_name = self.integration.config["host_name"]
43 project = self.integration.config['project']
46 project = self.integration.config["project"]
44 else:
47 else:
45 user_name, password = form.user_name.data, form.password.data
48 user_name, password = form.user_name.data, form.password.data
46 host_name = form.host_name.data
49 host_name = form.host_name.data
47 project = form.project.data
50 project = form.project.data
48 client = JiraClient(user_name, password, host_name, project,
51 client = JiraClient(
49 request=self.request)
52 user_name, password, host_name, project, request=self.request
53 )
50 return client
54 return client
51
55
52 @view_config(route_name='integrations_id',
56 @view_config(
53 match_param=['action=info', 'integration=jira'],
57 route_name="integrations_id",
54 renderer='json')
58 match_param=["action=info", "integration=jira"],
59 renderer="json",
60 )
55 def get_jira_info(self):
61 def get_jira_info(self):
56 """
62 """
57 Get basic metadata - assignees and priority levels from tracker
63 Get basic metadata - assignees and priority levels from tracker
@@ -60,60 +66,64 b' class JiraView(IntegrationView):'
60 client = self.create_client()
66 client = self.create_client()
61 except IntegrationException as e:
67 except IntegrationException as e:
62 self.request.response.status_code = 503
68 self.request.response.status_code = 503
63 return {'error_messages': [str(e)]}
69 return {"error_messages": [str(e)]}
64 assignees = []
70 assignees = []
65 priorities = []
71 priorities = []
66 issue_types = []
72 issue_types = []
67 possible_issue_types = client.get_issue_types(self.request)
73 possible_issue_types = client.get_issue_types(self.request)
68 for issue_type in possible_issue_types:
74 for issue_type in possible_issue_types:
69 for field in issue_type['fields']:
75 for field in issue_type["fields"]:
70 if field['id'] == 'assignee':
76 if field["id"] == "assignee":
71 assignees = field['values']
77 assignees = field["values"]
72 if field['id'] == 'priority':
78 if field["id"] == "priority":
73 priorities = field['values']
79 priorities = field["values"]
74 issue_types.append({'name':issue_type['name'],
80 issue_types.append({"name": issue_type["name"], "id": issue_type["id"]})
75 'id':issue_type['id']})
81 return {
76 return {'assignees': assignees,
82 "assignees": assignees,
77 'priorities': priorities,
83 "priorities": priorities,
78 'issue_types': issue_types}
84 "issue_types": issue_types,
85 }
79
86
80 @view_config(route_name='integrations_id',
87 @view_config(
81 match_param=['action=create-issue',
88 route_name="integrations_id",
82 'integration=jira'],
89 match_param=["action=create-issue", "integration=jira"],
83 renderer='json')
90 renderer="json",
91 )
84 def create_issue(self):
92 def create_issue(self):
85 """
93 """
86 Creates a new issue in jira from report group
94 Creates a new issue in jira from report group
87 """
95 """
88 report = ReportGroupService.by_id(
96 report = ReportGroupService.by_id(self.request.unsafe_json_body["group_id"])
89 self.request.unsafe_json_body['group_id'])
90 form_data = {
97 form_data = {
91 'title': self.request.unsafe_json_body.get('title',
98 "title": self.request.unsafe_json_body.get("title", "Unknown Title"),
92 'Unknown Title'),
99 "content": self.request.unsafe_json_body.get("content", ""),
93 'content': self.request.unsafe_json_body.get('content', ''),
100 "issue_type": self.request.unsafe_json_body["issue_type"]["id"],
94 'issue_type': self.request.unsafe_json_body['issue_type']['id'],
101 "priority": self.request.unsafe_json_body["priority"]["id"],
95 'priority': self.request.unsafe_json_body['priority']['id'],
102 "responsible": self.request.unsafe_json_body["responsible"]["id"],
96 'responsible': self.request.unsafe_json_body['responsible']['id'],
103 "project": self.integration.config["project"],
97 'project': self.integration.config['project']
98 }
104 }
99 try:
105 try:
100 client = self.create_client()
106 client = self.create_client()
101 issue = client.create_issue(form_data, request=self.request)
107 issue = client.create_issue(form_data, request=self.request)
102 except IntegrationException as e:
108 except IntegrationException as e:
103 self.request.response.status_code = 503
109 self.request.response.status_code = 503
104 return {'error_messages': [str(e)]}
110 return {"error_messages": [str(e)]}
105
111
106 comment_body = 'Jira issue created: %s ' % issue['web_url']
112 comment_body = "Jira issue created: %s " % issue["web_url"]
107 comment = ReportComment(owner_id=self.request.user.id,
113 comment = ReportComment(
108 report_time=report.first_timestamp,
114 owner_id=self.request.user.id,
109 body=comment_body)
115 report_time=report.first_timestamp,
116 body=comment_body,
117 )
110 report.comments.append(comment)
118 report.comments.append(comment)
111 return True
119 return True
112
120
113 @view_config(route_name='integrations_id',
121 @view_config(
114 match_param=['action=setup', 'integration=jira'],
122 route_name="integrations_id",
115 renderer='json',
123 match_param=["action=setup", "integration=jira"],
116 permission='edit')
124 renderer="json",
125 permission="edit",
126 )
117 def setup(self):
127 def setup(self):
118 """
128 """
119 Validates and creates integration between application and jira
129 Validates and creates integration between application and jira
@@ -121,28 +131,28 b' class JiraView(IntegrationView):'
121 resource = self.request.context.resource
131 resource = self.request.context.resource
122 form = forms.IntegrationJiraForm(
132 form = forms.IntegrationJiraForm(
123 MultiDict(self.request.safe_json_body or {}),
133 MultiDict(self.request.safe_json_body or {}),
124 csrf_context=self.request, **self.integration_config)
134 csrf_context=self.request,
125 if self.request.method == 'POST' and form.validate():
135 **self.integration_config
136 )
137 if self.request.method == "POST" and form.validate():
126 integration_config = {
138 integration_config = {
127 'user_name': form.user_name.data,
139 "user_name": form.user_name.data,
128 'password': form.password.data,
140 "password": form.password.data,
129 'host_name': form.host_name.data,
141 "host_name": form.host_name.data,
130 'project': form.project.data
142 "project": form.project.data,
131 }
143 }
132 if not self.integration:
144 if not self.integration:
133 # add new integration
145 # add new integration
134 self.integration = JiraIntegration(
146 self.integration = JiraIntegration(modified_date=datetime.utcnow())
135 modified_date=datetime.utcnow(),
147 self.request.session.flash("Integration added")
136 )
137 self.request.session.flash('Integration added')
138 resource.integrations.append(self.integration)
148 resource.integrations.append(self.integration)
139 else:
149 else:
140 self.request.session.flash('Integration updated')
150 self.request.session.flash("Integration updated")
141 self.integration.config = integration_config
151 self.integration.config = integration_config
142 return integration_config
152 return integration_config
143 elif self.request.method == 'POST':
153 elif self.request.method == "POST":
144 return HTTPUnprocessableEntity(body=form.errors_json)
154 return HTTPUnprocessableEntity(body=form.errors_json)
145
155
146 to_return = copy.deepcopy(self.integration_config)
156 to_return = copy.deepcopy(self.integration_config)
147 to_return.pop('password', None)
157 to_return.pop("password", None)
148 return to_return
158 return to_return
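Unlike the other setup views, Jira's deep-copies the stored config and strips the password before returning it, so the credential never round-trips to the browser. The same scrubbing idea as a small hypothetical helper:

import copy

def scrub_secrets(config, secret_keys=("password",)):
    # Hypothetical helper mirroring the pop() above; deepcopy keeps the
    # stored integration config itself untouched.
    safe = copy.deepcopy(config)
    for key in secret_keys:
        safe.pop(key, None)
    return safe

assert scrub_secrets({"user_name": "u", "password": "p"}) == {"user_name": "u"}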
@@ -17,8 +17,7 b''
17 import logging
17 import logging
18
18
19 from appenlight.models import DBSession
19 from appenlight.models import DBSession
20 from appenlight.models.integrations.slack import SlackIntegration, \
20 from appenlight.models.integrations.slack import SlackIntegration, IntegrationException
21 IntegrationException
22 from pyramid.httpexceptions import HTTPUnprocessableEntity
21 from pyramid.httpexceptions import HTTPUnprocessableEntity
23 from pyramid.view import view_config
22 from pyramid.view import view_config
24 from appenlight import forms
23 from appenlight import forms
@@ -31,43 +30,45 b' from . import IntegrationView'
31
30
32
31
33 class SlackView(IntegrationView):
32 class SlackView(IntegrationView):
34 @view_config(route_name='integrations_id',
33 @view_config(
35 match_param=['action=info', 'integration=slack'],
34 route_name="integrations_id",
36 renderer='json')
35 match_param=["action=info", "integration=slack"],
36 renderer="json",
37 )
37 def get_info(self):
38 def get_info(self):
38 pass
39 pass
39
40
40 @view_config(route_name='integrations_id',
41 @view_config(
41 match_param=['action=setup', 'integration=slack'],
42 route_name="integrations_id",
42 renderer='json',
43 match_param=["action=setup", "integration=slack"],
43 permission='edit')
44 renderer="json",
45 permission="edit",
46 )
44 def setup(self):
47 def setup(self):
45 """
48 """
46 Validates and creates integration between application and slack
49 Validates and creates integration between application and slack
47 """
50 """
48 resource = self.request.context.resource
51 resource = self.request.context.resource
49 self.create_missing_channel(resource, 'slack')
52 self.create_missing_channel(resource, "slack")
50 form = forms.IntegrationSlackForm(
53 form = forms.IntegrationSlackForm(
51 MultiDict(self.request.safe_json_body or {}),
54 MultiDict(self.request.safe_json_body or {}),
52 csrf_context=self.request, **self.integration_config)
55 csrf_context=self.request,
56 **self.integration_config
57 )
53
58
54 if self.request.method == 'POST' and form.validate():
59 if self.request.method == "POST" and form.validate():
55 integration_config = {
60 integration_config = {"webhook_url": form.webhook_url.data}
56 'webhook_url': form.webhook_url.data
57 }
58 if not self.integration:
61 if not self.integration:
59 # add new integration
62 # add new integration
60 self.integration = SlackIntegration(
63 self.integration = SlackIntegration(modified_date=datetime.utcnow())
61 modified_date=datetime.utcnow(),
64 self.request.session.flash("Integration added")
62 )
63 self.request.session.flash('Integration added')
64 resource.integrations.append(self.integration)
65 resource.integrations.append(self.integration)
65 else:
66 else:
66 self.request.session.flash('Integration updated')
67 self.request.session.flash("Integration updated")
67 self.integration.config = integration_config
68 self.integration.config = integration_config
68 DBSession.flush()
69 DBSession.flush()
69 self.create_missing_channel(resource, 'slack')
70 self.create_missing_channel(resource, "slack")
70 return integration_config
71 return integration_config
71 elif self.request.method == 'POST':
72 elif self.request.method == "POST":
72 return HTTPUnprocessableEntity(body=form.errors_json)
73 return HTTPUnprocessableEntity(body=form.errors_json)
73 return self.integration_config
74 return self.integration_config
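Returning HTTPUnprocessableEntity(body=form.errors_json) instead of raising it works because Pyramid's HTTP exceptions are themselves response objects, so the view bypasses the json renderer and sends the 422 directly:

from pyramid.httpexceptions import HTTPUnprocessableEntity

def setup_sketch(form_is_valid, errors_json):
    # Minimal sketch: Pyramid HTTP exceptions subclass Response, so a view
    # may return one and skip the configured renderer entirely.
    if not form_is_valid:
        return HTTPUnprocessableEntity(body=errors_json)
    return {"ok": True}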
@@ -15,8 +15,10 b''
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from appenlight.models import DBSession
17 from appenlight.models import DBSession
18 from appenlight.models.integrations.webhooks import WebhooksIntegration, \
18 from appenlight.models.integrations.webhooks import (
19 IntegrationException
19 WebhooksIntegration,
20 IntegrationException,
21 )
20 from pyramid.httpexceptions import HTTPUnprocessableEntity
22 from pyramid.httpexceptions import HTTPUnprocessableEntity
21 from pyramid.view import view_config
23 from pyramid.view import view_config
22 from appenlight import forms
24 from appenlight import forms
@@ -30,43 +32,48 b' from . import IntegrationView'
30
32
31
33
32 class WebhooksView(IntegrationView):
34 class WebhooksView(IntegrationView):
33 @view_config(route_name='integrations_id',
35 @view_config(
34 match_param=['action=info', 'integration=webhooks'],
36 route_name="integrations_id",
35 renderer='json')
37 match_param=["action=info", "integration=webhooks"],
38 renderer="json",
39 )
36 def get_info(self):
40 def get_info(self):
37 pass
41 pass
38
42
39 @view_config(route_name='integrations_id',
43 @view_config(
40 match_param=['action=setup', 'integration=webhooks'],
44 route_name="integrations_id",
41 renderer='json', permission='edit')
45 match_param=["action=setup", "integration=webhooks"],
46 renderer="json",
47 permission="edit",
48 )
42 def setup(self):
49 def setup(self):
43 """
50 """
44 Creates webhook integration
51 Creates webhook integration
45 """
52 """
46 resource = self.request.context.resource
53 resource = self.request.context.resource
47 self.create_missing_channel(resource, 'webhooks')
54 self.create_missing_channel(resource, "webhooks")
48
55
49 form = forms.IntegrationWebhooksForm(
56 form = forms.IntegrationWebhooksForm(
50 MultiDict(self.request.safe_json_body or {}),
57 MultiDict(self.request.safe_json_body or {}),
51 csrf_context=self.request, **self.integration_config)
58 csrf_context=self.request,
52 if self.request.method == 'POST' and form.validate():
59 **self.integration_config
60 )
61 if self.request.method == "POST" and form.validate():
53 integration_config = {
62 integration_config = {
54 'reports_webhook': form.reports_webhook.data,
63 "reports_webhook": form.reports_webhook.data,
55 'alerts_webhook': form.alerts_webhook.data,
64 "alerts_webhook": form.alerts_webhook.data,
56 }
65 }
57 if not self.integration:
66 if not self.integration:
58 # add new integration
67 # add new integration
59 self.integration = WebhooksIntegration(
68 self.integration = WebhooksIntegration(modified_date=datetime.utcnow())
60 modified_date=datetime.utcnow(),
69 self.request.session.flash("Integration added")
61 )
62 self.request.session.flash('Integration added')
63 resource.integrations.append(self.integration)
70 resource.integrations.append(self.integration)
64 else:
71 else:
65 self.request.session.flash('Integration updated')
72 self.request.session.flash("Integration updated")
66 self.integration.config = integration_config
73 self.integration.config = integration_config
67 DBSession.flush()
74 DBSession.flush()
68 self.create_missing_channel(resource, 'webhooks')
75 self.create_missing_channel(resource, "webhooks")
69 return integration_config
76 return integration_config
70 elif self.request.method == 'POST':
77 elif self.request.method == "POST":
71 return HTTPUnprocessableEntity(body=form.errors_json)
78 return HTTPUnprocessableEntity(body=form.errors_json)
72 return self.integration_config
79 return self.integration_config
@@ -21,29 +21,31 b' from pyramid.view import view_config'
21 from pyramid.httpexceptions import HTTPUnprocessableEntity
21 from pyramid.httpexceptions import HTTPUnprocessableEntity
22 from appenlight.models import Datastores, Log
22 from appenlight.models import Datastores, Log
23 from appenlight.models.services.log import LogService
23 from appenlight.models.services.log import LogService
24 from appenlight.lib.utils import (build_filter_settings_from_query_dict,
24 from appenlight.lib.utils import (
25 es_index_name_limiter)
25 build_filter_settings_from_query_dict,
26 es_index_name_limiter,
27 )
26 from appenlight.lib.helpers import gen_pagination_headers
28 from appenlight.lib.helpers import gen_pagination_headers
27 from appenlight.celery.tasks import logs_cleanup
29 from appenlight.celery.tasks import logs_cleanup
28
30
29 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
30
32
31 section_filters_key = 'appenlight:logs:filter:%s'
33 section_filters_key = "appenlight:logs:filter:%s"
32
34
33
35
34 @view_config(route_name='logs_no_id', renderer='json',
36 @view_config(route_name="logs_no_id", renderer="json", permission="authenticated")
35 permission='authenticated')
36 def fetch_logs(request):
37 def fetch_logs(request):
37 """
38 """
38 Returns list of log entries from Elasticsearch
39 Returns list of log entries from Elasticsearch
39 """
40 """
40
41
41 filter_settings = build_filter_settings_from_query_dict(request,
42 filter_settings = build_filter_settings_from_query_dict(
42 request.GET.mixed())
43 request, request.GET.mixed()
44 )
43 logs_paginator = LogService.get_paginator_by_app_ids(
45 logs_paginator = LogService.get_paginator_by_app_ids(
44 app_ids=filter_settings['resource'],
46 app_ids=filter_settings["resource"],
45 page=filter_settings['page'],
47 page=filter_settings["page"],
46 filter_settings=filter_settings
48 filter_settings=filter_settings,
47 )
49 )
48 headers = gen_pagination_headers(request, logs_paginator)
50 headers = gen_pagination_headers(request, logs_paginator)
49 request.response.headers.update(headers)
51 request.response.headers.update(headers)
@@ -51,91 +53,108 b' def fetch_logs(request):'
51 return [l.get_dict() for l in logs_paginator.sa_items]
53 return [l.get_dict() for l in logs_paginator.sa_items]
52
54
53
55
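`gen_pagination_headers` moves paging metadata into response headers so the JSON body can stay a bare list of log dicts. A hedged sketch of what such a helper typically emits; these header names are assumptions, not checked against appenlight.lib.helpers:

def gen_pagination_headers_sketch(request, paginator):
    # Illustrative only; the real helper may use different header names
    # and could include Link headers for next/prev pages.
    return {
        "X-Current-Page": str(paginator.page),
        "X-Pages": str(paginator.page_count),
        "X-Total-Count": str(paginator.item_count),
    }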
54 @view_config(route_name='section_view',
56 @view_config(
55 match_param=['section=logs_section', 'view=fetch_series'],
57 route_name="section_view",
56 renderer='json', permission='authenticated')
58 match_param=["section=logs_section", "view=fetch_series"],
59 renderer="json",
60 permission="authenticated",
61 )
57 def logs_fetch_series(request):
62 def logs_fetch_series(request):
58 """
63 """
59 Handles metric dashboard graphs
64 Handles metric dashboard graphs
60 Returns information for time/tier breakdown
65 Returns information for time/tier breakdown
61 """
66 """
62 filter_settings = build_filter_settings_from_query_dict(request,
67 filter_settings = build_filter_settings_from_query_dict(
63 request.GET.mixed())
68 request, request.GET.mixed()
69 )
64 paginator = LogService.get_paginator_by_app_ids(
70 paginator = LogService.get_paginator_by_app_ids(
65 app_ids=filter_settings['resource'],
71 app_ids=filter_settings["resource"],
66 page=1, filter_settings=filter_settings, items_per_page=1)
72 page=1,
73 filter_settings=filter_settings,
74 items_per_page=1,
75 )
67 now = datetime.utcnow().replace(microsecond=0, second=0)
76 now = datetime.utcnow().replace(microsecond=0, second=0)
68 delta = timedelta(days=7)
77 delta = timedelta(days=7)
69 if paginator.sa_items:
78 if paginator.sa_items:
70 start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0,
79 start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0, second=0)
71 second=0)
80 filter_settings["start_date"] = start_date - delta
72 filter_settings['start_date'] = start_date - delta
73 else:
81 else:
74 filter_settings['start_date'] = now - delta
82 filter_settings["start_date"] = now - delta
75 filter_settings['end_date'] = filter_settings['start_date'] \
83 filter_settings["end_date"] = filter_settings["start_date"] + timedelta(days=7)
76 + timedelta(days=7)
77
84
78 @request.registry.cache_regions.redis_sec_30.cache_on_arguments(
85 @request.registry.cache_regions.redis_sec_30.cache_on_arguments("logs_graphs")
79 'logs_graphs')
80 def cached(apps, search_params, delta, now):
86 def cached(apps, search_params, delta, now):
81 data = LogService.get_time_series_aggregate(
87 data = LogService.get_time_series_aggregate(
82 filter_settings['resource'], filter_settings)
88 filter_settings["resource"], filter_settings
89 )
83 if not data:
90 if not data:
84 return []
91 return []
85 buckets = data['aggregations']['events_over_time']['buckets']
92 buckets = data["aggregations"]["events_over_time"]["buckets"]
86 return [{"x": datetime.utcfromtimestamp(item["key"] / 1000),
93 return [
87 "logs": item["doc_count"]} for item in buckets]
94 {
95 "x": datetime.utcfromtimestamp(item["key"] / 1000),
96 "logs": item["doc_count"],
97 }
98 for item in buckets
99 ]
88
100
89 return cached(filter_settings, request.GET.mixed(), delta, now)
101 return cached(filter_settings, request.GET.mixed(), delta, now)
90
102
91
103
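The inner `cached` function uses dogpile.cache's cache_on_arguments: the decorated function's arguments become the cache key, which is why apps, search_params, delta, and the minute-rounded now are passed in even though the body reads filter_settings from the closure. Rounding now to the minute keeps the key stable across the 30-second cache window. A standalone sketch with an in-memory region:

from dogpile.cache import make_region

region = make_region().configure("dogpile.cache.memory", expiration_time=30)

@region.cache_on_arguments("logs_graphs")
def cached(apps, search_params, delta, now):
    # The arguments are cache-key material; on a miss this body would run
    # the expensive Elasticsearch aggregation, then replay from cache.
    return {"computed_for": str(apps)}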
92 @view_config(route_name='logs_no_id', renderer='json', request_method="DELETE",
104 @view_config(
93 permission='authenticated')
105 route_name="logs_no_id",
106 renderer="json",
107 request_method="DELETE",
108 permission="authenticated",
109 )
94 def logs_mass_delete(request):
110 def logs_mass_delete(request):
95 params = request.GET.mixed()
111 params = request.GET.mixed()
96 if 'resource' not in params:
112 if "resource" not in params:
97 raise HTTPUnprocessableEntity()
113 raise HTTPUnprocessableEntity()
98 # this might be '' and then colander will not validate the schema
114 # this might be '' and then colander will not validate the schema
99 if not params.get('namespace'):
115 if not params.get("namespace"):
100 params.pop('namespace', None)
116 params.pop("namespace", None)
101 filter_settings = build_filter_settings_from_query_dict(
117 filter_settings = build_filter_settings_from_query_dict(
102 request, params, resource_permissions=['update_reports'])
118 request, params, resource_permissions=["update_reports"]
119 )
103
120
104 resource_id = list(filter_settings['resource'])[0]
121 resource_id = list(filter_settings["resource"])[0]
105 # filter settings returns a list of all of the user's applications
122 # filter settings returns a list of all of the user's applications
106 # if the app does not match - normally we would not care, as it's used for search
123 # if the app does not match - normally we would not care, as it's used for search
107 # but here a user playing with params could wipe out their whole data
124 # but here a user playing with params could wipe out their whole data
108 if int(resource_id) != int(params['resource']):
125 if int(resource_id) != int(params["resource"]):
109 raise HTTPUnprocessableEntity()
126 raise HTTPUnprocessableEntity()
110
127
111 logs_cleanup.delay(resource_id, filter_settings)
128 logs_cleanup.delay(resource_id, filter_settings)
112 msg = 'Log cleanup process started - it may take a while for ' \
129 msg = (
113 'everything to get removed'
130 "Log cleanup process started - it may take a while for "
131 "everything to get removed"
132 )
114 request.session.flash(msg)
133 request.session.flash(msg)
115 return {}
134 return {}
116
135
117
136
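The id comparison in logs_mass_delete is a safety guard: filter settings come back scoped to the user's permitted applications, and the delete is refused when the explicitly requested resource differs from the one that survived permission filtering. A standalone sketch of the same guard (names are illustrative):

    def guarded_resource_id(requested_id, permitted_ids):
        # permitted_ids: resources the user may update, per permission filtering
        resource_id = int(list(permitted_ids)[0])
        if resource_id != int(requested_id):
            raise ValueError("requested resource is outside user's permissions")
        return resource_id

    assert guarded_resource_id("5", [5]) == 5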
118 @view_config(route_name='section_view',
137 @view_config(
119 match_param=("view=common_tags", "section=logs_section"),
138 route_name="section_view",
120 renderer='json', permission='authenticated')
139 match_param=("view=common_tags", "section=logs_section"),
140 renderer="json",
141 permission="authenticated",
142 )
121 def common_tags(request):
143 def common_tags(request):
122 config = request.GET.mixed()
144 config = request.GET.mixed()
123 filter_settings = build_filter_settings_from_query_dict(request,
145 filter_settings = build_filter_settings_from_query_dict(request, config)
124 config)
125
146
126 resources = list(filter_settings["resource"])
147 resources = list(filter_settings["resource"])
127 query = {
148 query = {
128 "query": {
149 "query": {
129 "filtered": {
150 "filtered": {
130 "filter": {
151 "filter": {"and": [{"terms": {"resource_id": list(resources)}}]}
131 "and": [{"terms": {"resource_id": list(resources)}}]
132 }
133 }
152 }
134 }
153 }
135 }
154 }
136 start_date = filter_settings.get('start_date')
155 start_date = filter_settings.get("start_date")
137 end_date = filter_settings.get('end_date')
156 end_date = filter_settings.get("end_date")
138 filter_part = query['query']['filtered']['filter']['and']
157 filter_part = query["query"]["filtered"]["filter"]["and"]
139
158
140 date_range = {"range": {"timestamp": {}}}
159 date_range = {"range": {"timestamp": {}}}
141 if start_date:
160 if start_date:
@@ -145,80 +164,56 b' def common_tags(request):'
145 if start_date or end_date:
164 if start_date or end_date:
146 filter_part.append(date_range)
165 filter_part.append(date_range)
147
166
148 levels = filter_settings.get('level')
167 levels = filter_settings.get("level")
149 if levels:
168 if levels:
150 filter_part.append({"terms": {'log_level': levels}})
169 filter_part.append({"terms": {"log_level": levels}})
151 namespaces = filter_settings.get('namespace')
170 namespaces = filter_settings.get("namespace")
152 if namespaces:
171 if namespaces:
153 filter_part.append({"terms": {'namespace': namespaces}})
172 filter_part.append({"terms": {"namespace": namespaces}})
154
173
155 query["aggs"] = {
174 query["aggs"] = {"sub_agg": {"terms": {"field": "tag_list", "size": 50}}}
156 "sub_agg": {
157 "terms": {
158 "field": "tag_list",
159 "size": 50
160 }
161 }
162 }
163 # tags
175 # tags
164 index_names = es_index_name_limiter(
176 index_names = es_index_name_limiter(ixtypes=[config.get("datasource", "logs")])
165 ixtypes=[config.get('datasource', 'logs')])
177 result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
166 result = Datastores.es.search(body=query, index=index_names, doc_type='log',
178 tag_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
167 size=0)
168 tag_buckets = result['aggregations']['sub_agg'].get('buckets', [])
169 # namespaces
179 # namespaces
170 query["aggs"] = {
180 query["aggs"] = {"sub_agg": {"terms": {"field": "namespace", "size": 50}}}
171 "sub_agg": {
181 result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
172 "terms": {
182 namespaces_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
173 "field": "namespace",
174 "size": 50
175 }
176 }
177 }
178 result = Datastores.es.search(body=query, index=index_names, doc_type='log',
179 size=0)
180 namespaces_buckets = result['aggregations']['sub_agg'].get('buckets', [])
181 return {
183 return {
182 "tags": [item['key'] for item in tag_buckets],
184 "tags": [item["key"] for item in tag_buckets],
183 "namespaces": [item['key'] for item in namespaces_buckets]
185 "namespaces": [item["key"] for item in namespaces_buckets],
184 }
186 }
185
187
186
188
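For reference, the body that common_tags ends up sending to Elasticsearch follows the ES 1.x "filtered" query syntax: an "and" filter that grows optional range/terms clauses, plus a single terms aggregation. The values below are made up:

    query = {
        "query": {
            "filtered": {
                "filter": {
                    "and": [
                        {"terms": {"resource_id": [1, 2]}},
                        {"range": {"timestamp": {"gte": "2016-01-01T00:00:00"}}},
                        {"terms": {"log_level": ["ERROR"]}},
                    ]
                }
            }
        },
        # size=0 on the search call: only the aggregation buckets are wanted
        "aggs": {"sub_agg": {"terms": {"field": "tag_list", "size": 50}}},
    }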
187 @view_config(route_name='section_view',
189 @view_config(
188 match_param=("view=common_values", "section=logs_section"),
190 route_name="section_view",
189 renderer='json', permission='authenticated')
191 match_param=("view=common_values", "section=logs_section"),
192 renderer="json",
193 permission="authenticated",
194 )
190 def common_values(request):
195 def common_values(request):
191 config = request.GET.mixed()
196 config = request.GET.mixed()
192 datasource = config.pop('datasource', 'logs')
197 datasource = config.pop("datasource", "logs")
193 filter_settings = build_filter_settings_from_query_dict(request,
198 filter_settings = build_filter_settings_from_query_dict(request, config)
194 config)
195 resources = list(filter_settings["resource"])
199 resources = list(filter_settings["resource"])
196 tag_name = filter_settings['tags'][0]['value'][0]
200 tag_name = filter_settings["tags"][0]["value"][0]
197 query = {
201 query = {
198 'query': {
202 "query": {
199 'filtered': {
203 "filtered": {
200 'filter': {
204 "filter": {
201 'and': [
205 "and": [
202 {'terms': {'resource_id': list(resources)}},
206 {"terms": {"resource_id": list(resources)}},
203 {'terms': {
207 {"terms": {"namespace": filter_settings["namespace"]}},
204 'namespace': filter_settings['namespace']}}
205 ]
208 ]
206 }
209 }
207 }
210 }
208 }
211 }
209 }
212 }
210 query['aggs'] = {
213 query["aggs"] = {
211 'sub_agg': {
214 "sub_agg": {"terms": {"field": "tags.{}.values".format(tag_name), "size": 50}}
212 'terms': {
213 'field': 'tags.{}.values'.format(tag_name),
214 'size': 50
215 }
216 }
217 }
215 }
218 index_names = es_index_name_limiter(ixtypes=[datasource])
216 index_names = es_index_name_limiter(ixtypes=[datasource])
219 result = Datastores.es.search(body=query, index=index_names, doc_type='log',
217 result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
220 size=0)
218 values_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
221 values_buckets = result['aggregations']['sub_agg'].get('buckets', [])
219 return {"values": [item["key"] for item in values_buckets]}
222 return {
223 "values": [item['key'] for item in values_buckets]
224 }
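Relative to common_tags, the only moving part in common_values is the aggregation field, derived from the first requested tag name, e.g.:

    tag_name = "browser"  # hypothetical tag taken from filter_settings["tags"]
    agg = {"sub_agg": {"terms": {"field": "tags.{}.values".format(tag_name), "size": 50}}}
    assert agg["sub_agg"]["terms"]["field"] == "tags.browser.values"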
@@ -22,11 +22,16 b' import logging'
22 log = logging.getLogger(__name__)
22 log = logging.getLogger(__name__)
23
23
24
24
25 @view_config(route_name='plugin_configs', renderer='json',
25 @view_config(
26 permission='edit', request_method='GET')
26 route_name="plugin_configs",
27 renderer="json",
28 permission="edit",
29 request_method="GET",
30 )
27 def query(request):
31 def query(request):
28 configs = PluginConfigService.by_query(
32 configs = PluginConfigService.by_query(
29 request.params.get('resource_id'),
33 request.params.get("resource_id"),
30 plugin_name=request.matchdict.get('plugin_name'),
34 plugin_name=request.matchdict.get("plugin_name"),
31 section=request.params.get('section'))
35 section=request.params.get("section"),
36 )
32 return [c for c in configs]
37 return [c for c in configs]
@@ -33,20 +33,22 b' from appenlight import forms'
33 from appenlight.lib.enums import ReportType
33 from appenlight.lib.enums import ReportType
34 from appenlight.lib.helpers import gen_pagination_headers
34 from appenlight.lib.helpers import gen_pagination_headers
35 from appenlight.lib.utils import build_filter_settings_from_query_dict
35 from appenlight.lib.utils import build_filter_settings_from_query_dict
36 from appenlight.validators import ReportSearchSchema, TagListSchema, \
36 from appenlight.validators import (
37 accepted_search_params
37 ReportSearchSchema,
38 TagListSchema,
39 accepted_search_params,
40 )
38 from webob.multidict import MultiDict
41 from webob.multidict import MultiDict
39
42
40 _ = str
43 _ = str
41
44
42 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
43
46
44 section_filters_key = 'appenlight:reports:filter:%s'
47 section_filters_key = "appenlight:reports:filter:%s"
45
48
46
49
47 @view_config(route_name='reports', renderer='json', permission='authenticated')
50 @view_config(route_name="reports", renderer="json", permission="authenticated")
48 @view_config(route_name='slow_reports', renderer='json',
51 @view_config(route_name="slow_reports", renderer="json", permission="authenticated")
49 permission='authenticated')
50 def index(request):
52 def index(request):
51 """
53 """
52 Returns list of report groups based on user search query
54 Returns list of report groups based on user search query
@@ -55,7 +57,8 b' def index(request):'
55 request.user.last_login_date = datetime.utcnow()
57 request.user.last_login_date = datetime.utcnow()
56
58
57 applications = UserService.resources_with_perms(
59 applications = UserService.resources_with_perms(
58 request.user, ['view'], resource_types=['application'])
60 request.user, ["view"], resource_types=["application"]
61 )
59
62
60 search_params = request.GET.mixed()
63 search_params = request.GET.mixed()
61
64
@@ -63,24 +66,36 b' def index(request):'
63 schema = ReportSearchSchema().bind(resources=all_possible_app_ids)
66 schema = ReportSearchSchema().bind(resources=all_possible_app_ids)
64 tag_schema = TagListSchema()
67 tag_schema = TagListSchema()
65 filter_settings = schema.deserialize(search_params)
68 filter_settings = schema.deserialize(search_params)
66 tag_list = [{"name": k, "value": v} for k, v in filter_settings.items()
69 tag_list = [
67 if k not in accepted_search_params]
70 {"name": k, "value": v}
71 for k, v in filter_settings.items()
72 if k not in accepted_search_params
73 ]
68 tags = tag_schema.deserialize(tag_list)
74 tags = tag_schema.deserialize(tag_list)
69 filter_settings['tags'] = tags
75 filter_settings["tags"] = tags
70 if request.matched_route.name == 'slow_reports':
76 if request.matched_route.name == "slow_reports":
71 filter_settings['report_type'] = [ReportType.slow]
77 filter_settings["report_type"] = [ReportType.slow]
72 else:
78 else:
73 filter_settings['report_type'] = [ReportType.error]
79 filter_settings["report_type"] = [ReportType.error]
74
80
75 reports_paginator = ReportGroupService.get_paginator_by_app_ids(
81 reports_paginator = ReportGroupService.get_paginator_by_app_ids(
76 app_ids=filter_settings['resource'],
82 app_ids=filter_settings["resource"],
77 page=filter_settings['page'],
83 page=filter_settings["page"],
78 filter_settings=filter_settings
84 filter_settings=filter_settings,
79 )
85 )
80 reports = []
86 reports = []
81 include_keys = ('id', 'http_status', 'report_type', 'resource_name',
87 include_keys = (
82 'front_url', 'resource_id', 'error', 'url_path', 'tags',
88 "id",
83 'duration')
89 "http_status",
90 "report_type",
91 "resource_name",
92 "front_url",
93 "resource_id",
94 "error",
95 "url_path",
96 "tags",
97 "duration",
98 )
84 for report in reports_paginator.sa_items:
99 for report in reports_paginator.sa_items:
85 reports.append(report.get_dict(request, include_keys=include_keys))
100 reports.append(report.get_dict(request, include_keys=include_keys))
86 headers = gen_pagination_headers(request, reports_paginator)
101 headers = gen_pagination_headers(request, reports_paginator)
@@ -88,8 +103,9 b' def index(request):'
88 return reports
103 return reports
89
104
90
105
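The index view splits the query string in two passes: keys known to ReportSearchSchema stay as filter settings, and anything else is reinterpreted as a tag filter and validated by TagListSchema. A minimal sketch of that split (the accepted set is abbreviated):

    accepted_search_params = {"page", "start_date", "end_date", "resource"}
    search_params = {"page": 1, "resource": [1], "browser": ["firefox"]}

    tag_list = [
        {"name": k, "value": v}
        for k, v in search_params.items()
        if k not in accepted_search_params
    ]
    assert tag_list == [{"name": "browser", "value": ["firefox"]}]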
91 @view_config(route_name='report_groups', renderer='json', permission='view',
106 @view_config(
92 request_method="GET")
107 route_name="report_groups", renderer="json", permission="view", request_method="GET"
108 )
93 def view_report(request):
109 def view_report(request):
94 """
110 """
95 Show individual detailed report group along with latest report
111 Show individual detailed report group along with latest report
@@ -98,18 +114,21 b' def view_report(request):'
98 if not report_group.read:
114 if not report_group.read:
99 report_group.read = True
115 report_group.read = True
100
116
101 report_id = request.params.get('reportId', request.params.get('report_id'))
117 report_id = request.params.get("reportId", request.params.get("report_id"))
102 report_dict = report_group.get_report(report_id).get_dict(request,
118 report_dict = report_group.get_report(report_id).get_dict(request, details=True)
103 details=True)
104 # disallow browsing other occurrences by anonymous users
119 # disallow browsing other occurrences by anonymous users
105 if not request.user:
120 if not request.user:
106 report_dict.pop('group_next_report', None)
121 report_dict.pop("group_next_report", None)
107 report_dict.pop('group_previous_report', None)
122 report_dict.pop("group_previous_report", None)
108 return report_dict
123 return report_dict
109
124
110
125
111 @view_config(route_name='report_groups', renderer='json',
126 @view_config(
112 permission='update_reports', request_method='DELETE')
127 route_name="report_groups",
128 renderer="json",
129 permission="update_reports",
130 request_method="DELETE",
131 )
113 def remove(request):
132 def remove(request):
114 """
133 """
115 Used to remove report groups from the database
134 Used to remove report groups from the database
@@ -121,8 +140,13 b' def remove(request):'
121 return True
140 return True
122
141
123
142
124 @view_config(route_name='report_groups_property', match_param='key=comments',
143 @view_config(
125 renderer='json', permission='view', request_method="POST")
144 route_name="report_groups_property",
145 match_param="key=comments",
146 renderer="json",
147 permission="view",
148 request_method="POST",
149 )
126 def comment_create(request):
150 def comment_create(request):
127 """
151 """
128 Creates user comments for report group, sends email notifications
152 Creates user comments for report group, sends email notifications
@@ -130,76 +154,92 b' def comment_create(request):'
130 """
154 """
131 report_group = request.context.report_group
155 report_group = request.context.report_group
132 application = request.context.resource
156 application = request.context.resource
133 form = forms.CommentForm(MultiDict(request.unsafe_json_body),
157 form = forms.CommentForm(MultiDict(request.unsafe_json_body), csrf_context=request)
134 csrf_context=request)
158 if request.method == "POST" and form.validate():
135 if request.method == 'POST' and form.validate():
159 comment = ReportComment(
136 comment = ReportComment(owner_id=request.user.id,
160 owner_id=request.user.id, report_time=report_group.first_timestamp
137 report_time=report_group.first_timestamp)
161 )
138 form.populate_obj(comment)
162 form.populate_obj(comment)
139 report_group.comments.append(comment)
163 report_group.comments.append(comment)
140 perm_list = ResourceService.users_for_perm(application, 'view')
164 perm_list = ResourceService.users_for_perm(application, "view")
141 uids_to_notify = []
165 uids_to_notify = []
142 users_to_notify = []
166 users_to_notify = []
143 for perm in perm_list:
167 for perm in perm_list:
144 user = perm.user
168 user = perm.user
145 if ('@{}'.format(user.user_name) in comment.body and
169 if (
146 user.id not in uids_to_notify):
170 "@{}".format(user.user_name) in comment.body
171 and user.id not in uids_to_notify
172 ):
147 uids_to_notify.append(user.id)
173 uids_to_notify.append(user.id)
148 users_to_notify.append(user)
174 users_to_notify.append(user)
149
175
150 commenters = ReportGroupService.users_commenting(
176 commenters = ReportGroupService.users_commenting(
151 report_group, exclude_user_id=request.user.id)
177 report_group, exclude_user_id=request.user.id
178 )
152 for user in commenters:
179 for user in commenters:
153 if user.id not in uids_to_notify:
180 if user.id not in uids_to_notify:
154 uids_to_notify.append(user.id)
181 uids_to_notify.append(user.id)
155 users_to_notify.append(user)
182 users_to_notify.append(user)
156
183
157 for user in users_to_notify:
184 for user in users_to_notify:
158 email_vars = {'user': user,
185 email_vars = {
159 'commenting_user': request.user,
186 "user": user,
160 'request': request,
187 "commenting_user": request.user,
161 'application': application,
188 "request": request,
162 'report_group': report_group,
189 "application": application,
163 'comment': comment,
190 "report_group": report_group,
164 'email_title': "AppEnlight :: New comment"}
191 "comment": comment,
192 "email_title": "AppEnlight :: New comment",
193 }
165 UserService.send_email(
194 UserService.send_email(
166 request,
195 request,
167 recipients=[user.email],
196 recipients=[user.email],
168 variables=email_vars,
197 variables=email_vars,
169 template='/email_templates/new_comment_report.jinja2')
198 template="/email_templates/new_comment_report.jinja2",
170 request.session.flash(_('Your comment was created'))
199 )
200 request.session.flash(_("Your comment was created"))
171 return comment.get_dict()
201 return comment.get_dict()
172 else:
202 else:
173 return form.errors
203 return form.errors
174
204
175
205
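The notification fan-out in comment_create is a substring scan for "@user_name" over everyone with view permission, deduplicated by user id, followed by everyone who already commented on the group. A self-contained sketch of the mention scan:

    from collections import namedtuple

    User = namedtuple("User", "id user_name")

    def mentioned_users(body, users):
        seen, notify = set(), []
        for user in users:
            if "@{}".format(user.user_name) in body and user.id not in seen:
                seen.add(user.id)
                notify.append(user)
        return notify

    users = [User(1, "bob"), User(2, "alice")]
    assert mentioned_users("thanks @bob!", users) == [User(1, "bob")]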
176 @view_config(route_name='report_groups_property',
206 @view_config(
177 match_param='key=assigned_users', renderer='json',
207 route_name="report_groups_property",
178 permission='update_reports', request_method="GET")
208 match_param="key=assigned_users",
209 renderer="json",
210 permission="update_reports",
211 request_method="GET",
212 )
179 def assigned_users(request):
213 def assigned_users(request):
180 """
214 """
181 Returns the list of users a specific report group is assigned to for review
215 Returns the list of users a specific report group is assigned to for review
182 """
216 """
183 report_group = request.context.report_group
217 report_group = request.context.report_group
184 application = request.context.resource
218 application = request.context.resource
185 users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
219 users = set([p.user for p in ResourceService.users_for_perm(application, "view")])
186 currently_assigned = [u.user_name for u in report_group.assigned_users]
220 currently_assigned = [u.user_name for u in report_group.assigned_users]
187 user_status = {'assigned': [], 'unassigned': []}
221 user_status = {"assigned": [], "unassigned": []}
188 # handle users
222 # handle users
189 for user in users:
223 for user in users:
190 user_dict = {'user_name': user.user_name,
224 user_dict = {
191 'gravatar_url': UserService.gravatar_url(user),
225 "user_name": user.user_name,
192 'name': '%s %s' % (user.first_name, user.last_name,)}
226 "gravatar_url": UserService.gravatar_url(user),
227 "name": "%s %s" % (user.first_name, user.last_name),
228 }
193 if user.user_name in currently_assigned:
229 if user.user_name in currently_assigned:
194 user_status['assigned'].append(user_dict)
230 user_status["assigned"].append(user_dict)
195 elif user_dict not in user_status['unassigned']:
231 elif user_dict not in user_status["unassigned"]:
196 user_status['unassigned'].append(user_dict)
232 user_status["unassigned"].append(user_dict)
197 return user_status
233 return user_status
198
234
199
235
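assigned_users is essentially a partition of everyone with view permission into two buckets, keyed by whether their user_name is already assigned, with duplicates filtered out. Reduced to its core:

    def partition(users, currently_assigned):
        status = {"assigned": [], "unassigned": []}
        for name in users:
            bucket = "assigned" if name in currently_assigned else "unassigned"
            if name not in status[bucket]:
                status[bucket].append(name)
        return status

    assert partition({"alice", "bob"}, ["bob"])["assigned"] == ["bob"]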
200 @view_config(route_name='report_groups_property',
236 @view_config(
201 match_param='key=assigned_users', renderer='json',
237 route_name="report_groups_property",
202 permission='update_reports', request_method="PATCH")
238 match_param="key=assigned_users",
239 renderer="json",
240 permission="update_reports",
241 request_method="PATCH",
242 )
203 def assign_users(request):
243 def assign_users(request):
204 """
244 """
205 Assigns a specific report group to a user for review and sends an email notification
245 Assigns a specific report group to a user for review and sends an email notification
@@ -210,17 +250,18 b' def assign_users(request):'
210 new_assigns = request.unsafe_json_body
250 new_assigns = request.unsafe_json_body
211
251
212 # first unassign old users
252 # first unassign old users
213 for user_name in new_assigns['unassigned']:
253 for user_name in new_assigns["unassigned"]:
214 if user_name in currently_assigned:
254 if user_name in currently_assigned:
215 user = UserService.by_user_name(user_name)
255 user = UserService.by_user_name(user_name)
216 report_group.assigned_users.remove(user)
256 report_group.assigned_users.remove(user)
217 comment = ReportComment(owner_id=request.user.id,
257 comment = ReportComment(
218 report_time=report_group.first_timestamp)
258 owner_id=request.user.id, report_time=report_group.first_timestamp
219 comment.body = 'Unassigned group from @%s' % user_name
259 )
260 comment.body = "Unassigned group from @%s" % user_name
220 report_group.comments.append(comment)
261 report_group.comments.append(comment)
221
262
222 # assign new users
263 # assign new users
223 for user_name in new_assigns['assigned']:
264 for user_name in new_assigns["assigned"]:
224 if user_name not in currently_assigned:
265 if user_name not in currently_assigned:
225 user = UserService.by_user_name(user_name)
266 user = UserService.by_user_name(user_name)
226 if user in report_group.assigned_users:
267 if user in report_group.assigned_users:
@@ -229,66 +270,81 b' def assign_users(request):'
229 assignment = ReportAssignment(
270 assignment = ReportAssignment(
230 owner_id=user.id,
271 owner_id=user.id,
231 report_time=report_group.first_timestamp,
272 report_time=report_group.first_timestamp,
232 group_id=report_group.id)
273 group_id=report_group.id,
274 )
233 DBSession.add(assignment)
275 DBSession.add(assignment)
234
276
235 comment = ReportComment(owner_id=request.user.id,
277 comment = ReportComment(
236 report_time=report_group.first_timestamp)
278 owner_id=request.user.id, report_time=report_group.first_timestamp
237 comment.body = 'Assigned report_group to @%s' % user_name
279 )
280 comment.body = "Assigned report_group to @%s" % user_name
238 report_group.comments.append(comment)
281 report_group.comments.append(comment)
239
282
240 email_vars = {'user': user,
283 email_vars = {
241 'request': request,
284 "user": user,
242 'application': application,
285 "request": request,
243 'report_group': report_group,
286 "application": application,
244 'email_title': "AppEnlight :: Assigned Report"}
287 "report_group": report_group,
245 UserService.send_email(request, recipients=[user.email],
288 "email_title": "AppEnlight :: Assigned Report",
246 variables=email_vars,
289 }
247 template='/email_templates/assigned_report.jinja2')
290 UserService.send_email(
291 request,
292 recipients=[user.email],
293 variables=email_vars,
294 template="/email_templates/assigned_report.jinja2",
295 )
248
296
249 return True
297 return True
250
298
251
299
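The PATCH body for assign_users carries both lists, and the two loops above reduce to set differences against the currently assigned names; each change also leaves an audit comment on the group. Sketch:

    currently_assigned = {"alice", "bob"}
    new_assigns = {"assigned": ["carol", "bob"], "unassigned": ["alice"]}

    to_remove = [u for u in new_assigns["unassigned"] if u in currently_assigned]
    to_add = [u for u in new_assigns["assigned"] if u not in currently_assigned]
    assert to_remove == ["alice"] and to_add == ["carol"]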
252 @view_config(route_name='report_groups_property', match_param='key=history',
300 @view_config(
253 renderer='json', permission='view')
301 route_name="report_groups_property",
302 match_param="key=history",
303 renderer="json",
304 permission="view",
305 )
254 def history(request):
306 def history(request):
255 """ Separate error graph or similar graph"""
307 """ Separate error graph or similar graph"""
256 report_group = request.context.report_group
308 report_group = request.context.report_group
257 query_params = request.GET.mixed()
309 query_params = request.GET.mixed()
258 query_params['resource'] = (report_group.resource_id,)
310 query_params["resource"] = (report_group.resource_id,)
259
311
260 filter_settings = build_filter_settings_from_query_dict(request,
312 filter_settings = build_filter_settings_from_query_dict(request, query_params)
261 query_params)
313 if not filter_settings.get("end_date"):
262 if not filter_settings.get('end_date'):
263 end_date = datetime.utcnow().replace(microsecond=0, second=0)
314 end_date = datetime.utcnow().replace(microsecond=0, second=0)
264 filter_settings['end_date'] = end_date
315 filter_settings["end_date"] = end_date
265
316
266 if not filter_settings.get('start_date'):
317 if not filter_settings.get("start_date"):
267 delta = timedelta(days=30)
318 delta = timedelta(days=30)
268 filter_settings['start_date'] = filter_settings['end_date'] - delta
319 filter_settings["start_date"] = filter_settings["end_date"] - delta
269
320
270 filter_settings['group_id'] = report_group.id
321 filter_settings["group_id"] = report_group.id
271
322
272 result = ReportGroupService.get_report_stats(request, filter_settings)
323 result = ReportGroupService.get_report_stats(request, filter_settings)
273
324
274 plot_data = []
325 plot_data = []
275 for row in result:
326 for row in result:
276 point = {
327 point = {
277 'x': row['x'],
328 "x": row["x"],
278 'reports': row['report'] + row['slow_report'] + row['not_found']}
329 "reports": row["report"] + row["slow_report"] + row["not_found"],
330 }
279 plot_data.append(point)
331 plot_data.append(point)
280
332
281 return plot_data
333 return plot_data
282
334
283
335
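When the query string carries no bounds, history falls back to a 30-day window ending now, truncated to the minute; each plot point then sums the error, slow, and not-found counts per bucket. The default-window logic in isolation:

    from datetime import datetime, timedelta

    filter_settings = {}
    if not filter_settings.get("end_date"):
        filter_settings["end_date"] = datetime.utcnow().replace(microsecond=0, second=0)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = filter_settings["end_date"] - timedelta(days=30)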
284 @view_config(route_name='report_groups', renderer='json',
336 @view_config(
285 permission='update_reports', request_method="PATCH")
337 route_name="report_groups",
338 renderer="json",
339 permission="update_reports",
340 request_method="PATCH",
341 )
286 def report_groups_PATCH(request):
342 def report_groups_PATCH(request):
287 """
343 """
288 Used to update the report group fixed status
344 Used to update the report group fixed status
289 """
345 """
290 report_group = request.context.report_group
346 report_group = request.context.report_group
291 allowed_keys = ['public', 'fixed']
347 allowed_keys = ["public", "fixed"]
292 for k, v in request.unsafe_json_body.items():
348 for k, v in request.unsafe_json_body.items():
293 if k in allowed_keys:
349 if k in allowed_keys:
294 setattr(report_group, k, v)
350 setattr(report_group, k, v)
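report_groups_PATCH uses a whitelist-assignment pattern: only keys named in allowed_keys may be copied from the untrusted JSON body onto the model. A stand-alone sketch:

    class Group:
        public = False
        fixed = False

    group = Group()
    allowed_keys = ["public", "fixed"]
    payload = {"fixed": True, "owner_id": 999}  # unknown key is silently dropped
    for k, v in payload.items():
        if k in allowed_keys:
            setattr(group, k, v)
    assert group.fixed is True and not hasattr(group, "owner_id")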
@@ -42,39 +42,52 b' log = logging.getLogger(__name__)'
42 GLOBAL_REQ = None
42 GLOBAL_REQ = None
43
43
44
44
45 @view_config(route_name='test', match_param='action=mail',
45 @view_config(
46 renderer='string', permission='root_administration')
46 route_name="test",
47 match_param="action=mail",
48 renderer="string",
49 permission="root_administration",
50 )
47 def mail(request):
51 def mail(request):
48 """
52 """
49 Test email communication
53 Test email communication
50 """
54 """
51 request.environ['HTTP_HOST'] = 'appenlight.com'
55 request.environ["HTTP_HOST"] = "appenlight.com"
52 request.environ['wsgi.url_scheme'] = 'https'
56 request.environ["wsgi.url_scheme"] = "https"
53 renderer_vars = {"title": "You have just registered on AppEnlight",
57 renderer_vars = {
54 "username": "test",
58 "title": "You have just registered on AppEnlight",
55 "email": "grzegżółka",
59 "username": "test",
56 'firstname': 'dupa'}
60 "email": "grzegżółka",
61 "firstname": "dupa",
62 }
57 # return vars
63 # return vars
58 html = pyramid.renderers.render('/email_templates/registered.jinja2',
64 html = pyramid.renderers.render(
59 renderer_vars,
65 "/email_templates/registered.jinja2", renderer_vars, request=request
60 request=request)
66 )
61 message = Message(subject="hello world %s" % random.randint(1, 9999),
67 message = Message(
62 sender="info@appenlight.com",
68 subject="hello world %s" % random.randint(1, 9999),
63 recipients=["ergo14@gmail.com"],
69 sender="info@appenlight.com",
64 html=html)
70 recipients=["ergo14@gmail.com"],
71 html=html,
72 )
65 request.registry.mailer.send(message)
73 request.registry.mailer.send(message)
66 return html
74 return html
67 return vars
75 return vars
68
76
69
77
70 @view_config(route_name='test', match_param='action=alerting',
78 @view_config(
71 renderer='appenlight:templates/tests/alerting.jinja2',
79 route_name="test",
72 permission='root_administration')
80 match_param="action=alerting",
81 renderer="appenlight:templates/tests/alerting.jinja2",
82 permission="root_administration",
83 )
73 def alerting_test(request):
84 def alerting_test(request):
74 """
85 """
75 Allows testing the sending of data on the various registered alerting channels
86 Allows testing the sending of data on the various registered alerting channels
76 """
87 """
77 applications = UserService.resources_with_perms(request.user, ['view'], resource_types=['application'])
88 applications = UserService.resources_with_perms(
89 request.user, ["view"], resource_types=["application"]
90 )
78 # what we can select in total
91 # what we can select in total
79 all_possible_app_ids = [app.resource_id for app in applications]
92 all_possible_app_ids = [app.resource_id for app in applications]
80 resource = applications[0]
93 resource = applications[0]
@@ -83,114 +96,129 b' def alerting_test(request):'
83 for channel in request.user.alert_channels:
96 for channel in request.user.alert_channels:
84 alert_channels.append(channel.get_dict())
97 alert_channels.append(channel.get_dict())
85
98
86 cname = request.params.get('channel_name')
99 cname = request.params.get("channel_name")
87 cvalue = request.params.get('channel_value')
100 cvalue = request.params.get("channel_value")
88 event_name = request.params.get('event_name')
101 event_name = request.params.get("event_name")
89 if cname and cvalue:
102 if cname and cvalue:
90 for channel in request.user.alert_channels:
103 for channel in request.user.alert_channels:
91 if (channel.channel_value == cvalue and
104 if channel.channel_value == cvalue and channel.channel_name == cname:
92 channel.channel_name == cname):
93 break
105 break
94 if event_name in ['error_report_alert', 'slow_report_alert']:
106 if event_name in ["error_report_alert", "slow_report_alert"]:
95 # opened
107 # opened
96 new_event = Event(resource_id=resource.resource_id,
108 new_event = Event(
97 event_type=Event.types[event_name],
109 resource_id=resource.resource_id,
98 start_date=datetime.datetime.utcnow(),
110 event_type=Event.types[event_name],
99 status=Event.statuses['active'],
111 start_date=datetime.datetime.utcnow(),
100 values={'reports': 5,
112 status=Event.statuses["active"],
101 'threshold': 10}
113 values={"reports": 5, "threshold": 10},
102 )
114 )
103 channel.notify_alert(resource=resource,
115 channel.notify_alert(
104 event=new_event,
116 resource=resource, event=new_event, user=request.user, request=request
105 user=request.user,
117 )
106 request=request)
107
118
108 # closed
119 # closed
109 ev_type = Event.types[event_name.replace('open', 'close')]
120 ev_type = Event.types[event_name.replace("open", "close")]
110 new_event = Event(resource_id=resource.resource_id,
121 new_event = Event(
111 event_type=ev_type,
122 resource_id=resource.resource_id,
112 start_date=datetime.datetime.utcnow(),
123 event_type=ev_type,
113 status=Event.statuses['closed'],
124 start_date=datetime.datetime.utcnow(),
114 values={'reports': 5,
125 status=Event.statuses["closed"],
115 'threshold': 10})
126 values={"reports": 5, "threshold": 10},
116 channel.notify_alert(resource=resource,
127 )
117 event=new_event,
128 channel.notify_alert(
118 user=request.user,
129 resource=resource, event=new_event, user=request.user, request=request
119 request=request)
130 )
120 elif event_name == 'notify_reports':
131 elif event_name == "notify_reports":
121 report = ReportGroupService.by_app_ids(all_possible_app_ids) \
132 report = (
122 .filter(ReportGroup.report_type == ReportType.error).first()
133 ReportGroupService.by_app_ids(all_possible_app_ids)
134 .filter(ReportGroup.report_type == ReportType.error)
135 .first()
136 )
123 confirmed_reports = [(5, report), (1, report)]
137 confirmed_reports = [(5, report), (1, report)]
124 channel.notify_reports(resource=resource,
138 channel.notify_reports(
125 user=request.user,
139 resource=resource,
126 request=request,
140 user=request.user,
127 since_when=datetime.datetime.utcnow(),
141 request=request,
128 reports=confirmed_reports)
142 since_when=datetime.datetime.utcnow(),
143 reports=confirmed_reports,
144 )
129 confirmed_reports = [(5, report)]
145 confirmed_reports = [(5, report)]
130 channel.notify_reports(resource=resource,
146 channel.notify_reports(
131 user=request.user,
147 resource=resource,
132 request=request,
148 user=request.user,
133 since_when=datetime.datetime.utcnow(),
149 request=request,
134 reports=confirmed_reports)
150 since_when=datetime.datetime.utcnow(),
135 elif event_name == 'notify_uptime':
151 reports=confirmed_reports,
136 new_event = Event(resource_id=resource.resource_id,
152 )
137 event_type=Event.types['uptime_alert'],
153 elif event_name == "notify_uptime":
138 start_date=datetime.datetime.utcnow(),
154 new_event = Event(
139 status=Event.statuses['active'],
155 resource_id=resource.resource_id,
140 values={"status_code": 500,
156 event_type=Event.types["uptime_alert"],
141 "tries": 2,
157 start_date=datetime.datetime.utcnow(),
142 "response_time": 0})
158 status=Event.statuses["active"],
143 channel.notify_uptime_alert(resource=resource,
159 values={"status_code": 500, "tries": 2, "response_time": 0},
144 event=new_event,
160 )
145 user=request.user,
161 channel.notify_uptime_alert(
146 request=request)
162 resource=resource, event=new_event, user=request.user, request=request
147 elif event_name == 'chart_alert':
163 )
164 elif event_name == "chart_alert":
148 event = EventService.by_type_and_status(
165 event = EventService.by_type_and_status(
149 event_types=(Event.types['chart_alert'],),
166 event_types=(Event.types["chart_alert"],),
150 status_types=(Event.statuses['active'],)).first()
167 status_types=(Event.statuses["active"],),
151 channel.notify_chart_alert(resource=event.resource,
168 ).first()
152 event=event,
169 channel.notify_chart_alert(
153 user=request.user,
170 resource=event.resource, event=event, user=request.user, request=request
154 request=request)
171 )
155 elif event_name == 'daily_digest':
172 elif event_name == "daily_digest":
156 since_when = datetime.datetime.utcnow() - datetime.timedelta(
173 since_when = datetime.datetime.utcnow() - datetime.timedelta(hours=8)
157 hours=8)
174 filter_settings = {
158 filter_settings = {'resource': [resource.resource_id],
175 "resource": [resource.resource_id],
159 'tags': [{'name': 'type',
176 "tags": [{"name": "type", "value": ["error"], "op": None}],
160 'value': ['error'], 'op': None}],
177 "type": "error",
161 'type': 'error', 'start_date': since_when}
178 "start_date": since_when,
179 }
162
180
163 reports = ReportGroupService.get_trending(
181 reports = ReportGroupService.get_trending(
164 request, filter_settings=filter_settings, limit=50)
182 request, filter_settings=filter_settings, limit=50
165 channel.send_digest(resource=resource,
183 )
166 user=request.user,
184 channel.send_digest(
167 request=request,
185 resource=resource,
168 since_when=datetime.datetime.utcnow(),
186 user=request.user,
169 reports=reports)
187 request=request,
170
188 since_when=datetime.datetime.utcnow(),
171 return {'alert_channels': alert_channels,
189 reports=reports,
172 'applications': dict([(app.resource_id, app.resource_name)
190 )
173 for app in applications.all()])}
191
174
192 return {
175
193 "alert_channels": alert_channels,
176 @view_config(route_name='test', match_param='action=error',
194 "applications": dict(
177 renderer='string', permission='root_administration')
195 [(app.resource_id, app.resource_name) for app in applications.all()]
196 ),
197 }
198
199
200 @view_config(
201 route_name="test",
202 match_param="action=error",
203 renderer="string",
204 permission="root_administration",
205 )
178 def error(request):
206 def error(request):
179 """
207 """
180 Raises an internal error with some test data for testing purposes
208 Raises an internal error with some test data for testing purposes
181 """
209 """
182 request.environ['appenlight.message'] = 'test message'
210 request.environ["appenlight.message"] = "test message"
183 request.environ['appenlight.extra']['dupa'] = 'dupa'
211 request.environ["appenlight.extra"]["dupa"] = "dupa"
184 request.environ['appenlight.extra']['message'] = 'message'
212 request.environ["appenlight.extra"]["message"] = "message"
185 request.environ['appenlight.tags']['action'] = 'test_error'
213 request.environ["appenlight.tags"]["action"] = "test_error"
186 request.environ['appenlight.tags']['count'] = 5
214 request.environ["appenlight.tags"]["count"] = 5
187 log.debug(chr(960))
215 log.debug(chr(960))
188 log.debug('debug')
216 log.debug("debug")
189 log.info(chr(960))
217 log.info(chr(960))
190 log.info('INFO')
218 log.info("INFO")
191 log.warning('warning')
219 log.warning("warning")
192
220
193 @time_trace(name='error.foobar', min_duration=0.1)
221 @time_trace(name="error.foobar", min_duration=0.1)
194 def fooobar():
222 def fooobar():
195 time.sleep(0.12)
223 time.sleep(0.12)
196 return 1
224 return 1
@@ -198,24 +226,28 b' def error(request):'
198 fooobar()
226 fooobar()
199
227
200 def foobar(somearg):
228 def foobar(somearg):
201 raise Exception('test')
229 raise Exception("test")
202
230
203 client = redis.StrictRedis()
231 client = redis.StrictRedis()
204 client.setex('testval', 10, 'foo')
232 client.setex("testval", 10, "foo")
205 request.environ['appenlight.force_send'] = 1
233 request.environ["appenlight.force_send"] = 1
206
234
207 # stats, result = get_local_storage(local_timing).get_thread_stats()
235 # stats, result = get_local_storage(local_timing).get_thread_stats()
208 # import pprint
236 # import pprint
209 # pprint.pprint(stats)
237 # pprint.pprint(stats)
210 # pprint.pprint(result)
238 # pprint.pprint(result)
211 # print 'entries', len(result)
239 # print 'entries', len(result)
212 request.environ['appenlight.username'] = 'ErgO'
240 request.environ["appenlight.username"] = "ErgO"
213 raise Exception(chr(960) + '%s' % random.randint(1, 5))
241 raise Exception(chr(960) + "%s" % random.randint(1, 5))
214 return {}
242 return {}
215
243
216
244
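time_trace, used throughout these test views, is AppEnlight's client-side timing decorator; the sketch below is a generic reimplementation of the observable behavior (report a named call only when it runs longer than min_duration), not the client's actual code:

    import functools
    import time

    def time_trace(name, min_duration):
        def deco(fn):
            @functools.wraps(fn)
            def wrapper(*args, **kwargs):
                start = time.time()
                try:
                    return fn(*args, **kwargs)
                finally:
                    elapsed = time.time() - start
                    if elapsed >= min_duration:
                        print("slow call %r took %.3fs" % (name, elapsed))
            return wrapper
        return deco

    @time_trace(name="error.foobar", min_duration=0.1)
    def fooobar():
        time.sleep(0.12)
        return 1

    fooobar()  # prints something like: slow call 'error.foobar' took 0.120s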
217 @view_config(route_name='test', match_param='action=task',
245 @view_config(
218 renderer='string', permission='root_administration')
246 route_name="test",
247 match_param="action=task",
248 renderer="string",
249 permission="root_administration",
250 )
219 def test_task(request):
251 def test_task(request):
220 """
252 """
221 Test erroneous celery task
253 Test erroneous celery task
@@ -223,11 +255,15 b' def test_task(request):'
223 import appenlight.celery.tasks
255 import appenlight.celery.tasks
224
256
225 appenlight.celery.tasks.test_exception_task.delay()
257 appenlight.celery.tasks.test_exception_task.delay()
226 return 'task sent'
258 return "task sent"
227
259
228
260
229 @view_config(route_name='test', match_param='action=task_retry',
261 @view_config(
230 renderer='string', permission='root_administration')
262 route_name="test",
263 match_param="action=task_retry",
264 renderer="string",
265 permission="root_administration",
266 )
231 def test_task_retry(request):
267 def test_task_retry(request):
232 """
268 """
233 Test erroneous celery task
269 Test erroneous celery task
@@ -235,80 +271,131 b' def test_task_retry(request):'
235 import appenlight.celery.tasks
271 import appenlight.celery.tasks
236
272
237 appenlight.celery.tasks.test_retry_exception_task.delay()
273 appenlight.celery.tasks.test_retry_exception_task.delay()
238 return 'task sent'
274 return "task sent"
239
275
240
276
241 @view_config(route_name='test', match_param='action=celery_emails',
277 @view_config(
242 renderer='string', permission='root_administration')
278 route_name="test",
279 match_param="action=celery_emails",
280 renderer="string",
281 permission="root_administration",
282 )
243 def test_celery_emails(request):
283 def test_celery_emails(request):
244 import appenlight.celery.tasks
284 import appenlight.celery.tasks
285
245 appenlight.celery.tasks.alerting.delay()
286 appenlight.celery.tasks.alerting.delay()
246 return 'task sent'
287 return "task sent"
247
288
248
289
249 @view_config(route_name='test', match_param='action=daily_digest',
290 @view_config(
250 renderer='string', permission='root_administration')
291 route_name="test",
292 match_param="action=daily_digest",
293 renderer="string",
294 permission="root_administration",
295 )
251 def test_celery_daily_digest(request):
296 def test_celery_daily_digest(request):
252 import appenlight.celery.tasks
297 import appenlight.celery.tasks
298
253 appenlight.celery.tasks.daily_digest.delay()
299 appenlight.celery.tasks.daily_digest.delay()
254 return 'task sent'
300 return "task sent"
255
301
256
302
257 @view_config(route_name='test', match_param='action=celery_alerting',
303 @view_config(
258 renderer='string', permission='root_administration')
304 route_name="test",
305 match_param="action=celery_alerting",
306 renderer="string",
307 permission="root_administration",
308 )
259 def test_celery_alerting(request):
309 def test_celery_alerting(request):
260 import appenlight.celery.tasks
310 import appenlight.celery.tasks
311
261 appenlight.celery.tasks.alerting()
312 appenlight.celery.tasks.alerting()
262 return 'task sent'
313 return "task sent"
263
314
264
315
265 @view_config(route_name='test', match_param='action=logging',
316 @view_config(
266 renderer='string', permission='root_administration')
317 route_name="test",
318 match_param="action=logging",
319 renderer="string",
320 permission="root_administration",
321 )
267 def logs(request):
322 def logs(request):
268 """
323 """
269 Test some in-app logging
324 Test some in-app logging
270 """
325 """
271 log.debug(chr(960))
326 log.debug(chr(960))
272 log.debug('debug')
327 log.debug("debug")
273 log.info(chr(960))
328 log.info(chr(960))
274 log.info('INFO')
329 log.info("INFO")
275 log.warning('Matched GET /\xc4\x85\xc5\xbc\xc4\x87'
330 log.warning(
276 '\xc4\x99\xc4\x99\xc4\x85/summary')
331 "Matched GET /\xc4\x85\xc5\xbc\xc4\x87" "\xc4\x99\xc4\x99\xc4\x85/summary"
277 log.warning('XXXXMatched GET /\xc4\x85\xc5\xbc\xc4'
332 )
278 '\x87\xc4\x99\xc4\x99\xc4\x85/summary')
333 log.warning(
279 log.warning('DUPA /ążćęęą')
334 "XXXXMatched GET /\xc4\x85\xc5\xbc\xc4" "\x87\xc4\x99\xc4\x99\xc4\x85/summary"
335 )
336 log.warning("DUPA /ążćęęą")
280 log.warning("g\u017ceg\u017c\u00f3\u0142ka")
337 log.warning("g\u017ceg\u017c\u00f3\u0142ka")
281 log.error('TEST Lorem ipsum2',
338 log.error(
282 extra={'user': 'ergo', 'commit': 'sog8ds0g7sdih12hh1j512h5k'})
339 "TEST Lorem ipsum2",
283 log.fatal('TEST Lorem ipsum3')
340 extra={"user": "ergo", "commit": "sog8ds0g7sdih12hh1j512h5k"},
284 log.warning('TEST Lorem ipsum',
341 )
285 extra={"action": 'purchase',
342 log.fatal("TEST Lorem ipsum3")
286 "price": random.random() * 100,
343 log.warning(
287 "quantity": random.randint(1, 99)})
344 "TEST Lorem ipsum",
288 log.warning('test_pkey',
345 extra={
289 extra={"action": 'test_pkey', "price": random.random() * 100,
346 "action": "purchase",
290 'ae_primary_key': 1,
347 "price": random.random() * 100,
291 "quantity": random.randint(1, 99)})
348 "quantity": random.randint(1, 99),
292 log.warning('test_pkey2',
349 },
293 extra={"action": 'test_pkey', "price": random.random() * 100,
350 )
294 'ae_primary_key': 'b',
351 log.warning(
295 'ae_permanent': 't',
352 "test_pkey",
296 "quantity": random.randint(1, 99)})
353 extra={
297 log.warning('test_pkey3',
354 "action": "test_pkey",
298 extra={"action": 'test_pkey', "price": random.random() * 100,
355 "price": random.random() * 100,
299 'ae_primary_key': 1,
356 "ae_primary_key": 1,
300 "quantity": random.randint(1, 99)})
357 "quantity": random.randint(1, 99),
301 log.warning('test_pkey4',
358 },
302 extra={"action": 'test_pkey', "price": random.random() * 100,
359 )
303 'ae_primary_key': 'b',
360 log.warning(
304 'ae_permanent': True,
361 "test_pkey2",
305 "quantity": random.randint(1, 99)})
362 extra={
306 request.environ['appenlight.force_send'] = 1
363 "action": "test_pkey",
364 "price": random.random() * 100,
365 "ae_primary_key": "b",
366 "ae_permanent": "t",
367 "quantity": random.randint(1, 99),
368 },
369 )
370 log.warning(
371 "test_pkey3",
372 extra={
373 "action": "test_pkey",
374 "price": random.random() * 100,
375 "ae_primary_key": 1,
376 "quantity": random.randint(1, 99),
377 },
378 )
379 log.warning(
380 "test_pkey4",
381 extra={
382 "action": "test_pkey",
383 "price": random.random() * 100,
384 "ae_primary_key": "b",
385 "ae_permanent": True,
386 "quantity": random.randint(1, 99),
387 },
388 )
389 request.environ["appenlight.force_send"] = 1
307 return {}
390 return {}
308
391
309
392
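The extra keys in this logging test ride along as structured data on each entry; judging only from the usage above, ae_primary_key appears to act as an upsert key (entries sharing it replace one another) and ae_permanent to exempt an entry from cleanup - both readings are inferred from this test, not from documentation. Plain stdlib logging accepts the same shape:

    import logging

    logging.basicConfig()
    log = logging.getLogger("demo")
    log.warning(
        "test_pkey",
        extra={
            "action": "test_pkey",
            "ae_primary_key": 1,  # assumed upsert key, inferred from usage
            "quantity": 42,
        },
    )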
310 @view_config(route_name='test', match_param='action=transaction',
393 @view_config(
311 renderer='string', permission='root_administration')
394 route_name="test",
395 match_param="action=transaction",
396 renderer="string",
397 permission="root_administration",
398 )
312 def transaction_test(request):
399 def transaction_test(request):
313 """
400 """
314 Test transactions
401 Test transactions
@@ -318,74 +405,92 b' def transaction_test(request):'
318 except:
405 except:
319 request.tm.abort()
406 request.tm.abort()
320 result = DBSession.execute("SELECT 1")
407 result = DBSession.execute("SELECT 1")
321 return 'OK'
408 return "OK"
322
409
323
410
324 @view_config(route_name='test', match_param='action=slow_request',
411 @view_config(
325 renderer='string', permission='root_administration')
412 route_name="test",
413 match_param="action=slow_request",
414 renderer="string",
415 permission="root_administration",
416 )
326 def slow_request(request):
417 def slow_request(request):
327 """
418 """
328 Test a request that has some slow entries - including nested calls
419 Test a request that has some slow entries - including nested calls
329 """
420 """
330 users = DBSession.query(User).all()
421 users = DBSession.query(User).all()
331 import random
422 import random
423
332 some_val = random.random()
424 some_val = random.random()
333 import threading
425 import threading
426
334 t_id = id(threading.currentThread())
427 t_id = id(threading.currentThread())
335 log.warning('slow_log %s %s ' % (some_val, t_id))
428 log.warning("slow_log %s %s " % (some_val, t_id))
336 log.critical('tid %s' % t_id)
429 log.critical("tid %s" % t_id)
337
430
338 @time_trace(name='baz_func %s' % some_val, min_duration=0.1)
431 @time_trace(name="baz_func %s" % some_val, min_duration=0.1)
339 def baz(arg):
432 def baz(arg):
340 time.sleep(0.32)
433 time.sleep(0.32)
341 return arg
434 return arg
342
435
343 requests.get('http://ubuntu.com')
436 requests.get("http://ubuntu.com")
344
437
345 @time_trace(name='foo_func %s %s' % (some_val, t_id), min_duration=0.1)
438 @time_trace(name="foo_func %s %s" % (some_val, t_id), min_duration=0.1)
346 def foo(arg):
439 def foo(arg):
347 time.sleep(0.52)
440 time.sleep(0.52)
348 log.warning('foo_func %s %s' % (some_val, t_id))
441 log.warning("foo_func %s %s" % (some_val, t_id))
349 requests.get('http://ubuntu.com?test=%s' % some_val)
442 requests.get("http://ubuntu.com?test=%s" % some_val)
350 return bar(arg)
443 return bar(arg)
351
444
352 @time_trace(name='bar_func %s %s' % (some_val, t_id), min_duration=0.1)
445 @time_trace(name="bar_func %s %s" % (some_val, t_id), min_duration=0.1)
353 def bar(arg):
446 def bar(arg):
354 log.warning('bar_func %s %s' % (some_val, t_id))
447 log.warning("bar_func %s %s" % (some_val, t_id))
355 time.sleep(1.52)
448 time.sleep(1.52)
356 baz(arg)
449 baz(arg)
357 baz(arg)
450 baz(arg)
358 return baz(arg)
451 return baz(arg)
359
452
360 foo('a')
453 foo("a")
361 return {}
454 return {}
362
455
363
456
364 @view_config(route_name='test', match_param='action=styling',
457 @view_config(
365 renderer='appenlight:templates/tests/styling.jinja2',
458 route_name="test",
366 permission='__no_permission_required__')
459 match_param="action=styling",
460 renderer="appenlight:templates/tests/styling.jinja2",
461 permission="__no_permission_required__",
462 )
367 def styling(request):
463 def styling(request):
368 """
464 """
369 Some styling test page
465 Some styling test page
370 """
466 """
371 _ = str
467 _ = str
372 request.session.flash(_(
468 request.session.flash(
373 'Your password got updated. '
469 _("Your password got updated. " "Next time log in with your new credentials.")
374 'Next time log in with your new credentials.'))
470 )
375 request.session.flash(_(
471 request.session.flash(
376 'Something went wrong when we '
472 _(
377 'tried to authorize you via external provider'),
473 "Something went wrong when we "
378 'warning')
474 "tried to authorize you via external provider"
379 request.session.flash(_(
475 ),
380 'Unfortunately there was a problem '
476 "warning",
381 'processing your payment, please try again later.'),
477 )
382 'error')
478 request.session.flash(
479 _(
480 "Unfortunately there was a problem "
481 "processing your payment, please try again later."
482 ),
483 "error",
484 )
383 return {}
485 return {}
384
486
385
487
386 @view_config(route_name='test', match_param='action=js_error',
488 @view_config(
387 renderer='appenlight:templates/tests/js_error.jinja2',
489 route_name="test",
388 permission='__no_permission_required__')
490 match_param="action=js_error",
491 renderer="appenlight:templates/tests/js_error.jinja2",
492 permission="__no_permission_required__",
493 )
389 def js(request):
494 def js(request):
390 """
495 """
391 Used for testing the javascript client for error catching
496 Used for testing the javascript client for error catching
@@ -393,9 +498,12 b' def js(request):'
393 return {}
498 return {}
394
499
395
500
396 @view_config(route_name='test', match_param='action=js_log',
501 @view_config(
397 renderer='appenlight:templates/tests/js_log.jinja2',
502 route_name="test",
398 permission='__no_permission_required__')
503 match_param="action=js_log",
504 renderer="appenlight:templates/tests/js_log.jinja2",
505 permission="__no_permission_required__",
506 )
399 def js_log(request):
507 def js_log(request):
400 """
508 """
401 Used for testing the javascript client for logging
509 Used for testing the javascript client for logging
@@ -403,9 +511,12 b' def js_log(request):'
403 return {}
511 return {}
404
512
405
513
406 @view_config(route_name='test', match_param='action=log_requests',
514 @view_config(
407 renderer='string',
515 route_name="test",
408 permission='__no_permission_required__')
516 match_param="action=log_requests",
517 renderer="string",
518 permission="__no_permission_required__",
519 )
409 def log_requests(request):
520 def log_requests(request):
410 """
521 """
411 Util view for printing json requests
522 Util view for printing json requests
@@ -413,13 +524,17 b' def log_requests(request):'
413 return {}
524 return {}
414
525
415
526
416 @view_config(route_name='test', match_param='action=url', renderer='string',
527 @view_config(
417 permission='__no_permission_required__')
528 route_name="test",
529 match_param="action=url",
530 renderer="string",
531 permission="__no_permission_required__",
532 )
418 def log_requests(request):
533 def log_requests(request):
419 """
534 """
420 I have no fucking clue why I needed that ;-)
535 I have no fucking clue why I needed that ;-)
421 """
536 """
422 return request.route_url('reports', _app_url='https://appenlight.com')
537 return request.route_url("reports", _app_url="https://appenlight.com")
423
538
424
539
425 class TestClass(object):
540 class TestClass(object):
@@ -430,16 +545,32 b' class TestClass(object):'
430 def __init__(self, request):
545 def __init__(self, request):
431 self.request = request
546 self.request = request
432
547
433 @view_config(route_name='test', match_param='action=test_a',
548 @view_config(
434 renderer='string', permission='root_administration')
549 route_name="test",
435 @view_config(route_name='test', match_param='action=test_c',
550 match_param="action=test_a",
436 renderer='string', permission='root_administration')
551 renderer="string",
437 @view_config(route_name='test', match_param='action=test_d',
552 permission="root_administration",
438 renderer='string', permission='root_administration')
553 )
554 @view_config(
555 route_name="test",
556 match_param="action=test_c",
557 renderer="string",
558 permission="root_administration",
559 )
560 @view_config(
561 route_name="test",
562 match_param="action=test_d",
563 renderer="string",
564 permission="root_administration",
565 )
439 def test_a(self):
566 def test_a(self):
440 return 'ok'
567 return "ok"
441
568
442 @view_config(route_name='test', match_param='action=test_b',
569 @view_config(
443 renderer='string', permission='root_administration')
570 route_name="test",
571 match_param="action=test_b",
572 renderer="string",
573 permission="root_administration",
574 )
444 def test_b(self):
575 def test_b(self):
445 return 'ok'
576 return "ok"
@@ -27,20 +27,23 b' from pyramid.view import view_config'
27 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
27 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest
28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest
29 from pyramid.security import NO_PERMISSION_REQUIRED
29 from pyramid.security import NO_PERMISSION_REQUIRED
30 from ziggurat_foundations.models.services.external_identity import \
30 from ziggurat_foundations.models.services.external_identity import (
31 ExternalIdentityService
31 ExternalIdentityService,
32 )
32 from ziggurat_foundations.models.services.user import UserService
33 from ziggurat_foundations.models.services.user import UserService
33
34
34 from appenlight.lib import generate_random_string
35 from appenlight.lib import generate_random_string
35 from appenlight.lib.social import handle_social_data
36 from appenlight.lib.social import handle_social_data
36 from appenlight.lib.utils import channelstream_request, add_cors_headers, \
37 from appenlight.lib.utils import (
37 permission_tuple_to_dict
38 channelstream_request,
39 add_cors_headers,
40 permission_tuple_to_dict,
41 )
38 from appenlight.models import DBSession
42 from appenlight.models import DBSession
39 from appenlight.models.alert_channels.email import EmailAlertChannel
43 from appenlight.models.alert_channels.email import EmailAlertChannel
40 from appenlight.models.alert_channel_action import AlertChannelAction
44 from appenlight.models.alert_channel_action import AlertChannelAction
41 from appenlight.models.services.alert_channel import AlertChannelService
45 from appenlight.models.services.alert_channel import AlertChannelService
42 from appenlight.models.services.alert_channel_action import \
46 from appenlight.models.services.alert_channel_action import AlertChannelActionService
43 AlertChannelActionService
44 from appenlight.models.auth_token import AuthToken
47 from appenlight.models.auth_token import AuthToken
45 from appenlight.models.report import REPORT_TYPE_MATRIX
48 from appenlight.models.report import REPORT_TYPE_MATRIX
46 from appenlight.models.user import User
49 from appenlight.models.user import User
@@ -53,33 +56,49 @@ from webob.multidict import MultiDict
 log = logging.getLogger(__name__)


-@view_config(route_name='users_no_id', renderer='json',
-             request_method="GET", permission='root_administration')
+@view_config(
+    route_name="users_no_id",
+    renderer="json",
+    request_method="GET",
+    permission="root_administration",
+)
 def users_list(request):
     """
     Returns users list
     """
-    props = ['user_name', 'id', 'first_name', 'last_name', 'email',
-             'last_login_date', 'status']
+    props = [
+        "user_name",
+        "id",
+        "first_name",
+        "last_name",
+        "email",
+        "last_login_date",
+        "status",
+    ]
     users = UserService.all()
     users_dicts = []
     for user in users:
         u_dict = user.get_dict(include_keys=props)
-        u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20)
+        u_dict["gravatar_url"] = UserService.gravatar_url(user, s=20)
         users_dicts.append(u_dict)
     return users_dicts


-@view_config(route_name='users_no_id', renderer='json',
-             request_method="POST", permission='root_administration')
+@view_config(
+    route_name="users_no_id",
+    renderer="json",
+    request_method="POST",
+    permission="root_administration",
+)
 def users_create(request):
     """
     Returns users list
     """
-    form = forms.UserCreateForm(MultiDict(request.safe_json_body or {}),
-                                csrf_context=request)
+    form = forms.UserCreateForm(
+        MultiDict(request.safe_json_body or {}), csrf_context=request
+    )
     if form.validate():
-        log.info('registering user')
+        log.info("registering user")
         # probably not needed in the future since this requires root anyways
         # lets keep this here in case we lower view permission in the future
         # if request.registry.settings['appenlight.disable_registration']:
@@ -91,29 +110,42 @@ def users_create(request):
         UserService.regenerate_security_code(user)
         UserService.set_password(user, user.user_password)
         user.status = 1 if form.status.data else 0
-        request.session.flash(_('User created'))
+        request.session.flash(_("User created"))
         DBSession.flush()
-        return user.get_dict(exclude_keys=['security_code_date', 'notes',
-                                           'security_code', 'user_password'])
+        return user.get_dict(
+            exclude_keys=[
+                "security_code_date",
+                "notes",
+                "security_code",
+                "user_password",
+            ]
+        )
     else:
         return HTTPUnprocessableEntity(body=form.errors_json)


-@view_config(route_name='users', renderer='json',
-             request_method="GET", permission='root_administration')
-@view_config(route_name='users', renderer='json',
-             request_method="PATCH", permission='root_administration')
+@view_config(
+    route_name="users",
+    renderer="json",
+    request_method="GET",
+    permission="root_administration",
+)
+@view_config(
+    route_name="users",
+    renderer="json",
+    request_method="PATCH",
+    permission="root_administration",
+)
 def users_update(request):
     """
     Updates user object
     """
-    user = UserService.by_id(request.matchdict.get('user_id'))
+    user = UserService.by_id(request.matchdict.get("user_id"))
     if not user:
         return HTTPNotFound()
     post_data = request.safe_json_body or {}
-    if request.method == 'PATCH':
-        form = forms.UserUpdateForm(MultiDict(post_data),
-                                    csrf_context=request)
+    if request.method == "PATCH":
+        form = forms.UserUpdateForm(MultiDict(post_data), csrf_context=request)
         if form.validate():
             form.populate_obj(user, ignore_none=True)
             if form.user_password.data:
@@ -124,114 +156,148 @@ def users_update(request):
                 user.status = 0
         else:
             return HTTPUnprocessableEntity(body=form.errors_json)
-    return user.get_dict(exclude_keys=['security_code_date', 'notes',
-                                       'security_code', 'user_password'])
+    return user.get_dict(
+        exclude_keys=["security_code_date", "notes", "security_code", "user_password"]
+    )


-@view_config(route_name='users_property',
-             match_param='key=resource_permissions',
-             renderer='json', permission='authenticated')
+@view_config(
+    route_name="users_property",
+    match_param="key=resource_permissions",
+    renderer="json",
+    permission="authenticated",
+)
 def users_resource_permissions_list(request):
     """
     Get list of permissions assigned to specific resources
     """
-    user = UserService.by_id(request.matchdict.get('user_id'))
+    user = UserService.by_id(request.matchdict.get("user_id"))
     if not user:
         return HTTPNotFound()
-    return [permission_tuple_to_dict(perm) for perm in
-            UserService.resources_with_possible_perms(user)]
+    return [
+        permission_tuple_to_dict(perm)
+        for perm in UserService.resources_with_possible_perms(user)
+    ]


-@view_config(route_name='users', renderer='json',
-             request_method="DELETE", permission='root_administration')
+@view_config(
+    route_name="users",
+    renderer="json",
+    request_method="DELETE",
+    permission="root_administration",
+)
 def users_DELETE(request):
     """
     Removes a user permanently from db - makes a check to see if after the
     operation there will be at least one admin left
     """
-    msg = _('There needs to be at least one administrator in the system')
-    user = UserService.by_id(request.matchdict.get('user_id'))
+    msg = _("There needs to be at least one administrator in the system")
+    user = UserService.by_id(request.matchdict.get("user_id"))
     if user:
-        users = UserService.users_for_perms(['root_administration']).all()
+        users = UserService.users_for_perms(["root_administration"]).all()
         if len(users) < 2 and user.id == users[0].id:
-            request.session.flash(msg, 'warning')
+            request.session.flash(msg, "warning")
         else:
             DBSession.delete(user)
-            request.session.flash(_('User removed'))
+            request.session.flash(_("User removed"))
             return True
     request.response.status = 422
     return False


-@view_config(route_name='users_self', renderer='json',
-             request_method="GET", permission='authenticated')
-@view_config(route_name='users_self', renderer='json',
-             request_method="PATCH", permission='authenticated')
+@view_config(
+    route_name="users_self",
+    renderer="json",
+    request_method="GET",
+    permission="authenticated",
+)
+@view_config(
+    route_name="users_self",
+    renderer="json",
+    request_method="PATCH",
+    permission="authenticated",
+)
 def users_self(request):
     """
     Updates user personal information
     """

-    if request.method == 'PATCH':
+    if request.method == "PATCH":
         form = forms.gen_user_profile_form()(
-            MultiDict(request.unsafe_json_body),
-            csrf_context=request)
+            MultiDict(request.unsafe_json_body), csrf_context=request
+        )
         if form.validate():
             form.populate_obj(request.user)
-            request.session.flash(_('Your profile got updated.'))
+            request.session.flash(_("Your profile got updated."))
         else:
             return HTTPUnprocessableEntity(body=form.errors_json)
     return request.user.get_dict(
-        exclude_keys=['security_code_date', 'notes', 'security_code',
-                      'user_password'],
-        extended_info=True)
+        exclude_keys=["security_code_date", "notes", "security_code", "user_password"],
+        extended_info=True,
+    )


-@view_config(route_name='users_self_property',
-             match_param='key=external_identities', renderer='json',
-             request_method='GET', permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=external_identities",
+    renderer="json",
+    request_method="GET",
+    permission="authenticated",
+)
 def users_external_identies(request):
     user = request.user
-    identities = [{'provider': ident.provider_name,
-                   'id': ident.external_user_name} for ident
-                  in user.external_identities.all()]
+    identities = [
+        {"provider": ident.provider_name, "id": ident.external_user_name}
+        for ident in user.external_identities.all()
+    ]
     return identities


-@view_config(route_name='users_self_property',
-             match_param='key=external_identities', renderer='json',
-             request_method='DELETE', permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=external_identities",
+    renderer="json",
+    request_method="DELETE",
+    permission="authenticated",
+)
 def users_external_identies_DELETE(request):
     """
     Unbinds external identities(google,twitter etc.) from user account
     """
     user = request.user
     for identity in user.external_identities.all():
-        log.info('found identity %s' % identity)
-        if (identity.provider_name == request.params.get('provider') and
-                identity.external_user_name == request.params.get('id')):
-            log.info('remove identity %s' % identity)
+        log.info("found identity %s" % identity)
+        if identity.provider_name == request.params.get(
+            "provider"
+        ) and identity.external_user_name == request.params.get("id"):
+            log.info("remove identity %s" % identity)
             DBSession.delete(identity)
             return True
     return False


-@view_config(route_name='users_self_property',
-             match_param='key=password', renderer='json',
-             request_method='PATCH', permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=password",
+    renderer="json",
+    request_method="PATCH",
+    permission="authenticated",
+)
 def users_password(request):
     """
     Sets new password for user account
     """
     user = request.user
-    form = forms.ChangePasswordForm(MultiDict(request.unsafe_json_body),
-                                    csrf_context=request)
+    form = forms.ChangePasswordForm(
+        MultiDict(request.unsafe_json_body), csrf_context=request
+    )
     form.old_password.user = user
     if form.validate():
         UserService.regenerate_security_code(user)
         UserService.set_password(user, form.new_password.data)
-        msg = 'Your password got updated. ' \
-              'Next time log in with your new credentials.'
+        msg = (
+            "Your password got updated. " "Next time log in with your new credentials."
+        )
         request.session.flash(_(msg))
         return True
     else:
@@ -239,35 +305,49 @@ def users_password(request):
         return False


-@view_config(route_name='users_self_property', match_param='key=websocket',
-             renderer='json', permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=websocket",
+    renderer="json",
+    permission="authenticated",
+)
 def users_websocket(request):
     """
     Handle authorization of users trying to connect
     """
     # handle preflight request
     user = request.user
-    if request.method == 'OPTIONS':
-        res = request.response.body('OK')
+    if request.method == "OPTIONS":
+        res = request.response.body("OK")
         add_cors_headers(res)
         return res
-    applications = UserService.resources_with_perms(user, ['view'], resource_types=['application'])
-    channels = ['app_%s' % app.resource_id for app in applications]
-    payload = {"username": user.user_name,
-               "conn_id": str(uuid.uuid4()),
-               "channels": channels
-               }
+    applications = UserService.resources_with_perms(
+        user, ["view"], resource_types=["application"]
+    )
+    channels = ["app_%s" % app.resource_id for app in applications]
+    payload = {
+        "username": user.user_name,
+        "conn_id": str(uuid.uuid4()),
+        "channels": channels,
+    }
     settings = request.registry.settings
     response = channelstream_request(
-        settings['cometd.secret'], '/connect', payload,
-        servers=[request.registry.settings['cometd_servers']],
-        throw_exceptions=True)
+        settings["cometd.secret"],
+        "/connect",
+        payload,
+        servers=[request.registry.settings["cometd_servers"]],
+        throw_exceptions=True,
+    )
     return payload


-@view_config(route_name='users_self_property', request_method="GET",
-             match_param='key=alert_channels', renderer='json',
-             permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    request_method="GET",
+    match_param="key=alert_channels",
+    renderer="json",
+    permission="authenticated",
+)
 def alert_channels(request):
     """
     Lists all available alert channels
@@ -276,8 +356,13 @@ def alert_channels(request):
     return [c.get_dict(extended_info=True) for c in user.alert_channels]


-@view_config(route_name='users_self_property', match_param='key=alert_actions',
-             request_method="GET", renderer='json', permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=alert_actions",
+    request_method="GET",
+    renderer="json",
+    permission="authenticated",
+)
 def alert_actions(request):
     """
     Lists all available alert channels
@@ -286,41 +371,52 @@ def alert_actions(request):
     return [r.get_dict(extended_info=True) for r in user.alert_actions]


-@view_config(route_name='users_self_property', renderer='json',
-             match_param='key=alert_channels_rules', request_method='POST',
-             permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    renderer="json",
+    match_param="key=alert_channels_rules",
+    request_method="POST",
+    permission="authenticated",
+)
 def alert_channels_rule_POST(request):
     """
     Creates new notification rule for specific alert channel
     """
     user = request.user
-    alert_action = AlertChannelAction(owner_id=request.user.id,
-                                      type='report')
+    alert_action = AlertChannelAction(owner_id=request.user.id, type="report")
     DBSession.add(alert_action)
     DBSession.flush()
     return alert_action.get_dict()


-@view_config(route_name='users_self_property', permission='authenticated',
-             match_param='key=alert_channels_rules',
-             renderer='json', request_method='DELETE')
+@view_config(
+    route_name="users_self_property",
+    permission="authenticated",
+    match_param="key=alert_channels_rules",
+    renderer="json",
+    request_method="DELETE",
+)
 def alert_channels_rule_DELETE(request):
     """
     Removes specific alert channel rule
     """
     user = request.user
     rule_action = AlertChannelActionService.by_owner_id_and_pkey(
-        user.id,
-        request.GET.get('pkey'))
+        user.id, request.GET.get("pkey")
+    )
     if rule_action:
         DBSession.delete(rule_action)
         return True
     return HTTPNotFound()


-@view_config(route_name='users_self_property', permission='authenticated',
-             match_param='key=alert_channels_rules',
-             renderer='json', request_method='PATCH')
+@view_config(
+    route_name="users_self_property",
+    permission="authenticated",
+    match_param="key=alert_channels_rules",
+    renderer="json",
+    request_method="PATCH",
+)
 def alert_channels_rule_PATCH(request):
     """
     Removes specific alert channel rule
@@ -328,41 +424,47 @@ def alert_channels_rule_PATCH(request):
     user = request.user
     json_body = request.unsafe_json_body

-    schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX)
+    schema = build_rule_schema(json_body["rule"], REPORT_TYPE_MATRIX)
     try:
-        schema.deserialize(json_body['rule'])
+        schema.deserialize(json_body["rule"])
     except colander.Invalid as exc:
         return HTTPUnprocessableEntity(body=json.dumps(exc.asdict()))

     rule_action = AlertChannelActionService.by_owner_id_and_pkey(
-        user.id,
-        request.GET.get('pkey'))
+        user.id, request.GET.get("pkey")
+    )

     if rule_action:
-        rule_action.rule = json_body['rule']
-        rule_action.resource_id = json_body['resource_id']
-        rule_action.action = json_body['action']
+        rule_action.rule = json_body["rule"]
+        rule_action.resource_id = json_body["resource_id"]
+        rule_action.action = json_body["action"]
         return rule_action.get_dict()
     return HTTPNotFound()


-@view_config(route_name='users_self_property', permission='authenticated',
-             match_param='key=alert_channels',
-             renderer='json', request_method='PATCH')
+@view_config(
+    route_name="users_self_property",
+    permission="authenticated",
+    match_param="key=alert_channels",
+    renderer="json",
+    request_method="PATCH",
+)
 def alert_channels_PATCH(request):
     user = request.user
-    channel_name = request.GET.get('channel_name')
-    channel_value = request.GET.get('channel_value')
+    channel_name = request.GET.get("channel_name")
+    channel_value = request.GET.get("channel_value")
     # iterate over channels
     channel = None
     for channel in user.alert_channels:
-        if (channel.channel_name == channel_name and
-                channel.channel_value == channel_value):
+        if (
+            channel.channel_name == channel_name
+            and channel.channel_value == channel_value
+        ):
             break
     if not channel:
         return HTTPNotFound()

-    allowed_keys = ['daily_digest', 'send_alerts']
+    allowed_keys = ["daily_digest", "send_alerts"]
     for k, v in request.unsafe_json_body.items():
         if k in allowed_keys:
             setattr(channel, k, v)
@@ -371,67 +473,84 @@ def alert_channels_PATCH(request):
     return channel.get_dict()


-@view_config(route_name='users_self_property', permission='authenticated',
-             match_param='key=alert_channels',
-             request_method="POST", renderer='json')
+@view_config(
+    route_name="users_self_property",
+    permission="authenticated",
+    match_param="key=alert_channels",
+    request_method="POST",
+    renderer="json",
+)
 def alert_channels_POST(request):
     """
     Creates a new email alert channel for user, sends a validation email
     """
     user = request.user
-    form = forms.EmailChannelCreateForm(MultiDict(request.unsafe_json_body),
-                                        csrf_context=request)
+    form = forms.EmailChannelCreateForm(
+        MultiDict(request.unsafe_json_body), csrf_context=request
+    )
     if not form.validate():
         return HTTPUnprocessableEntity(body=form.errors_json)

     email = form.email.data.strip()
     channel = EmailAlertChannel()
-    channel.channel_name = 'email'
+    channel.channel_name = "email"
     channel.channel_value = email
     security_code = generate_random_string(10)
-    channel.channel_json_conf = {'security_code': security_code}
+    channel.channel_json_conf = {"security_code": security_code}
     user.alert_channels.append(channel)

-    email_vars = {'user': user,
-                  'email': email,
-                  'request': request,
-                  'security_code': security_code,
-                  'email_title': "AppEnlight :: "
-                                 "Please authorize your email"}
+    email_vars = {
+        "user": user,
+        "email": email,
+        "request": request,
+        "security_code": security_code,
+        "email_title": "AppEnlight :: " "Please authorize your email",
+    }

-    UserService.send_email(request, recipients=[email],
-                           variables=email_vars,
-                           template='/email_templates/authorize_email.jinja2')
-    request.session.flash(_('Your alert channel was '
-                            'added to the system.'))
+    UserService.send_email(
+        request,
+        recipients=[email],
+        variables=email_vars,
+        template="/email_templates/authorize_email.jinja2",
+    )
+    request.session.flash(_("Your alert channel was " "added to the system."))
     request.session.flash(
-        _('You need to authorize your email channel, a message was '
-          'sent containing necessary information.'),
-        'warning')
+        _(
+            "You need to authorize your email channel, a message was "
+            "sent containing necessary information."
+        ),
+        "warning",
+    )
     DBSession.flush()
     channel.get_dict()


-@view_config(route_name='section_view',
-             match_param=['section=user_section',
-                          'view=alert_channels_authorize'],
-             renderer='string', permission='authenticated')
+@view_config(
+    route_name="section_view",
+    match_param=["section=user_section", "view=alert_channels_authorize"],
+    renderer="string",
+    permission="authenticated",
+)
 def alert_channels_authorize(request):
     """
     Performs alert channel authorization based on auth code sent in email
     """
     user = request.user
     for channel in user.alert_channels:
-        security_code = request.params.get('security_code', '')
-        if channel.channel_json_conf['security_code'] == security_code:
+        security_code = request.params.get("security_code", "")
+        if channel.channel_json_conf["security_code"] == security_code:
             channel.channel_validated = True
-            request.session.flash(_('Your email was authorized.'))
-    return HTTPFound(location=request.route_url('/'))
+            request.session.flash(_("Your email was authorized."))
+    return HTTPFound(location=request.route_url("/"))


-@view_config(route_name='users_self_property', request_method="DELETE",
-             match_param='key=alert_channels', renderer='json',
-             permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    request_method="DELETE",
+    match_param="key=alert_channels",
+    renderer="json",
+    permission="authenticated",
+)
 def alert_channel_DELETE(request):
     """
     Removes alert channel from users channel
@@ -439,20 +558,25 @@ def alert_channel_DELETE(request):
     user = request.user
     channel = None
     for chan in user.alert_channels:
-        if (chan.channel_name == request.params.get('channel_name') and
-                chan.channel_value == request.params.get('channel_value')):
+        if chan.channel_name == request.params.get(
+            "channel_name"
+        ) and chan.channel_value == request.params.get("channel_value"):
             channel = chan
             break
     if channel:
         user.alert_channels.remove(channel)
-        request.session.flash(_('Your channel was removed.'))
+        request.session.flash(_("Your channel was removed."))
         return True
     return False


-@view_config(route_name='users_self_property', permission='authenticated',
-             match_param='key=alert_channels_actions_binds',
-             renderer='json', request_method="POST")
+@view_config(
+    route_name="users_self_property",
+    permission="authenticated",
+    match_param="key=alert_channels_actions_binds",
+    renderer="json",
+    request_method="POST",
+)
 def alert_channels_actions_binds_POST(request):
     """
     Adds alert action to users channels
@@ -460,12 +584,12 @@ def alert_channels_actions_binds_POST(request):
     user = request.user
     json_body = request.unsafe_json_body
     channel = AlertChannelService.by_owner_id_and_pkey(
-        user.id,
-        json_body.get('channel_pkey'))
+        user.id, json_body.get("channel_pkey")
+    )

     rule_action = AlertChannelActionService.by_owner_id_and_pkey(
-        user.id,
-        json_body.get('action_pkey'))
+        user.id, json_body.get("action_pkey")
+    )

     if channel and rule_action:
         if channel.pkey not in [c.pkey for c in rule_action.channels]:
@@ -474,21 +598,25 @@ def alert_channels_actions_binds_POST(request):
         return HTTPUnprocessableEntity()


-@view_config(route_name='users_self_property', request_method="DELETE",
-             match_param='key=alert_channels_actions_binds',
-             renderer='json', permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    request_method="DELETE",
+    match_param="key=alert_channels_actions_binds",
+    renderer="json",
+    permission="authenticated",
+)
 def alert_channels_actions_binds_DELETE(request):
     """
     Removes alert action from users channels
     """
     user = request.user
     channel = AlertChannelService.by_owner_id_and_pkey(
-        user.id,
-        request.GET.get('channel_pkey'))
+        user.id, request.GET.get("channel_pkey")
+    )

     rule_action = AlertChannelActionService.by_owner_id_and_pkey(
-        user.id,
-        request.GET.get('action_pkey'))
+        user.id, request.GET.get("action_pkey")
+    )

     if channel and rule_action:
         if channel.pkey in [c.pkey for c in rule_action.channels]:
@@ -497,18 +625,19 @@ def alert_channels_actions_binds_DELETE(request):
         return HTTPUnprocessableEntity()


-@view_config(route_name='social_auth_abort',
-             renderer='string', permission=NO_PERMISSION_REQUIRED)
+@view_config(
+    route_name="social_auth_abort", renderer="string", permission=NO_PERMISSION_REQUIRED
+)
 def oauth_abort(request):
     """
     Handles problems with authorization via velruse
     """


-@view_config(route_name='social_auth', permission=NO_PERMISSION_REQUIRED)
+@view_config(route_name="social_auth", permission=NO_PERMISSION_REQUIRED)
 def social_auth(request):
     # Get the internal provider name URL variable.
-    provider_name = request.matchdict.get('provider')
+    provider_name = request.matchdict.get("provider")

     # Start the login procedure.
     adapter = WebObAdapter(request, request.response)
@@ -523,12 +652,17 @@ def social_auth(request):

 def handle_auth_error(request, result):
     # Login procedure finished with an error.
-    request.session.pop('zigg.social_auth', None)
-    request.session.flash(_('Something went wrong when we tried to '
-                            'authorize you via external provider. '
-                            'Please try again.'), 'warning')
+    request.session.pop("zigg.social_auth", None)
+    request.session.flash(
+        _(
+            "Something went wrong when we tried to "
+            "authorize you via external provider. "
+            "Please try again."
+        ),
+        "warning",
+    )

-    return HTTPFound(location=request.route_url('/'))
+    return HTTPFound(location=request.route_url("/"))


 def handle_auth_success(request, result):
@@ -539,140 +673,169 @@ def handle_auth_success(request, result):
     result.user.update()

     social_data = {
-        'user': {'data': result.user.data},
-        'credentials': result.user.credentials
+        "user": {"data": result.user.data},
+        "credentials": result.user.credentials,
     }
     # normalize data
-    social_data['user']['id'] = result.user.id
-    user_name = result.user.username or ''
+    social_data["user"]["id"] = result.user.id
+    user_name = result.user.username or ""
     # use email name as username for google
-    if (social_data['credentials'].provider_name == 'google' and
-            result.user.email):
+    if social_data["credentials"].provider_name == "google" and result.user.email:
         user_name = result.user.email
-    social_data['user']['user_name'] = user_name
-    social_data['user']['email'] = result.user.email or ''
+    social_data["user"]["user_name"] = user_name
+    social_data["user"]["email"] = result.user.email or ""

-    request.session['zigg.social_auth'] = social_data
+    request.session["zigg.social_auth"] = social_data
     # user is logged so bind his external identity with account
     if request.user:
         handle_social_data(request, request.user, social_data)
-        request.session.pop('zigg.social_auth', None)
-        return HTTPFound(location=request.route_url('/'))
+        request.session.pop("zigg.social_auth", None)
+        return HTTPFound(location=request.route_url("/"))
     else:
         user = ExternalIdentityService.user_by_external_id_and_provider(
-            social_data['user']['id'],
-            social_data['credentials'].provider_name
+            social_data["user"]["id"], social_data["credentials"].provider_name
         )
         # fix legacy accounts with wrong google ID
-        if not user and social_data['credentials'].provider_name == 'google':
+        if not user and social_data["credentials"].provider_name == "google":
             user = ExternalIdentityService.user_by_external_id_and_provider(
-                social_data['user']['email'],
-                social_data['credentials'].provider_name)
+                social_data["user"]["email"], social_data["credentials"].provider_name
+            )

         # user tokens are already found in our db
         if user:
             handle_social_data(request, user, social_data)
             headers = security.remember(request, user.id)
-            request.session.pop('zigg.social_auth', None)
-            return HTTPFound(location=request.route_url('/'), headers=headers)
+            request.session.pop("zigg.social_auth", None)
+            return HTTPFound(location=request.route_url("/"), headers=headers)
         else:
-            msg = 'You need to finish registration ' \
-                  'process to bind your external identity to your account ' \
-                  'or sign in to existing account'
+            msg = (
+                "You need to finish registration "
+                "process to bind your external identity to your account "
+                "or sign in to existing account"
+            )
             request.session.flash(msg)
-            return HTTPFound(location=request.route_url('register'))
+            return HTTPFound(location=request.route_url("register"))


-@view_config(route_name='section_view', permission='authenticated',
-             match_param=['section=users_section', 'view=search_users'],
-             renderer='json')
+@view_config(
+    route_name="section_view",
+    permission="authenticated",
+    match_param=["section=users_section", "view=search_users"],
+    renderer="json",
+)
 def search_users(request):
     """
     Returns a list of users for autocomplete
     """
     user = request.user
     items_returned = []
-    like_condition = request.params.get('user_name', '') + '%'
+    like_condition = request.params.get("user_name", "") + "%"
     # first append used if email is passed
-    found_user = UserService.by_email(request.params.get('user_name', ''))
+    found_user = UserService.by_email(request.params.get("user_name", ""))
     if found_user:
-        name = '{} {}'.format(found_user.first_name, found_user.last_name)
-        items_returned.append({'user': found_user.user_name, 'name': name})
+        name = "{} {}".format(found_user.first_name, found_user.last_name)
+        items_returned.append({"user": found_user.user_name, "name": name})
     for found_user in UserService.user_names_like(like_condition).limit(20):
-        name = '{} {}'.format(found_user.first_name, found_user.last_name)
-        items_returned.append({'user': found_user.user_name, 'name': name})
+        name = "{} {}".format(found_user.first_name, found_user.last_name)
+        items_returned.append({"user": found_user.user_name, "name": name})
     return items_returned


-@view_config(route_name='users_self_property', match_param='key=auth_tokens',
-             request_method="GET", renderer='json', permission='authenticated')
-@view_config(route_name='users_property', match_param='key=auth_tokens',
-             request_method="GET", renderer='json', permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=auth_tokens",
+    request_method="GET",
+    renderer="json",
+    permission="authenticated",
+)
+@view_config(
+    route_name="users_property",
+    match_param="key=auth_tokens",
+    request_method="GET",
+    renderer="json",
+    permission="authenticated",
+)
 def auth_tokens_list(request):
     """
     Lists all available alert channels
     """
-    if request.matched_route.name == 'users_self_property':
+    if request.matched_route.name == "users_self_property":
         user = request.user
     else:
-        user = UserService.by_id(request.matchdict.get('user_id'))
+        user = UserService.by_id(request.matchdict.get("user_id"))
     if not user:
         return HTTPNotFound()
     return [c.get_dict() for c in user.auth_tokens]


-@view_config(route_name='users_self_property', match_param='key=auth_tokens',
-             request_method="POST", renderer='json',
-             permission='authenticated')
-@view_config(route_name='users_property', match_param='key=auth_tokens',
-             request_method="POST", renderer='json',
-             permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=auth_tokens",
+    request_method="POST",
+    renderer="json",
+    permission="authenticated",
+)
+@view_config(
+    route_name="users_property",
+    match_param="key=auth_tokens",
+    request_method="POST",
+    renderer="json",
+    permission="authenticated",
+)
 def auth_tokens_POST(request):
     """
     Lists all available alert channels
     """
-    if request.matched_route.name == 'users_self_property':
+    if request.matched_route.name == "users_self_property":
         user = request.user
     else:
-        user = UserService.by_id(request.matchdict.get('user_id'))
+        user = UserService.by_id(request.matchdict.get("user_id"))
     if not user:
         return HTTPNotFound()

     req_data = request.safe_json_body or {}
-    if not req_data.get('expires'):
-        req_data.pop('expires', None)
+    if not req_data.get("expires"):
+        req_data.pop("expires", None)
     form = forms.AuthTokenCreateForm(MultiDict(req_data), csrf_context=request)
     if not form.validate():
         return HTTPUnprocessableEntity(body=form.errors_json)
     token = AuthToken()
     form.populate_obj(token)
     if token.expires:
-        interval = h.time_deltas.get(token.expires)['delta']
+        interval = h.time_deltas.get(token.expires)["delta"]
         token.expires = datetime.datetime.utcnow() + interval
     user.auth_tokens.append(token)
     DBSession.flush()
     return token.get_dict()


-@view_config(route_name='users_self_property', match_param='key=auth_tokens',
-             request_method="DELETE", renderer='json',
-             permission='authenticated')
-@view_config(route_name='users_property', match_param='key=auth_tokens',
-             request_method="DELETE", renderer='json',
-             permission='authenticated')
+@view_config(
+    route_name="users_self_property",
+    match_param="key=auth_tokens",
+    request_method="DELETE",
+    renderer="json",
+    permission="authenticated",
+)
+@view_config(
+    route_name="users_property",
+    match_param="key=auth_tokens",
+    request_method="DELETE",
+    renderer="json",
+    permission="authenticated",
+)
 def auth_tokens_DELETE(request):
     """
     Lists all available alert channels
     """
-    if request.matched_route.name == 'users_self_property':
+    if request.matched_route.name == "users_self_property":
         user = request.user
     else:
-        user = UserService.by_id(request.matchdict.get('user_id'))
+        user = UserService.by_id(request.matchdict.get("user_id"))
     if not user:
         return HTTPNotFound()

     for token in user.auth_tokens:
-        if token.token == request.params.get('token'):
+        if token.token == request.params.get("token"):
             user.auth_tokens.remove(token)
             return True
     return False