black: reformat source
Author: ergo

The requested changes are too big and content was truncated.
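Everything in this commit is mechanical: black normalizes string quotes to double quotes, explodes long call sites one argument per line, and adds trailing commas. A sketch of reproducing the transform through black's Python API (equivalent to running `black setup.py src/`; black's default settings assumed):

import black

# One of the setup.py lines changed below, before formatting:
src = "README = open(os.path.join(here, 'README.rst')).read()\n"
print(black.format_str(src, mode=black.Mode()))
# -> README = open(os.path.join(here, "README.rst")).read()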

@@ -4,12 +4,12 @@ import re
4 4 from setuptools import setup, find_packages
5 5
6 6 here = os.path.abspath(os.path.dirname(__file__))
7 README = open(os.path.join(here, 'README.rst')).read()
8 CHANGES = open(os.path.join(here, 'CHANGELOG.rst')).read()
7 README = open(os.path.join(here, "README.rst")).read()
8 CHANGES = open(os.path.join(here, "CHANGELOG.rst")).read()
9 9
10 REQUIREMENTS = open(os.path.join(here, 'requirements.txt')).readlines()
10 REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines()
11 11
12 compiled = re.compile('([^=><]*).*')
12 compiled = re.compile("([^=><]*).*")
13 13
14 14
15 15 def parse_req(req):
@@ -21,7 +21,8 @@ requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]
21 21
22 22 def _get_meta_var(name, data, callback_handler=None):
23 23 import re
24 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
24
25 matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
25 26 if matches:
26 27 if not callable(callback_handler):
27 28 callback_handler = lambda v: v
@@ -29,53 +30,60 @@ def _get_meta_var(name, data, callback_handler=None):
29 30 return callback_handler(eval(matches.groups()[0]))
30 31
31 32
32 with open(os.path.join(here, 'src', 'appenlight', '__init__.py'), 'r') as _meta:
33 with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta:
33 34 _metadata = _meta.read()
34 35
35 with open(os.path.join(here, 'VERSION'), 'r') as _meta_version:
36 with open(os.path.join(here, "VERSION"), "r") as _meta_version:
36 37 __version__ = _meta_version.read().strip()
37 38
38 __license__ = _get_meta_var('__license__', _metadata)
39 __author__ = _get_meta_var('__author__', _metadata)
40 __url__ = _get_meta_var('__url__', _metadata)
41
42 found_packages = find_packages('src')
43 found_packages.append('appenlight.migrations.versions')
44 setup(name='appenlight',
45 description='appenlight',
46 long_description=README + '\n\n' + CHANGES,
47 classifiers=[
48 "Programming Language :: Python",
49 "Framework :: Pylons",
50 "Topic :: Internet :: WWW/HTTP",
51 "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
52 ],
53 version=__version__,
54 license=__license__,
55 author=__author__,
56 url=__url__,
57 keywords='web wsgi bfg pylons pyramid',
58 package_dir={'': 'src'},
59 packages=found_packages,
60 include_package_data=True,
61 zip_safe=False,
62 test_suite='appenlight',
63 install_requires=requires,
64 extras_require={
65 "dev": ["coverage", "pytest", "pyramid", "tox", "mock", "pytest-mock", "webtest"],
66 "lint": ["black"],
67 },
68 entry_points={
69 'paste.app_factory': [
70 'main = appenlight:main'
71 ],
72 'console_scripts': [
73 'appenlight-cleanup = appenlight.scripts.cleanup:main',
74 'appenlight-initializedb = appenlight.scripts.initialize_db:main',
75 'appenlight-migratedb = appenlight.scripts.migratedb:main',
76 'appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main',
77 'appenlight-static = appenlight.scripts.static:main',
78 'appenlight-make-config = appenlight.scripts.make_config:main',
79 ]
80 }
81 )
39 __license__ = _get_meta_var("__license__", _metadata)
40 __author__ = _get_meta_var("__author__", _metadata)
41 __url__ = _get_meta_var("__url__", _metadata)
42
43 found_packages = find_packages("src")
44 found_packages.append("appenlight.migrations.versions")
45 setup(
46 name="appenlight",
47 description="appenlight",
48 long_description=README + "\n\n" + CHANGES,
49 classifiers=[
50 "Programming Language :: Python",
51 "Framework :: Pylons",
52 "Topic :: Internet :: WWW/HTTP",
53 "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
54 ],
55 version=__version__,
56 license=__license__,
57 author=__author__,
58 url=__url__,
59 keywords="web wsgi bfg pylons pyramid",
60 package_dir={"": "src"},
61 packages=found_packages,
62 include_package_data=True,
63 zip_safe=False,
64 test_suite="appenlight",
65 install_requires=requires,
66 extras_require={
67 "dev": [
68 "coverage",
69 "pytest",
70 "pyramid",
71 "tox",
72 "mock",
73 "pytest-mock",
74 "webtest",
75 ],
76 "lint": ["black"],
77 },
78 entry_points={
79 "paste.app_factory": ["main = appenlight:main"],
80 "console_scripts": [
81 "appenlight-cleanup = appenlight.scripts.cleanup:main",
82 "appenlight-initializedb = appenlight.scripts.initialize_db:main",
83 "appenlight-migratedb = appenlight.scripts.migratedb:main",
84 "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main",
85 "appenlight-static = appenlight.scripts.static:main",
86 "appenlight-make-config = appenlight.scripts.make_config:main",
87 ],
88 },
89 )
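A note on the helper above: _get_meta_var regex-matches a `name = value` line in the module source and eval()s the right-hand side, so it only works for simple literal assignments. A hypothetical illustration:

# Illustrative data only -- mirrors what setup.py reads from appenlight/__init__.py.
sample = '__license__ = "Apache 2.0"\n__author__ = "RhodeCode GmbH"\n'
_get_meta_var("__license__", sample)            # -> 'Apache 2.0'
_get_meta_var("__author__", sample, str.upper)  # -> 'RHODECODE GMBH'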
@@ -38,15 +38,17 @@ from redlock import Redlock
38 38 from sqlalchemy import engine_from_config
39 39
40 40 from appenlight.celery import configure_celery
41 from appenlight.lib.configurator import (CythonCompatConfigurator,
42 register_appenlight_plugin)
41 from appenlight.lib.configurator import (
42 CythonCompatConfigurator,
43 register_appenlight_plugin,
44 )
43 45 from appenlight.lib import cache_regions
44 46 from appenlight.lib.ext_json import json
45 47 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
46 48
47 __license__ = 'Apache 2.0'
48 __author__ = 'RhodeCode GmbH'
49 __url__ = 'http://rhodecode.com'
49 __license__ = "Apache 2.0"
50 __author__ = "RhodeCode GmbH"
51 __url__ = "http://rhodecode.com"
50 52 __version__ = pkg_resources.get_distribution("appenlight").parsed_version
51 53
52 54 json_renderer = JSON(serializer=json.dumps, indent=4)
@@ -59,7 +61,7 @@ def datetime_adapter(obj, request):
59 61
60 62
61 63 def all_permissions_adapter(obj, request):
62 return '__all_permissions__'
64 return "__all_permissions__"
63 65
64 66
65 67 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
@@ -70,91 +72,109 @@ def main(global_config, **settings):
70 72 """ This function returns a Pyramid WSGI application.
71 73 """
72 74 auth_tkt_policy = AuthTktAuthenticationPolicy(
73 settings['authtkt.secret'],
74 hashalg='sha512',
75 settings["authtkt.secret"],
76 hashalg="sha512",
75 77 callback=groupfinder,
76 78 max_age=2592000,
77 secure=asbool(settings.get('authtkt.secure', 'false')))
78 auth_token_policy = AuthTokenAuthenticationPolicy(
79 callback=groupfinder
79 secure=asbool(settings.get("authtkt.secure", "false")),
80 80 )
81 auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder)
81 82 authorization_policy = ACLAuthorizationPolicy()
82 83 authentication_policy = AuthenticationStackPolicy()
83 authentication_policy.add_policy('auth_tkt', auth_tkt_policy)
84 authentication_policy.add_policy('auth_token', auth_token_policy)
84 authentication_policy.add_policy("auth_tkt", auth_tkt_policy)
85 authentication_policy.add_policy("auth_token", auth_token_policy)
85 86 # set crypto key
86 encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
87 encryption.ENCRYPTION_SECRET = settings.get("encryption_secret")
87 88 # import this later so encyption key can be monkeypatched
88 89 from appenlight.models import DBSession, register_datastores
89 90
90 91 # registration
91 settings['appenlight.disable_registration'] = asbool(
92 settings.get('appenlight.disable_registration'))
92 settings["appenlight.disable_registration"] = asbool(
93 settings.get("appenlight.disable_registration")
94 )
93 95
94 96 # update config with cometd info
95 settings['cometd_servers'] = {'server': settings['cometd.server'],
96 'secret': settings['cometd.secret']}
97 settings["cometd_servers"] = {
98 "server": settings["cometd.server"],
99 "secret": settings["cometd.secret"],
100 }
97 101
98 102 # Create the Pyramid Configurator.
99 settings['_mail_url'] = settings['mailing.app_url']
103 settings["_mail_url"] = settings["mailing.app_url"]
100 104 config = CythonCompatConfigurator(
101 105 settings=settings,
102 106 authentication_policy=authentication_policy,
103 107 authorization_policy=authorization_policy,
104 root_factory='appenlight.security.RootFactory',
105 default_permission='view')
108 root_factory="appenlight.security.RootFactory",
109 default_permission="view",
110 )
106 111 # custom registry variables
107 112
108 113 # resource type information
109 config.registry.resource_types = ['resource', 'application']
114 config.registry.resource_types = ["resource", "application"]
110 115 # plugin information
111 116 config.registry.appenlight_plugins = {}
112 117
113 config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN')
114 config.add_view_deriver('appenlight.predicates.csrf_view',
115 name='csrf_view')
118 config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN")
119 config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view")
116 120
117 121 # later, when config is available
118 dogpile_config = {'url': settings['redis.url'],
119 "redis_expiration_time": 86400,
120 "redis_distributed_lock": True}
122 dogpile_config = {
123 "url": settings["redis.url"],
124 "redis_expiration_time": 86400,
125 "redis_distributed_lock": True,
126 }
121 127 cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
122 128 config.registry.cache_regions = cache_regions.regions
123 engine = engine_from_config(settings, 'sqlalchemy.',
124 json_serializer=json.dumps)
129 engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps)
125 130 DBSession.configure(bind=engine)
126 131
127 132 # json rederer that serializes datetime
128 config.add_renderer('json', json_renderer)
129 config.add_request_method('appenlight.lib.request.es_conn', 'es_conn', property=True)
130 config.add_request_method('appenlight.lib.request.get_user', 'user',
131 reify=True, property=True)
132 config.add_request_method('appenlight.lib.request.get_csrf_token',
133 'csrf_token', reify=True, property=True)
134 config.add_request_method('appenlight.lib.request.safe_json_body',
135 'safe_json_body', reify=True, property=True)
136 config.add_request_method('appenlight.lib.request.unsafe_json_body',
137 'unsafe_json_body', reify=True, property=True)
138 config.add_request_method('appenlight.lib.request.add_flash_to_headers',
139 'add_flash_to_headers')
140 config.add_request_method('appenlight.lib.request.get_authomatic',
141 'authomatic', reify=True)
142
143 config.include('pyramid_redis_sessions')
144 config.include('pyramid_tm')
145 config.include('pyramid_jinja2')
146 config.include('pyramid_mailer')
147 config.include('appenlight_client.ext.pyramid_tween')
148 config.include('ziggurat_foundations.ext.pyramid.sign_in')
149 es_server_list = aslist(settings['elasticsearch.nodes'])
150 redis_url = settings['redis.url']
151 log.warning('Elasticsearch server list: {}'.format(es_server_list))
152 log.warning('Redis server: {}'.format(redis_url))
133 config.add_renderer("json", json_renderer)
134 config.add_request_method(
135 "appenlight.lib.request.es_conn", "es_conn", property=True
136 )
137 config.add_request_method(
138 "appenlight.lib.request.get_user", "user", reify=True, property=True
139 )
140 config.add_request_method(
141 "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True
142 )
143 config.add_request_method(
144 "appenlight.lib.request.safe_json_body",
145 "safe_json_body",
146 reify=True,
147 property=True,
148 )
149 config.add_request_method(
150 "appenlight.lib.request.unsafe_json_body",
151 "unsafe_json_body",
152 reify=True,
153 property=True,
154 )
155 config.add_request_method(
156 "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers"
157 )
158 config.add_request_method(
159 "appenlight.lib.request.get_authomatic", "authomatic", reify=True
160 )
161
162 config.include("pyramid_redis_sessions")
163 config.include("pyramid_tm")
164 config.include("pyramid_jinja2")
165 config.include("pyramid_mailer")
166 config.include("appenlight_client.ext.pyramid_tween")
167 config.include("ziggurat_foundations.ext.pyramid.sign_in")
168 es_server_list = aslist(settings["elasticsearch.nodes"])
169 redis_url = settings["redis.url"]
170 log.warning("Elasticsearch server list: {}".format(es_server_list))
171 log.warning("Redis server: {}".format(redis_url))
153 172 config.registry.es_conn = Elasticsearch(es_server_list)
154 173 config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)
155 174
156 config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url'], ],
157 retry_count=0, retry_delay=0)
175 config.registry.redis_lockmgr = Redlock(
176 [settings["redis.redlock.url"]], retry_count=0, retry_delay=0
177 )
158 178 # mailer bw compat
159 179 config.registry.mailer = config.registry.getUtility(IMailer)
160 180
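The Redlock manager above is built with retry_count=0 and retry_delay=0, so lock acquisition fails fast instead of blocking workers. A hedged sketch of typical use, assuming the redlock-py API (lock() takes a resource name plus a TTL in milliseconds and returns a lock object or False):

lockmgr = config.registry.redis_lockmgr
lock = lockmgr.lock("lock:expensive_job", 10000)  # 10 s TTL, no retries
if lock:
    try:
        ...  # critical section
    finally:
        lockmgr.unlock(lock)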
@@ -163,47 +183,56 @@ def main(global_config, **settings):
163 183 config.set_session_factory(session_factory)
164 184
165 185 # Configure renderers and event subscribers
166 config.add_jinja2_extension('jinja2.ext.loopcontrols')
167 config.add_jinja2_search_path('appenlight:templates')
186 config.add_jinja2_extension("jinja2.ext.loopcontrols")
187 config.add_jinja2_search_path("appenlight:templates")
168 188 # event subscribers
169 config.add_subscriber("appenlight.subscribers.application_created",
170 "pyramid.events.ApplicationCreated")
171 config.add_subscriber("appenlight.subscribers.add_renderer_globals",
172 "pyramid.events.BeforeRender")
173 config.add_subscriber('appenlight.subscribers.new_request',
174 'pyramid.events.NewRequest')
175 config.add_view_predicate('context_type_class',
176 'appenlight.predicates.contextTypeClass')
177
178 register_datastores(es_conn=config.registry.es_conn,
179 redis_conn=config.registry.redis_conn,
180 redis_lockmgr=config.registry.redis_lockmgr)
189 config.add_subscriber(
190 "appenlight.subscribers.application_created",
191 "pyramid.events.ApplicationCreated",
192 )
193 config.add_subscriber(
194 "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender"
195 )
196 config.add_subscriber(
197 "appenlight.subscribers.new_request", "pyramid.events.NewRequest"
198 )
199 config.add_view_predicate(
200 "context_type_class", "appenlight.predicates.contextTypeClass"
201 )
202
203 register_datastores(
204 es_conn=config.registry.es_conn,
205 redis_conn=config.registry.redis_conn,
206 redis_lockmgr=config.registry.redis_lockmgr,
207 )
181 208
182 209 # base stuff and scan
183 210
184 211 # need to ensure webassets exists otherwise config.override_asset()
185 212 # throws exception
186 if not os.path.exists(settings['webassets.dir']):
187 os.mkdir(settings['webassets.dir'])
188 config.add_static_view(path='appenlight:webassets',
189 name='static', cache_max_age=3600)
190 config.override_asset(to_override='appenlight:webassets/',
191 override_with=settings['webassets.dir'])
192
193 config.include('appenlight.views')
194 config.include('appenlight.views.admin')
195 config.scan(ignore=['appenlight.migrations', 'appenlight.scripts',
196 'appenlight.tests'])
197
198 config.add_directive('register_appenlight_plugin',
199 register_appenlight_plugin)
200
201 for entry_point in iter_entry_points(group='appenlight.plugins'):
213 if not os.path.exists(settings["webassets.dir"]):
214 os.mkdir(settings["webassets.dir"])
215 config.add_static_view(
216 path="appenlight:webassets", name="static", cache_max_age=3600
217 )
218 config.override_asset(
219 to_override="appenlight:webassets/", override_with=settings["webassets.dir"]
220 )
221
222 config.include("appenlight.views")
223 config.include("appenlight.views.admin")
224 config.scan(
225 ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"]
226 )
227
228 config.add_directive("register_appenlight_plugin", register_appenlight_plugin)
229
230 for entry_point in iter_entry_points(group="appenlight.plugins"):
202 231 plugin = entry_point.load()
203 232 plugin.includeme(config)
204 233
205 234 # include other appenlight plugins explictly if needed
206 includes = aslist(settings.get('appenlight.includes', []))
235 includes = aslist(settings.get("appenlight.includes", []))
207 236 for inc in includes:
208 237 config.include(inc)
209 238
@@ -211,8 +240,8 @@ def main(global_config, **settings):
211 240
212 241 def pre_commit():
213 242 jinja_env = config.get_jinja2_environment()
214 jinja_env.filters['tojson'] = json.dumps
215 jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe
243 jinja_env.filters["tojson"] = json.dumps
244 jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe
216 245
217 246 config.action(None, pre_commit, order=PHASE3_CONFIG + 999)
218 247
@@ -34,15 +34,23 @@ from appenlight_client.ext.celery import register_signals
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 register('date_json', json_dumps, json_loads,
38 content_type='application/x-date_json',
39 content_encoding='utf-8')
37 register(
38 "date_json",
39 json_dumps,
40 json_loads,
41 content_type="application/x-date_json",
42 content_encoding="utf-8",
43 )
40 44
41 45 celery = Celery()
42 46
43 celery.user_options['preload'].add(
44 Option('--ini', dest='ini', default=None,
45 help='Specifies pyramid configuration file location.')
47 celery.user_options["preload"].add(
48 Option(
49 "--ini",
50 dest="ini",
51 default=None,
52 help="Specifies pyramid configuration file location.",
53 )
46 54 )
47 55
48 56
@@ -51,19 +59,21 @@ def on_preload_parsed(options, **kwargs):
51 59 """
52 60 This actually configures celery from pyramid config file
53 61 """
54 celery.conf['INI_PYRAMID'] = options['ini']
62 celery.conf["INI_PYRAMID"] = options["ini"]
55 63 import appenlight_client.client as e_client
56 ini_location = options['ini']
64
65 ini_location = options["ini"]
57 66 if not ini_location:
58 raise Exception('You need to pass pyramid ini location using '
59 '--ini=filename.ini argument to the worker')
67 raise Exception(
68 "You need to pass pyramid ini location using "
69 "--ini=filename.ini argument to the worker"
70 )
60 71 env = bootstrap(ini_location[0])
61 api_key = env['request'].registry.settings['appenlight.api_key']
62 tr_config = env['request'].registry.settings.get(
63 'appenlight.transport_config')
64 CONFIG = e_client.get_config({'appenlight.api_key': api_key})
72 api_key = env["request"].registry.settings["appenlight.api_key"]
73 tr_config = env["request"].registry.settings.get("appenlight.transport_config")
74 CONFIG = e_client.get_config({"appenlight.api_key": api_key})
65 75 if tr_config:
66 CONFIG['appenlight.transport_config'] = tr_config
76 CONFIG["appenlight.transport_config"] = tr_config
67 77 APPENLIGHT_CLIENT = e_client.Client(CONFIG)
68 78 # log.addHandler(APPENLIGHT_CLIENT.log_handler)
69 79 register_signals(APPENLIGHT_CLIENT)
@@ -71,101 +81,101 @@ def on_preload_parsed(options, **kwargs):
71 81
72 82
73 83 celery_config = {
74 'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
75 'CELERYD_TASK_TIME_LIMIT': 60,
76 'CELERYD_MAX_TASKS_PER_CHILD': 1000,
77 'CELERY_IGNORE_RESULT': True,
78 'CELERY_ACCEPT_CONTENT': ['date_json'],
79 'CELERY_TASK_SERIALIZER': 'date_json',
80 'CELERY_RESULT_SERIALIZER': 'date_json',
81 'BROKER_URL': None,
82 'CELERYD_CONCURRENCY': None,
83 'CELERY_TIMEZONE': None,
84 'CELERYBEAT_SCHEDULE': {
85 'alerting_reports': {
86 'task': 'appenlight.celery.tasks.alerting_reports',
87 'schedule': timedelta(seconds=60)
84 "CELERY_IMPORTS": ["appenlight.celery.tasks"],
85 "CELERYD_TASK_TIME_LIMIT": 60,
86 "CELERYD_MAX_TASKS_PER_CHILD": 1000,
87 "CELERY_IGNORE_RESULT": True,
88 "CELERY_ACCEPT_CONTENT": ["date_json"],
89 "CELERY_TASK_SERIALIZER": "date_json",
90 "CELERY_RESULT_SERIALIZER": "date_json",
91 "BROKER_URL": None,
92 "CELERYD_CONCURRENCY": None,
93 "CELERY_TIMEZONE": None,
94 "CELERYBEAT_SCHEDULE": {
95 "alerting_reports": {
96 "task": "appenlight.celery.tasks.alerting_reports",
97 "schedule": timedelta(seconds=60),
88 98 },
89 'close_alerts': {
90 'task': 'appenlight.celery.tasks.close_alerts',
91 'schedule': timedelta(seconds=60)
92 }
93 }
99 "close_alerts": {
100 "task": "appenlight.celery.tasks.close_alerts",
101 "schedule": timedelta(seconds=60),
102 },
103 },
94 104 }
95 105 celery.config_from_object(celery_config)
96 106
97 107
98 108 def configure_celery(pyramid_registry):
99 109 settings = pyramid_registry.settings
100 celery_config['BROKER_URL'] = settings['celery.broker_url']
101 celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
102 celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
110 celery_config["BROKER_URL"] = settings["celery.broker_url"]
111 celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"]
112 celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"]
103 113
104 notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
114 notifications_seconds = int(
115 settings.get("tasks.notifications_reports.interval", 60)
116 )
105 117
106 celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
107 'task': 'appenlight.celery.tasks.notifications_reports',
108 'schedule': timedelta(seconds=notifications_seconds)
118 celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = {
119 "task": "appenlight.celery.tasks.notifications_reports",
120 "schedule": timedelta(seconds=notifications_seconds),
109 121 }
110 122
111 celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
112 'task': 'appenlight.celery.tasks.daily_digest',
113 'schedule': crontab(minute=1, hour='4,12,20')
123 celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = {
124 "task": "appenlight.celery.tasks.daily_digest",
125 "schedule": crontab(minute=1, hour="4,12,20"),
114 126 }
115 127
116 if asbool(settings.get('celery.always_eager')):
117 celery_config['CELERY_ALWAYS_EAGER'] = True
118 celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
128 if asbool(settings.get("celery.always_eager")):
129 celery_config["CELERY_ALWAYS_EAGER"] = True
130 celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True
119 131
120 132 for plugin in pyramid_registry.appenlight_plugins.values():
121 if plugin.get('celery_tasks'):
122 celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
123 if plugin.get('celery_beats'):
124 for name, config in plugin['celery_beats']:
125 celery_config['CELERYBEAT_SCHEDULE'][name] = config
133 if plugin.get("celery_tasks"):
134 celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"])
135 if plugin.get("celery_beats"):
136 for name, config in plugin["celery_beats"]:
137 celery_config["CELERYBEAT_SCHEDULE"][name] = config
126 138 celery.config_from_object(celery_config)
127 139
128 140
129 141 @task_prerun.connect
130 142 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
131 if hasattr(celery, 'pyramid'):
143 if hasattr(celery, "pyramid"):
132 144 env = celery.pyramid
133 env = prepare(registry=env['request'].registry)
134 proper_base_url = env['request'].registry.settings['mailing.app_url']
135 tmp_req = Request.blank('/', base_url=proper_base_url)
145 env = prepare(registry=env["request"].registry)
146 proper_base_url = env["request"].registry.settings["mailing.app_url"]
147 tmp_req = Request.blank("/", base_url=proper_base_url)
136 148 # ensure tasks generate url for right domain from config
137 env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
138 env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
139 env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
140 env['request'].environ['wsgi.url_scheme'] = \
141 tmp_req.environ['wsgi.url_scheme']
149 env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"]
150 env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"]
151 env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"]
152 env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"]
142 153 get_current_request().tm.begin()
143 154
144 155
145 156 @task_success.connect
146 157 def task_success_signal(result, **kwargs):
147 158 get_current_request().tm.commit()
148 if hasattr(celery, 'pyramid'):
159 if hasattr(celery, "pyramid"):
149 160 celery.pyramid["closer"]()
150 161
151 162
152 163 @task_retry.connect
153 164 def task_retry_signal(request, reason, einfo, **kwargs):
154 165 get_current_request().tm.abort()
155 if hasattr(celery, 'pyramid'):
166 if hasattr(celery, "pyramid"):
156 167 celery.pyramid["closer"]()
157 168
158 169
159 170 @task_failure.connect
160 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
161 **kwaargs):
171 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs):
162 172 get_current_request().tm.abort()
163 if hasattr(celery, 'pyramid'):
173 if hasattr(celery, "pyramid"):
164 174 celery.pyramid["closer"]()
165 175
166 176
167 177 @task_revoked.connect
168 178 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
169 179 get_current_request().tm.abort()
170 if hasattr(celery, 'pyramid'):
180 if hasattr(celery, "pyramid"):
171 181 celery.pyramid["closer"]()
@@ -17,38 +17,29 @@
17 17 import json
18 18 from datetime import datetime, date, timedelta
19 19
20 DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
20 DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
21 21
22 22
23 23 class DateEncoder(json.JSONEncoder):
24 24 def default(self, obj):
25 25 if isinstance(obj, datetime):
26 return {
27 '__type__': '__datetime__',
28 'iso': obj.strftime(DATE_FORMAT)
29 }
26 return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
30 27 elif isinstance(obj, date):
31 return {
32 '__type__': '__date__',
33 'iso': obj.strftime(DATE_FORMAT)
34 }
28 return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)}
35 29 elif isinstance(obj, timedelta):
36 return {
37 '__type__': '__timedelta__',
38 'seconds': obj.total_seconds()
39 }
30 return {"__type__": "__timedelta__", "seconds": obj.total_seconds()}
40 31 else:
41 32 return json.JSONEncoder.default(self, obj)
42 33
43 34
44 35 def date_decoder(dct):
45 if '__type__' in dct:
46 if dct['__type__'] == '__datetime__':
47 return datetime.strptime(dct['iso'], DATE_FORMAT)
48 elif dct['__type__'] == '__date__':
49 return datetime.strptime(dct['iso'], DATE_FORMAT).date()
50 elif dct['__type__'] == '__timedelta__':
51 return timedelta(seconds=dct['seconds'])
36 if "__type__" in dct:
37 if dct["__type__"] == "__datetime__":
38 return datetime.strptime(dct["iso"], DATE_FORMAT)
39 elif dct["__type__"] == "__date__":
40 return datetime.strptime(dct["iso"], DATE_FORMAT).date()
41 elif dct["__type__"] == "__timedelta__":
42 return timedelta(seconds=dct["seconds"])
52 43 return dct
53 44
54 45
@@ -57,4 +48,4 @@ def json_dumps(obj):
57 48
58 49
59 50 def json_loads(obj):
60 return json.loads(obj.decode('utf8'), object_hook=date_decoder)
51 return json.loads(obj.decode("utf8"), object_hook=date_decoder)
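The encoder/decoder pair above tags temporal values with a "__type__" marker so they survive JSON transport. A minimal round trip using the stdlib directly (json_dumps is assumed to wrap json.dumps(obj, cls=DateEncoder)):

import json
from datetime import timedelta

payload = json.dumps({"ttl": timedelta(seconds=90)}, cls=DateEncoder)
# '{"ttl": {"__type__": "__timedelta__", "seconds": 90.0}}'
assert json.loads(payload, object_hook=date_decoder)["ttl"] == timedelta(seconds=90)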
@@ -51,9 +51,11 @@ from appenlight.lib.enums import ReportType
51 51
52 52 log = get_task_logger(__name__)
53 53
54 sample_boundries = list(range(100, 1000, 100)) + \
55 list(range(1000, 10000, 1000)) + \
56 list(range(10000, 100000, 5000))
54 sample_boundries = (
55 list(range(100, 1000, 100))
56 + list(range(1000, 10000, 1000))
57 + list(range(10000, 100000, 5000))
58 )
57 59
58 60
59 61 def pick_sample(total_occurences, report_type=None):
@@ -70,9 +72,9 @@ def pick_sample(total_occurences, report_type=None):
70 72
71 73 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
72 74 def test_exception_task():
73 log.error('test celery log', extra={'location': 'celery'})
74 log.warning('test celery log', extra={'location': 'celery'})
75 raise Exception('Celery exception test')
75 log.error("test celery log", extra={"location": "celery"})
76 log.warning("test celery log", extra={"location": "celery"})
77 raise Exception("Celery exception test")
76 78
77 79
78 80 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
@@ -81,9 +83,9 @@ def test_retry_exception_task():
81 83 import time
82 84
83 85 time.sleep(1.3)
84 log.error('test retry celery log', extra={'location': 'celery'})
85 log.warning('test retry celery log', extra={'location': 'celery'})
86 raise Exception('Celery exception test')
86 log.error("test retry celery log", extra={"location": "celery"})
87 log.warning("test retry celery log", extra={"location": "celery"})
88 raise Exception("Celery exception test")
87 89 except Exception as exc:
88 90 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
89 91 raise
@@ -92,7 +94,7 @@ def test_retry_exception_task():
92 94
93 95 @celery.task(queue="reports", default_retry_delay=600, max_retries=144)
94 96 def add_reports(resource_id, request_params, dataset, **kwargs):
95 proto_version = parse_proto(request_params.get('protocol_version', ''))
97 proto_version = parse_proto(request_params.get("protocol_version", ""))
96 98 current_time = datetime.utcnow().replace(second=0, microsecond=0)
97 99 try:
98 100 # we will store solr docs here for single insert
@@ -114,22 +116,26 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
114 116 report_group = ReportGroupService.by_hash_and_resource(
115 117 report.resource_id,
116 118 report.grouping_hash,
117 since_when=datetime.utcnow().date().replace(day=1)
119 since_when=datetime.utcnow().date().replace(day=1),
118 120 )
119 occurences = report_data.get('occurences', 1)
121 occurences = report_data.get("occurences", 1)
120 122 if not report_group:
121 123 # total reports will be +1 moment later
122 report_group = ReportGroup(grouping_hash=report.grouping_hash,
123 occurences=0, total_reports=0,
124 last_report=0,
125 priority=report.priority,
126 error=report.error,
127 first_timestamp=report.start_time)
124 report_group = ReportGroup(
125 grouping_hash=report.grouping_hash,
126 occurences=0,
127 total_reports=0,
128 last_report=0,
129 priority=report.priority,
130 error=report.error,
131 first_timestamp=report.start_time,
132 )
128 133 report_group._skip_ft_index = True
129 134 report_group.report_type = report.report_type
130 135 report.report_group_time = report_group.first_timestamp
131 add_sample = pick_sample(report_group.occurences,
132 report_type=report_group.report_type)
136 add_sample = pick_sample(
137 report_group.occurences, report_type=report_group.report_type
138 )
133 139 if add_sample:
134 140 resource.report_groups.append(report_group)
135 141 report_group.reports.append(report)
@@ -144,28 +150,26 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
144 150 for s_call in slow_calls:
145 151 if s_call.partition_id not in es_slow_calls_docs:
146 152 es_slow_calls_docs[s_call.partition_id] = []
147 es_slow_calls_docs[s_call.partition_id].append(
148 s_call.es_doc())
153 es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc())
149 154 # try generating new stat rows if needed
150 155 else:
151 156 # required for postprocessing to not fail later
152 157 report.report_group = report_group
153 158
154 stat_row = ReportService.generate_stat_rows(
155 report, resource, report_group)
159 stat_row = ReportService.generate_stat_rows(report, resource, report_group)
156 160 if stat_row.partition_id not in es_reports_stats_rows:
157 161 es_reports_stats_rows[stat_row.partition_id] = []
158 es_reports_stats_rows[stat_row.partition_id].append(
159 stat_row.es_doc())
162 es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc())
160 163
161 164 # see if we should mark 10th occurence of report
162 165 last_occurences_10 = int(math.floor(report_group.occurences / 10))
163 curr_occurences_10 = int(math.floor(
164 (report_group.occurences + report.occurences) / 10))
165 last_occurences_100 = int(
166 math.floor(report_group.occurences / 100))
167 curr_occurences_100 = int(math.floor(
168 (report_group.occurences + report.occurences) / 100))
166 curr_occurences_10 = int(
167 math.floor((report_group.occurences + report.occurences) / 10)
168 )
169 last_occurences_100 = int(math.floor(report_group.occurences / 100))
170 curr_occurences_100 = int(
171 math.floor((report_group.occurences + report.occurences) / 100)
172 )
169 173 notify_occurences_10 = last_occurences_10 != curr_occurences_10
170 174 notify_occurences_100 = last_occurences_100 != curr_occurences_100
171 175 report_group.occurences = ReportGroup.occurences + occurences
@@ -178,39 +182,47 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
178 182 if added_details:
179 183 report_group.total_reports = ReportGroup.total_reports + 1
180 184 report_group.last_report = report.id
181 report_group.set_notification_info(notify_10=notify_occurences_10,
182 notify_100=notify_occurences_100)
185 report_group.set_notification_info(
186 notify_10=notify_occurences_10, notify_100=notify_occurences_100
187 )
183 188 DBSession.flush()
184 189 report_group.get_report().notify_channel(report_group)
185 190 if report_group.partition_id not in es_report_group_docs:
186 191 es_report_group_docs[report_group.partition_id] = []
187 192 es_report_group_docs[report_group.partition_id].append(
188 report_group.es_doc())
193 report_group.es_doc()
194 )
189 195
190 action = 'REPORT'
191 log_msg = '%s: %s %s, client: %s, proto: %s' % (
196 action = "REPORT"
197 log_msg = "%s: %s %s, client: %s, proto: %s" % (
192 198 action,
193 report_data.get('http_status', 'unknown'),
199 report_data.get("http_status", "unknown"),
194 200 str(resource),
195 report_data.get('client'),
196 proto_version)
201 report_data.get("client"),
202 proto_version,
203 )
197 204 log.info(log_msg)
198 205 total_reports = len(dataset)
199 206 redis_pipeline = Datastores.redis.pipeline(transaction=False)
200 key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
207 key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
201 208 redis_pipeline.incr(key, total_reports)
202 209 redis_pipeline.expire(key, 3600 * 24)
203 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
204 resource.owner_user_id, current_time)
210 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
211 resource.owner_user_id, current_time
212 )
205 213 redis_pipeline.incr(key, total_reports)
206 214 redis_pipeline.expire(key, 3600)
207 key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format(
208 resource_id, current_time.replace(minute=0))
215 key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
216 resource_id, current_time.replace(minute=0)
217 )
209 218 redis_pipeline.incr(key, total_reports)
210 219 redis_pipeline.expire(key, 3600 * 24 * 7)
211 220 redis_pipeline.sadd(
212 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
213 current_time.replace(minute=0)), resource_id)
221 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
222 current_time.replace(minute=0)
223 ),
224 resource_id,
225 )
214 226 redis_pipeline.execute()
215 227
216 228 add_reports_es(es_report_group_docs, es_report_docs)
@@ -227,11 +239,11 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
227 239 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
228 240 def add_reports_es(report_group_docs, report_docs):
229 241 for k, v in report_group_docs.items():
230 to_update = {'_index': k, '_type': 'report_group'}
242 to_update = {"_index": k, "_type": "report_group"}
231 243 [i.update(to_update) for i in v]
232 244 elasticsearch.helpers.bulk(Datastores.es, v)
233 245 for k, v in report_docs.items():
234 to_update = {'_index': k, '_type': 'report'}
246 to_update = {"_index": k, "_type": "report"}
235 247 [i.update(to_update) for i in v]
236 248 elasticsearch.helpers.bulk(Datastores.es, v)
237 249
@@ -239,7 +251,7 @@ def add_reports_es(report_group_docs, report_docs):
239 251 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
240 252 def add_reports_slow_calls_es(es_docs):
241 253 for k, v in es_docs.items():
242 to_update = {'_index': k, '_type': 'log'}
254 to_update = {"_index": k, "_type": "log"}
243 255 [i.update(to_update) for i in v]
244 256 elasticsearch.helpers.bulk(Datastores.es, v)
245 257
@@ -247,14 +259,14 @@ def add_reports_slow_calls_es(es_docs):
247 259 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
248 260 def add_reports_stats_rows_es(es_docs):
249 261 for k, v in es_docs.items():
250 to_update = {'_index': k, '_type': 'log'}
262 to_update = {"_index": k, "_type": "log"}
251 263 [i.update(to_update) for i in v]
252 264 elasticsearch.helpers.bulk(Datastores.es, v)
253 265
254 266
255 267 @celery.task(queue="logs", default_retry_delay=600, max_retries=144)
256 268 def add_logs(resource_id, request_params, dataset, **kwargs):
257 proto_version = request_params.get('protocol_version')
269 proto_version = request_params.get("protocol_version")
258 270 current_time = datetime.utcnow().replace(second=0, microsecond=0)
259 271
260 272 try:
@@ -264,16 +276,15 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
264 276 ns_pairs = []
265 277 for entry in dataset:
266 278 # gather pk and ns so we can remove older versions of row later
267 if entry['primary_key'] is not None:
268 ns_pairs.append({"pk": entry['primary_key'],
269 "ns": entry['namespace']})
279 if entry["primary_key"] is not None:
280 ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]})
270 281 log_entry = Log()
271 282 log_entry.set_data(entry, resource=resource)
272 283 log_entry._skip_ft_index = True
273 284 resource.logs.append(log_entry)
274 285 DBSession.flush()
275 286 # insert non pk rows first
276 if entry['primary_key'] is None:
287 if entry["primary_key"] is None:
277 288 es_docs[log_entry.partition_id].append(log_entry.es_doc())
278 289
279 290 # 2nd pass to delete all log entries from db foe same pk/ns pair
@@ -282,7 +293,8 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
282 293 es_docs = collections.defaultdict(list)
283 294 es_docs_to_delete = collections.defaultdict(list)
284 295 found_pkey_logs = LogService.query_by_primary_key_and_namespace(
285 list_of_pairs=ns_pairs)
296 list_of_pairs=ns_pairs
297 )
286 298 log_dict = {}
287 299 for log_entry in found_pkey_logs:
288 300 log_key = (log_entry.primary_key, log_entry.namespace)
@@ -299,51 +311,58 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
299 311 ids_to_delete.append(e.log_id)
300 312 es_docs_to_delete[e.partition_id].append(e.delete_hash)
301 313
302 es_docs_to_delete[log_entry.partition_id].append(
303 log_entry.delete_hash)
314 es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash)
304 315
305 316 es_docs[log_entry.partition_id].append(log_entry.es_doc())
306 317
307 318 if ids_to_delete:
308 query = DBSession.query(Log).filter(
309 Log.log_id.in_(ids_to_delete))
319 query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete))
310 320 query.delete(synchronize_session=False)
311 321 if es_docs_to_delete:
312 322 # batch this to avoid problems with default ES bulk limits
313 323 for es_index in es_docs_to_delete.keys():
314 324 for batch in in_batches(es_docs_to_delete[es_index], 20):
315 query = {"query": {'terms': {'delete_hash': batch}}}
325 query = {"query": {"terms": {"delete_hash": batch}}}
316 326
317 327 try:
318 328 Datastores.es.transport.perform_request(
319 "DELETE", '/{}/{}/_query'.format(es_index, 'log'), body=query)
329 "DELETE",
330 "/{}/{}/_query".format(es_index, "log"),
331 body=query,
332 )
320 333 except elasticsearch.exceptions.NotFoundError as exc:
321 msg = 'skipping index {}'.format(es_index)
334 msg = "skipping index {}".format(es_index)
322 335 log.info(msg)
323 336
324 337 total_logs = len(dataset)
325 338
326 log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
339 log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % (
327 340 str(resource),
328 341 total_logs,
329 proto_version)
342 proto_version,
343 )
330 344 log.info(log_msg)
331 345 # mark_changed(session)
332 346 redis_pipeline = Datastores.redis.pipeline(transaction=False)
333 key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
347 key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
334 348 redis_pipeline.incr(key, total_logs)
335 349 redis_pipeline.expire(key, 3600 * 24)
336 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
337 resource.owner_user_id, current_time)
350 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
351 resource.owner_user_id, current_time
352 )
338 353 redis_pipeline.incr(key, total_logs)
339 354 redis_pipeline.expire(key, 3600)
340 key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format(
341 resource_id, current_time.replace(minute=0))
355 key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format(
356 resource_id, current_time.replace(minute=0)
357 )
342 358 redis_pipeline.incr(key, total_logs)
343 359 redis_pipeline.expire(key, 3600 * 24 * 7)
344 360 redis_pipeline.sadd(
345 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
346 current_time.replace(minute=0)), resource_id)
361 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
362 current_time.replace(minute=0)
363 ),
364 resource_id,
365 )
347 366 redis_pipeline.execute()
348 367 add_logs_es(es_docs)
349 368 return True
@@ -357,7 +376,7 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
357 376 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
358 377 def add_logs_es(es_docs):
359 378 for k, v in es_docs.items():
360 to_update = {'_index': k, '_type': 'log'}
379 to_update = {"_index": k, "_type": "log"}
361 380 [i.update(to_update) for i in v]
362 381 elasticsearch.helpers.bulk(Datastores.es, v)
363 382
@@ -371,45 +390,51 @@ def add_metrics(resource_id, request_params, dataset, proto_version):
371 390 es_docs = []
372 391 rows = []
373 392 for metric in dataset:
374 tags = dict(metric['tags'])
375 server_n = tags.get('server_name', metric['server_name']).lower()
376 tags['server_name'] = server_n or 'unknown'
393 tags = dict(metric["tags"])
394 server_n = tags.get("server_name", metric["server_name"]).lower()
395 tags["server_name"] = server_n or "unknown"
377 396 new_metric = Metric(
378 timestamp=metric['timestamp'],
397 timestamp=metric["timestamp"],
379 398 resource_id=resource.resource_id,
380 namespace=metric['namespace'],
381 tags=tags)
399 namespace=metric["namespace"],
400 tags=tags,
401 )
382 402 rows.append(new_metric)
383 403 es_docs.append(new_metric.es_doc())
384 404 session = DBSession()
385 405 session.bulk_save_objects(rows)
386 406 session.flush()
387 407
388 action = 'METRICS'
389 metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
408 action = "METRICS"
409 metrics_msg = "%s: %s, metrics: %s, proto:%s" % (
390 410 action,
391 411 str(resource),
392 412 len(dataset),
393 proto_version
413 proto_version,
394 414 )
395 415 log.info(metrics_msg)
396 416
397 417 mark_changed(session)
398 418 redis_pipeline = Datastores.redis.pipeline(transaction=False)
399 key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
419 key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
400 420 redis_pipeline.incr(key, len(rows))
401 421 redis_pipeline.expire(key, 3600 * 24)
402 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
403 resource.owner_user_id, current_time)
422 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
423 resource.owner_user_id, current_time
424 )
404 425 redis_pipeline.incr(key, len(rows))
405 426 redis_pipeline.expire(key, 3600)
406 key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format(
407 resource_id, current_time.replace(minute=0))
427 key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format(
428 resource_id, current_time.replace(minute=0)
429 )
408 430 redis_pipeline.incr(key, len(rows))
409 431 redis_pipeline.expire(key, 3600 * 24 * 7)
410 432 redis_pipeline.sadd(
411 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
412 current_time.replace(minute=0)), resource_id)
433 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
434 current_time.replace(minute=0)
435 ),
436 resource_id,
437 )
413 438 redis_pipeline.execute()
414 439 add_metrics_es(es_docs)
415 440 return True
@@ -423,8 +448,8 @@ def add_metrics(resource_id, request_params, dataset, proto_version):
423 448 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
424 449 def add_metrics_es(es_docs):
425 450 for doc in es_docs:
426 partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d')
427 Datastores.es.index(partition, 'log', doc)
451 partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d")
452 Datastores.es.index(partition, "log", doc)
428 453
429 454
430 455 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
@@ -435,10 +460,12 @@ def check_user_report_notifications(resource_id):
435 460 application = ApplicationService.by_id(resource_id)
436 461 if not application:
437 462 return
438 error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
439 ReportType.error, resource_id)
440 slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
441 ReportType.slow, resource_id)
463 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
464 ReportType.error, resource_id
465 )
466 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
467 ReportType.slow, resource_id
468 )
442 469 error_group_ids = Datastores.redis.smembers(error_key)
443 470 slow_group_ids = Datastores.redis.smembers(slow_key)
444 471 Datastores.redis.delete(error_key)
@@ -448,8 +475,7 @@ def check_user_report_notifications(resource_id):
448 475 group_ids = err_gids + slow_gids
449 476 occurence_dict = {}
450 477 for g_id in group_ids:
451 key = REDIS_KEYS['counters']['report_group_occurences'].format(
452 g_id)
478 key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id)
453 479 val = Datastores.redis.get(key)
454 480 Datastores.redis.delete(key)
455 481 if val:
@@ -460,14 +486,23 @@ def check_user_report_notifications(resource_id):
460 486 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
461 487
462 488 ApplicationService.check_for_groups_alert(
463 application, 'alert', report_groups=report_groups,
464 occurence_dict=occurence_dict)
465 users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
489 application,
490 "alert",
491 report_groups=report_groups,
492 occurence_dict=occurence_dict,
493 )
494 users = set(
495 [p.user for p in ResourceService.users_for_perm(application, "view")]
496 )
466 497 report_groups = report_groups.all()
467 498 for user in users:
468 UserService.report_notify(user, request, application,
469 report_groups=report_groups,
470 occurence_dict=occurence_dict)
499 UserService.report_notify(
500 user,
501 request,
502 application,
503 report_groups=report_groups,
504 occurence_dict=occurence_dict,
505 )
471 506 for group in report_groups:
472 507 # marks report_groups as notified
473 508 if not group.notified:
@@ -485,12 +520,12 @@ def check_alerts(resource_id):
485 520 application = ApplicationService.by_id(resource_id)
486 521 if not application:
487 522 return
488 error_key = REDIS_KEYS[
489 'reports_to_notify_per_type_per_app_alerting'].format(
490 ReportType.error, resource_id)
491 slow_key = REDIS_KEYS[
492 'reports_to_notify_per_type_per_app_alerting'].format(
493 ReportType.slow, resource_id)
523 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
524 ReportType.error, resource_id
525 )
526 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
527 ReportType.slow, resource_id
528 )
494 529 error_group_ids = Datastores.redis.smembers(error_key)
495 530 slow_group_ids = Datastores.redis.smembers(slow_key)
496 531 Datastores.redis.delete(error_key)
@@ -500,9 +535,9 @@ def check_alerts(resource_id):
500 535 group_ids = err_gids + slow_gids
501 536 occurence_dict = {}
502 537 for g_id in group_ids:
503 key = REDIS_KEYS['counters'][
504 'report_group_occurences_alerting'].format(
505 g_id)
538 key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(
539 g_id
540 )
506 541 val = Datastores.redis.get(key)
507 542 Datastores.redis.delete(key)
508 543 if val:
@@ -513,8 +548,12 @@ def check_alerts(resource_id):
513 548 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
514 549
515 550 ApplicationService.check_for_groups_alert(
516 application, 'alert', report_groups=report_groups,
517 occurence_dict=occurence_dict, since_when=since_when)
551 application,
552 "alert",
553 report_groups=report_groups,
554 occurence_dict=occurence_dict,
555 since_when=since_when,
556 )
518 557 except Exception as exc:
519 558 print_traceback(log)
520 559 raise
@@ -522,21 +561,21 @@ def check_alerts(resource_id):
522 561
523 562 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
524 563 def close_alerts():
525 log.warning('Checking alerts')
564 log.warning("Checking alerts")
526 565 since_when = datetime.utcnow()
527 566 try:
528 event_types = [Event.types['error_report_alert'],
529 Event.types['slow_report_alert'], ]
530 statuses = [Event.statuses['active']]
567 event_types = [
568 Event.types["error_report_alert"],
569 Event.types["slow_report_alert"],
570 ]
571 statuses = [Event.statuses["active"]]
531 572 # get events older than 5 min
532 573 events = EventService.by_type_and_status(
533 event_types,
534 statuses,
535 older_than=(since_when - timedelta(minutes=5)))
574 event_types, statuses, older_than=(since_when - timedelta(minutes=5))
575 )
536 576 for event in events:
537 577 # see if we can close them
538 event.validate_or_close(
539 since_when=(since_when - timedelta(minutes=1)))
578 event.validate_or_close(since_when=(since_when - timedelta(minutes=1)))
540 579 except Exception as exc:
541 580 print_traceback(log)
542 581 raise
@@ -545,12 +584,18 @@ def close_alerts():
545 584 @celery.task(queue="default", default_retry_delay=600, max_retries=144)
546 585 def update_tag_counter(tag_name, tag_value, count):
547 586 try:
548 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
549 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
550 sa.types.TEXT))
551 query.update({'times_seen': Tag.times_seen + count,
552 'last_timestamp': datetime.utcnow()},
553 synchronize_session=False)
587 query = (
588 DBSession.query(Tag)
589 .filter(Tag.name == tag_name)
590 .filter(
591 sa.cast(Tag.value, sa.types.TEXT)
592 == sa.cast(json.dumps(tag_value), sa.types.TEXT)
593 )
594 )
595 query.update(
596 {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()},
597 synchronize_session=False,
598 )
554 599 session = DBSession()
555 600 mark_changed(session)
556 601 return True
@@ -566,8 +611,8 @@ def update_tag_counters():
566 611 """
567 612 Sets task to update counters for application tags
568 613 """
569 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
570 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
614 tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1)
615 Datastores.redis.delete(REDIS_KEYS["seen_tag_list"])
571 616 c = collections.Counter(tags)
572 617 for t_json, count in c.items():
573 618 tag_info = json.loads(t_json)
@@ -580,28 +625,34 @@ def daily_digest():
580 625 Sends daily digest with top 50 error reports
581 626 """
582 627 request = get_current_request()
583 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
584 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
628 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
629 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
585 630 since_when = datetime.utcnow() - timedelta(hours=8)
586 log.warning('Generating daily digests')
631 log.warning("Generating daily digests")
587 632 for resource_id in apps:
588 resource_id = resource_id.decode('utf8')
633 resource_id = resource_id.decode("utf8")
589 634 end_date = datetime.utcnow().replace(microsecond=0, second=0)
590 filter_settings = {'resource': [resource_id],
591 'tags': [{'name': 'type',
592 'value': ['error'], 'op': None}],
593 'type': 'error', 'start_date': since_when,
594 'end_date': end_date}
635 filter_settings = {
636 "resource": [resource_id],
637 "tags": [{"name": "type", "value": ["error"], "op": None}],
638 "type": "error",
639 "start_date": since_when,
640 "end_date": end_date,
641 }
595 642
596 643 reports = ReportGroupService.get_trending(
597 request, filter_settings=filter_settings, limit=50)
644 request, filter_settings=filter_settings, limit=50
645 )
598 646
599 647 application = ApplicationService.by_id(resource_id)
600 648 if application:
601 users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
649 users = set(
650 [p.user for p in ResourceService.users_for_perm(application, "view")]
651 )
602 652 for user in users:
603 user.send_digest(request, application, reports=reports,
604 since_when=since_when)
653 user.send_digest(
654 request, application, reports=reports, since_when=since_when
655 )
605 656
606 657
607 658 @celery.task(queue="default")
@@ -610,11 +661,12 @@ def notifications_reports():
610 661 Loop that checks redis for info and then issues new tasks to celery to
611 662 issue notifications
612 663 """
613 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
614 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
664 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
665 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
615 666 for app in apps:
616 log.warning('Notify for app: %s' % app)
617 check_user_report_notifications.delay(app.decode('utf8'))
667 log.warning("Notify for app: %s" % app)
668 check_user_report_notifications.delay(app.decode("utf8"))
669
618 670
619 671 @celery.task(queue="default")
620 672 def alerting_reports():
@@ -624,34 +676,33 @@ def alerting_reports():
624 676 - which applications should have new alerts opened
625 677 """
626 678
627 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
628 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
679 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"])
680 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"])
629 681 for app in apps:
630 log.warning('Notify for app: %s' % app)
631 check_alerts.delay(app.decode('utf8'))
682 log.warning("Notify for app: %s" % app)
683 check_alerts.delay(app.decode("utf8"))
632 684
633 685
634 @celery.task(queue="default", soft_time_limit=3600 * 4,
635 hard_time_limit=3600 * 4, max_retries=144)
686 @celery.task(
687 queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144
688 )
636 689 def logs_cleanup(resource_id, filter_settings):
637 690 request = get_current_request()
638 691 request.tm.begin()
639 692 es_query = {
640 693 "query": {
641 "filtered": {
642 "filter": {
643 "and": [{"term": {"resource_id": resource_id}}]
644 }
645 }
694 "filtered": {"filter": {"and": [{"term": {"resource_id": resource_id}}]}}
646 695 }
647 696 }
648 697
649 698 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
650 if filter_settings['namespace']:
651 query = query.filter(Log.namespace == filter_settings['namespace'][0])
652 es_query['query']['filtered']['filter']['and'].append(
653 {"term": {"namespace": filter_settings['namespace'][0]}}
699 if filter_settings["namespace"]:
700 query = query.filter(Log.namespace == filter_settings["namespace"][0])
701 es_query["query"]["filtered"]["filter"]["and"].append(
702 {"term": {"namespace": filter_settings["namespace"][0]}}
654 703 )
655 704 query.delete(synchronize_session=False)
656 705 request.tm.commit()
657 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format('rcae_l_*', 'log'), body=es_query)
706 Datastores.es.transport.perform_request(
707 "DELETE", "/{}/{}/_query".format("rcae_l_*", "log"), body=es_query
708 )
@@ -14,6 +14,7 @@
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17
17 18 def filter_callable(structure, section=None):
18 structure['SOMEVAL'] = '***REMOVED***'
19 structure["SOMEVAL"] = "***REMOVED***"
19 20 return structure
This diff has been collapsed as it changes many lines (863 lines changed).
@@ -43,7 +43,7 @@ _ = str
43 43 strip_filter = lambda x: x.strip() if x else None
44 44 uppercase_filter = lambda x: x.upper() if x else None
45 45
46 FALSE_VALUES = ('false', '', False, None)
46 FALSE_VALUES = ("false", "", False, None)
47 47
48 48
49 49 class CSRFException(Exception):
@@ -51,11 +51,14 @@ class CSRFException(Exception):
51 51
52 52
53 53 class ReactorForm(SecureForm):
54 def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None,
55 **kwargs):
56 super(ReactorForm, self).__init__(formdata=formdata, obj=obj,
57 prefix=prefix,
58 csrf_context=csrf_context, **kwargs)
54 def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs):
55 super(ReactorForm, self).__init__(
56 formdata=formdata,
57 obj=obj,
58 prefix=prefix,
59 csrf_context=csrf_context,
60 **kwargs
61 )
59 62 self._csrf_context = csrf_context
60 63
61 64 def generate_csrf_token(self, csrf_context):
@@ -63,14 +66,14 b' class ReactorForm(SecureForm):'
63 66
64 67 def validate_csrf_token(self, field):
65 68 request = self._csrf_context or pyramid.threadlocal.get_current_request()
66 is_from_auth_token = 'auth:auth_token' in request.effective_principals
69 is_from_auth_token = "auth:auth_token" in request.effective_principals
67 70 if is_from_auth_token:
68 71 return True
69 72
70 73 if field.data != field.current_token:
71 74 # try to save the day by using token from angular
72 if request.headers.get('X-XSRF-TOKEN') != field.current_token:
73 raise CSRFException('Invalid CSRF token')
75 if request.headers.get("X-XSRF-TOKEN") != field.current_token:
76 raise CSRFException("Invalid CSRF token")
74 77
75 78 @property
76 79 def errors_dict(self):
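
The CSRF check above has three outcomes: requests authenticated by API token skip it, a matching form token passes, and otherwise the X-XSRF-TOKEN header that Angular mirrors from the cookie is consulted before CSRFException is raised. A condensed restatement as a standalone predicate (assumes a Pyramid-style request; the function name is illustrative):

    def csrf_ok(request, submitted_token, current_token):
        # API-token authenticated requests bypass CSRF entirely.
        if "auth:auth_token" in request.effective_principals:
            return True
        # Normal path: the hidden form field carries the current token.
        if submitted_token == current_token:
            return True
        # Fallback: Angular sends the token in the X-XSRF-TOKEN header.
        return request.headers.get("X-XSRF-TOKEN") == current_token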
@@ -105,45 +108,47 b' class ReactorForm(SecureForm):'
105 108
106 109 class SignInForm(ReactorForm):
107 110 came_from = wtforms.HiddenField()
108 sign_in_user_name = wtforms.StringField(_('User Name'))
109 sign_in_user_password = wtforms.PasswordField(_('Password'))
111 sign_in_user_name = wtforms.StringField(_("User Name"))
112 sign_in_user_password = wtforms.PasswordField(_("Password"))
110 113
111 ignore_labels = ['submit']
112 css_classes = {'submit': 'btn btn-primary'}
114 ignore_labels = ["submit"]
115 css_classes = {"submit": "btn btn-primary"}
113 116
114 html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'},
115 'sign_in_user_password': {
116 'placeholder': 'Your password'}}
117 html_attrs = {
118 "sign_in_user_name": {"placeholder": "Your login"},
119 "sign_in_user_password": {"placeholder": "Your password"},
120 }
117 121
118 122
119 123 from wtforms.widgets import html_params, HTMLString
120 124
121 125
122 def select_multi_checkbox(field, ul_class='set', **kwargs):
126 def select_multi_checkbox(field, ul_class="set", **kwargs):
123 127 """Render a multi-checkbox widget"""
124 kwargs.setdefault('type', 'checkbox')
125 field_id = kwargs.pop('id', field.id)
126 html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)]
128 kwargs.setdefault("type", "checkbox")
129 field_id = kwargs.pop("id", field.id)
130 html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)]
127 131 for value, label, checked in field.iter_choices():
128 choice_id = '%s-%s' % (field_id, value)
132 choice_id = "%s-%s" % (field_id, value)
129 133 options = dict(kwargs, name=field.name, value=value, id=choice_id)
130 134 if checked:
131 options['checked'] = 'checked'
132 html.append('<li><input %s /> ' % html_params(**options))
135 options["checked"] = "checked"
136 html.append("<li><input %s /> " % html_params(**options))
133 137 html.append('<label for="%s">%s</label></li>' % (choice_id, label))
134 html.append('</ul>')
135 return HTMLString(''.join(html))
138 html.append("</ul>")
139 return HTMLString("".join(html))
136 140
137 141
138 def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs):
142 def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs):
139 143 """Render a button widget"""
140 kwargs.setdefault('type', 'button')
141 field_id = kwargs.pop('id', field.id)
142 kwargs.setdefault('value', field.label.text)
143 html = ['<button %s>%s</button>' % (html_params(id=field_id,
144 class_=button_cls),
145 kwargs['value'],)]
146 return HTMLString(''.join(html))
144 kwargs.setdefault("type", "button")
145 field_id = kwargs.pop("id", field.id)
146 kwargs.setdefault("value", field.label.text)
147 html = [
148 "<button %s>%s</button>"
149 % (html_params(id=field_id, class_=button_cls), kwargs["value"])
150 ]
151 return HTMLString("".join(html))
147 152
148 153
149 154 def clean_whitespace(value):
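
Both widgets plug into WTForms through a field's widget= argument. A short usage sketch with a plain wtforms.Form, assuming the wtforms 2.x API this module targets and select_multi_checkbox as defined above:

    import wtforms

    class AppPickerDemo(wtforms.Form):
        applications = wtforms.SelectMultipleField(
            "Applications",
            choices=[("1", "frontend"), ("2", "backend")],
            widget=select_multi_checkbox,
        )

    form = AppPickerDemo()
    print(form.applications())  # <ul class="set"><li><input type="checkbox" ...

Rendering walks field.iter_choices(), so every choice becomes a checkbox with a <label> bound to it by id.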
@@ -157,33 +162,32 b' def found_username_validator(form, field):'
157 162 # sets user to recover in email validator
158 163 form.field_user = user
159 164 if not user:
160 raise wtforms.ValidationError('This username does not exist')
165 raise wtforms.ValidationError("This username does not exist")
161 166
162 167
163 168 def found_username_email_validator(form, field):
164 169 user = UserService.by_email(field.data)
165 170 if not user:
166 raise wtforms.ValidationError('Email is incorrect')
171 raise wtforms.ValidationError("Email is incorrect")
167 172
168 173
169 174 def unique_username_validator(form, field):
170 175 user = UserService.by_user_name(field.data)
171 176 if user:
172 raise wtforms.ValidationError('This username already exists in system')
177 raise wtforms.ValidationError("This username already exists in system")
173 178
174 179
175 180 def unique_groupname_validator(form, field):
176 181 group = GroupService.by_group_name(field.data)
177 mod_group = getattr(form, '_modified_group', None)
182 mod_group = getattr(form, "_modified_group", None)
178 183 if group and (not mod_group or mod_group.id != group.id):
179 raise wtforms.ValidationError(
180 'This group name already exists in system')
184 raise wtforms.ValidationError("This group name already exists in system")
181 185
182 186
183 187 def unique_email_validator(form, field):
184 188 user = UserService.by_email(field.data)
185 189 if user:
186 raise wtforms.ValidationError('This email already exists in system')
190 raise wtforms.ValidationError("This email already exists in system")
187 191
188 192
189 193 def email_validator(form, field):
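
All of these validators follow the same WTForms contract: a callable taking (form, field) that raises wtforms.ValidationError to reject the value. A self-contained sketch of the pattern, with the database lookup faked so the snippet runs anywhere:

    import wtforms

    def fake_unique_username_validator(form, field):
        # Stand-in for UserService.by_user_name(); the real validator hits the DB.
        taken = {"admin", "root"}
        if field.data in taken:
            raise wtforms.ValidationError("This username already exists in system")

    class RegisterDemo(wtforms.Form):
        user_name = wtforms.StringField(validators=[fake_unique_username_validator])

    form = RegisterDemo(data={"user_name": "admin"})
    print(form.validate())  # False
    print(form.errors)      # {'user_name': ['This username already exists in system']}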
@@ -196,145 +200,168 b' def email_validator(form, field):'
196 200
197 201 def unique_alert_email_validator(form, field):
198 202 q = DBSession.query(AlertChannel)
199 q = q.filter(AlertChannel.channel_name == 'email')
203 q = q.filter(AlertChannel.channel_name == "email")
200 204 q = q.filter(AlertChannel.channel_value == field.data)
201 205 email = q.first()
202 206 if email:
203 raise wtforms.ValidationError(
204 'This email already exists in alert system')
207 raise wtforms.ValidationError("This email already exists in alert system")
205 208
206 209
207 210 def blocked_email_validator(form, field):
208 211 blocked_emails = [
209 'goood-mail.org',
210 'shoeonlineblog.com',
211 'louboutinemart.com',
212 'guccibagshere.com',
213 'nikeshoesoutletforsale.com'
212 "goood-mail.org",
213 "shoeonlineblog.com",
214 "louboutinemart.com",
215 "guccibagshere.com",
216 "nikeshoesoutletforsale.com",
214 217 ]
215 data = field.data or ''
216 domain = data.split('@')[-1]
218 data = field.data or ""
219 domain = data.split("@")[-1]
217 220 if domain in blocked_emails:
218 raise wtforms.ValidationError('Don\'t spam')
221 raise wtforms.ValidationError("Don't spam")
219 222
220 223
221 224 def old_password_validator(form, field):
222 if not UserService.check_password(field.user, field.data or ''):
223 raise wtforms.ValidationError('You need to enter correct password')
225 if not UserService.check_password(field.user, field.data or ""):
226 raise wtforms.ValidationError("You need to enter correct password")
224 227
225 228
226 229 class UserRegisterForm(ReactorForm):
227 230 user_name = wtforms.StringField(
228 _('User Name'),
231 _("User Name"),
229 232 filters=[strip_filter],
230 233 validators=[
231 234 wtforms.validators.Length(min=2, max=30),
232 235 wtforms.validators.Regexp(
233 re.compile(r'^[\.\w-]+$', re.UNICODE),
234 message="Invalid characters used"),
236 re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used"
237 ),
235 238 unique_username_validator,
236 wtforms.validators.DataRequired()
237 ])
239 wtforms.validators.DataRequired(),
240 ],
241 )
238 242
239 user_password = wtforms.PasswordField(_('User Password'),
240 filters=[strip_filter],
241 validators=[
242 wtforms.validators.Length(min=4),
243 wtforms.validators.DataRequired()
244 ])
243 user_password = wtforms.PasswordField(
244 _("User Password"),
245 filters=[strip_filter],
246 validators=[
247 wtforms.validators.Length(min=4),
248 wtforms.validators.DataRequired(),
249 ],
250 )
245 251
246 email = wtforms.StringField(_('Email Address'),
247 filters=[strip_filter],
248 validators=[email_validator,
249 unique_email_validator,
250 blocked_email_validator,
251 wtforms.validators.DataRequired()])
252 first_name = wtforms.HiddenField(_('First Name'))
253 last_name = wtforms.HiddenField(_('Last Name'))
252 email = wtforms.StringField(
253 _("Email Address"),
254 filters=[strip_filter],
255 validators=[
256 email_validator,
257 unique_email_validator,
258 blocked_email_validator,
259 wtforms.validators.DataRequired(),
260 ],
261 )
262 first_name = wtforms.HiddenField(_("First Name"))
263 last_name = wtforms.HiddenField(_("Last Name"))
254 264
255 ignore_labels = ['submit']
256 css_classes = {'submit': 'btn btn-primary'}
265 ignore_labels = ["submit"]
266 css_classes = {"submit": "btn btn-primary"}
257 267
258 html_attrs = {'user_name': {'placeholder': 'Your login'},
259 'user_password': {'placeholder': 'Your password'},
260 'email': {'placeholder': 'Your email'}}
268 html_attrs = {
269 "user_name": {"placeholder": "Your login"},
270 "user_password": {"placeholder": "Your password"},
271 "email": {"placeholder": "Your email"},
272 }
261 273
262 274
263 275 class UserCreateForm(UserRegisterForm):
264 status = wtforms.BooleanField('User status',
265 false_values=FALSE_VALUES)
276 status = wtforms.BooleanField("User status", false_values=FALSE_VALUES)
266 277
267 278
268 279 class UserUpdateForm(UserCreateForm):
269 280 user_name = None
270 user_password = wtforms.PasswordField(_('User Password'),
271 filters=[strip_filter],
272 validators=[
273 wtforms.validators.Length(min=4),
274 wtforms.validators.Optional()
275 ])
276 email = wtforms.StringField(_('Email Address'),
277 filters=[strip_filter],
278 validators=[email_validator,
279 wtforms.validators.DataRequired()])
281 user_password = wtforms.PasswordField(
282 _("User Password"),
283 filters=[strip_filter],
284 validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()],
285 )
286 email = wtforms.StringField(
287 _("Email Address"),
288 filters=[strip_filter],
289 validators=[email_validator, wtforms.validators.DataRequired()],
290 )
280 291
281 292
282 293 class LostPasswordForm(ReactorForm):
283 email = wtforms.StringField(_('Email Address'),
284 filters=[strip_filter],
285 validators=[email_validator,
286 found_username_email_validator,
287 wtforms.validators.DataRequired()])
294 email = wtforms.StringField(
295 _("Email Address"),
296 filters=[strip_filter],
297 validators=[
298 email_validator,
299 found_username_email_validator,
300 wtforms.validators.DataRequired(),
301 ],
302 )
288 303
289 submit = wtforms.SubmitField(_('Reset password'))
290 ignore_labels = ['submit']
291 css_classes = {'submit': 'btn btn-primary'}
304 submit = wtforms.SubmitField(_("Reset password"))
305 ignore_labels = ["submit"]
306 css_classes = {"submit": "btn btn-primary"}
292 307
293 308
294 309 class ChangePasswordForm(ReactorForm):
295 310 old_password = wtforms.PasswordField(
296 'Old Password',
311 "Old Password",
297 312 filters=[strip_filter],
298 validators=[old_password_validator,
299 wtforms.validators.DataRequired()])
313 validators=[old_password_validator, wtforms.validators.DataRequired()],
314 )
300 315
301 316 new_password = wtforms.PasswordField(
302 'New Password',
317 "New Password",
303 318 filters=[strip_filter],
304 validators=[wtforms.validators.Length(min=4),
305 wtforms.validators.DataRequired()])
319 validators=[
320 wtforms.validators.Length(min=4),
321 wtforms.validators.DataRequired(),
322 ],
323 )
306 324 new_password_confirm = wtforms.PasswordField(
307 'Confirm Password',
325 "Confirm Password",
308 326 filters=[strip_filter],
309 validators=[wtforms.validators.EqualTo('new_password'),
310 wtforms.validators.DataRequired()])
311 submit = wtforms.SubmitField('Change Password')
312 ignore_labels = ['submit']
313 css_classes = {'submit': 'btn btn-primary'}
327 validators=[
328 wtforms.validators.EqualTo("new_password"),
329 wtforms.validators.DataRequired(),
330 ],
331 )
332 submit = wtforms.SubmitField("Change Password")
333 ignore_labels = ["submit"]
334 css_classes = {"submit": "btn btn-primary"}
314 335
315 336
316 337 class CheckPasswordForm(ReactorForm):
317 338 password = wtforms.PasswordField(
318 'Password',
339 "Password",
319 340 filters=[strip_filter],
320 validators=[old_password_validator,
321 wtforms.validators.DataRequired()])
341 validators=[old_password_validator, wtforms.validators.DataRequired()],
342 )
322 343
323 344
324 345 class NewPasswordForm(ReactorForm):
325 346 new_password = wtforms.PasswordField(
326 'New Password',
347 "New Password",
327 348 filters=[strip_filter],
328 validators=[wtforms.validators.Length(min=4),
329 wtforms.validators.DataRequired()])
349 validators=[
350 wtforms.validators.Length(min=4),
351 wtforms.validators.DataRequired(),
352 ],
353 )
330 354 new_password_confirm = wtforms.PasswordField(
331 'Confirm Password',
355 "Confirm Password",
332 356 filters=[strip_filter],
333 validators=[wtforms.validators.EqualTo('new_password'),
334 wtforms.validators.DataRequired()])
335 submit = wtforms.SubmitField('Set Password')
336 ignore_labels = ['submit']
337 css_classes = {'submit': 'btn btn-primary'}
357 validators=[
358 wtforms.validators.EqualTo("new_password"),
359 wtforms.validators.DataRequired(),
360 ],
361 )
362 submit = wtforms.SubmitField("Set Password")
363 ignore_labels = ["submit"]
364 css_classes = {"submit": "btn btn-primary"}
338 365
339 366
340 367 class CORSTextAreaField(wtforms.StringField):
@@ -342,261 +369,290 b' class CORSTextAreaField(wtforms.StringField):'
342 369 This field represents an HTML ``<textarea>`` and can be used to take
343 370 multi-line input.
344 371 """
372
345 373 widget = wtforms.widgets.TextArea()
346 374
347 375 def process_formdata(self, valuelist):
348 376 self.data = []
349 377 if valuelist:
350 data = [x.strip() for x in valuelist[0].split('\n')]
378 data = [x.strip() for x in valuelist[0].split("\n")]
351 379 for d in data:
352 380 if not d:
353 381 continue
354 if d.startswith('www.'):
382 if d.startswith("www."):
355 383 d = d[4:]
356 384 if data:
357 385 self.data.append(d)
358 386 else:
359 387 self.data = []
360 self.data = '\n'.join(self.data)
388 self.data = "\n".join(self.data)
361 389
362 390
363 391 class ApplicationCreateForm(ReactorForm):
364 392 resource_name = wtforms.StringField(
365 _('Application name'),
393 _("Application name"),
366 394 filters=[strip_filter],
367 validators=[wtforms.validators.Length(min=1),
368 wtforms.validators.DataRequired()])
395 validators=[
396 wtforms.validators.Length(min=1),
397 wtforms.validators.DataRequired(),
398 ],
399 )
369 400
370 401 domains = CORSTextAreaField(
371 _('Domain names for CORS headers '),
372 validators=[wtforms.validators.Length(min=1),
373 wtforms.validators.Optional()],
374 description='Required for Javascript error '
375 'tracking (one line one domain, skip http:// part)')
402 _("Domain names for CORS headers "),
403 validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()],
404 description="Required for Javascript error "
405 "tracking (one line one domain, skip http:// part)",
406 )
376 407
377 submit = wtforms.SubmitField(_('Create Application'))
408 submit = wtforms.SubmitField(_("Create Application"))
378 409
379 ignore_labels = ['submit']
380 css_classes = {'submit': 'btn btn-primary'}
381 html_attrs = {'resource_name': {'placeholder': 'Application Name'},
382 'uptime_url': {'placeholder': 'http://somedomain.com'}}
410 ignore_labels = ["submit"]
411 css_classes = {"submit": "btn btn-primary"}
412 html_attrs = {
413 "resource_name": {"placeholder": "Application Name"},
414 "uptime_url": {"placeholder": "http://somedomain.com"},
415 }
383 416
384 417
385 418 class ApplicationUpdateForm(ApplicationCreateForm):
386 419 default_grouping = wtforms.SelectField(
387 _('Default grouping for errors'),
388 choices=[('url_type', 'Error Type + location',),
389 ('url_traceback', 'Traceback + location',),
390 ('traceback_server', 'Traceback + Server',)],
391 default='url_traceback')
420 _("Default grouping for errors"),
421 choices=[
422 ("url_type", "Error Type + location"),
423 ("url_traceback", "Traceback + location"),
424 ("traceback_server", "Traceback + Server"),
425 ],
426 default="url_traceback",
427 )
392 428
393 429 error_report_threshold = wtforms.IntegerField(
394 _('Alert on error reports'),
430 _("Alert on error reports"),
395 431 validators=[
396 432 wtforms.validators.NumberRange(min=1),
397 wtforms.validators.DataRequired()
433 wtforms.validators.DataRequired(),
398 434 ],
399 description='Application requires to send at least this amount of '
400 'error reports per minute to open alert'
435 description="Application requires to send at least this amount of "
436 "error reports per minute to open alert",
401 437 )
402 438
403 439 slow_report_threshold = wtforms.IntegerField(
404 _('Alert on slow reports'),
405 validators=[wtforms.validators.NumberRange(min=1),
406 wtforms.validators.DataRequired()],
407 description='Application requires to send at least this amount of '
408 'slow reports per minute to open alert')
440 _("Alert on slow reports"),
441 validators=[
442 wtforms.validators.NumberRange(min=1),
443 wtforms.validators.DataRequired(),
444 ],
445 description="Application requires to send at least this amount of "
446 "slow reports per minute to open alert",
447 )
409 448
410 449 allow_permanent_storage = wtforms.BooleanField(
411 _('Permanent logs'),
450 _("Permanent logs"),
412 451 false_values=FALSE_VALUES,
413 description=_(
414 'Allow permanent storage of logs in separate DB partitions'))
452 description=_("Allow permanent storage of logs in separate DB partitions"),
453 )
415 454
416 submit = wtforms.SubmitField(_('Create Application'))
455 submit = wtforms.SubmitField(_("Create Application"))
417 456
418 457
419 458 class UserSearchSchemaForm(ReactorForm):
420 user_name = wtforms.StringField('User Name',
421 filters=[strip_filter], )
459 user_name = wtforms.StringField("User Name", filters=[strip_filter])
422 460
423 submit = wtforms.SubmitField(_('Search User'))
424 ignore_labels = ['submit']
425 css_classes = {'submit': 'btn btn-primary'}
461 submit = wtforms.SubmitField(_("Search User"))
462 ignore_labels = ["submit"]
463 css_classes = {"submit": "btn btn-primary"}
426 464
427 465 '<li class="user_exists"><span></span></li>'
428 466
429 467
430 468 class YesNoForm(ReactorForm):
431 no = wtforms.SubmitField('No', default='')
432 yes = wtforms.SubmitField('Yes', default='')
433 ignore_labels = ['submit']
434 css_classes = {'submit': 'btn btn-primary'}
469 no = wtforms.SubmitField("No", default="")
470 yes = wtforms.SubmitField("Yes", default="")
471 ignore_labels = ["submit"]
472 css_classes = {"submit": "btn btn-primary"}
435 473
436 474
437 status_codes = [('', 'All',), ('500', '500',), ('404', '404',)]
475 status_codes = [("", "All"), ("500", "500"), ("404", "404")]
438 476
439 priorities = [('', 'All',)]
477 priorities = [("", "All")]
440 478 for i in range(1, 11):
441 priorities.append((str(i), str(i),))
479 priorities.append((str(i), str(i)))
442 480
443 report_status_choices = [('', 'All',),
444 ('never_reviewed', 'Never reviewed',),
445 ('reviewed', 'Reviewed',),
446 ('public', 'Public',),
447 ('fixed', 'Fixed',), ]
481 report_status_choices = [
482 ("", "All"),
483 ("never_reviewed", "Never reviewed"),
484 ("reviewed", "Reviewed"),
485 ("public", "Public"),
486 ("fixed", "Fixed"),
487 ]
448 488
449 489
450 490 class ReportBrowserForm(ReactorForm):
451 applications = wtforms.SelectMultipleField('Applications',
452 widget=select_multi_checkbox)
453 http_status = wtforms.SelectField('HTTP Status', choices=status_codes)
454 priority = wtforms.SelectField('Priority', choices=priorities, default='')
455 start_date = wtforms.DateField('Start Date')
456 end_date = wtforms.DateField('End Date')
457 error = wtforms.StringField('Error')
458 url_path = wtforms.StringField('URL Path')
459 url_domain = wtforms.StringField('URL Domain')
460 report_status = wtforms.SelectField('Report status',
461 choices=report_status_choices,
462 default='')
463 submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">'
464 '</span> Filter results',
465 widget=button_widget)
466
467 ignore_labels = ['submit']
468 css_classes = {'submit': 'btn btn-primary'}
469
470
471 slow_report_status_choices = [('', 'All',),
472 ('never_reviewed', 'Never reviewed',),
473 ('reviewed', 'Reviewed',),
474 ('public', 'Public',), ]
491 applications = wtforms.SelectMultipleField(
492 "Applications", widget=select_multi_checkbox
493 )
494 http_status = wtforms.SelectField("HTTP Status", choices=status_codes)
495 priority = wtforms.SelectField("Priority", choices=priorities, default="")
496 start_date = wtforms.DateField("Start Date")
497 end_date = wtforms.DateField("End Date")
498 error = wtforms.StringField("Error")
499 url_path = wtforms.StringField("URL Path")
500 url_domain = wtforms.StringField("URL Domain")
501 report_status = wtforms.SelectField(
502 "Report status", choices=report_status_choices, default=""
503 )
504 submit = wtforms.SubmitField(
505 '<span class="glyphicon glyphicon-search">' "</span> Filter results",
506 widget=button_widget,
507 )
508
509 ignore_labels = ["submit"]
510 css_classes = {"submit": "btn btn-primary"}
511
512
513 slow_report_status_choices = [
514 ("", "All"),
515 ("never_reviewed", "Never reviewed"),
516 ("reviewed", "Reviewed"),
517 ("public", "Public"),
518 ]
475 519
476 520
477 521 class BulkOperationForm(ReactorForm):
478 applications = wtforms.SelectField('Applications')
522 applications = wtforms.SelectField("Applications")
479 523 start_date = wtforms.DateField(
480 'Start Date',
481 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(
482 days=90))
483 end_date = wtforms.DateField('End Date')
524 "Start Date",
525 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90),
526 )
527 end_date = wtforms.DateField("End Date")
484 528 confirm = wtforms.BooleanField(
485 'Confirm operation',
486 validators=[wtforms.validators.DataRequired()])
529 "Confirm operation", validators=[wtforms.validators.DataRequired()]
530 )
487 531
488 532
489 533 class LogBrowserForm(ReactorForm):
490 applications = wtforms.SelectMultipleField('Applications',
491 widget=select_multi_checkbox)
492 start_date = wtforms.DateField('Start Date')
493 log_level = wtforms.StringField('Log level')
494 message = wtforms.StringField('Message')
495 namespace = wtforms.StringField('Namespace')
534 applications = wtforms.SelectMultipleField(
535 "Applications", widget=select_multi_checkbox
536 )
537 start_date = wtforms.DateField("Start Date")
538 log_level = wtforms.StringField("Log level")
539 message = wtforms.StringField("Message")
540 namespace = wtforms.StringField("Namespace")
496 541 submit = wtforms.SubmitField(
497 542 '<span class="glyphicon glyphicon-search"></span> Filter results',
498 widget=button_widget)
499 ignore_labels = ['submit']
500 css_classes = {'submit': 'btn btn-primary'}
543 widget=button_widget,
544 )
545 ignore_labels = ["submit"]
546 css_classes = {"submit": "btn btn-primary"}
501 547
502 548
503 549 class CommentForm(ReactorForm):
504 body = wtforms.TextAreaField('Comment', validators=[
505 wtforms.validators.Length(min=1),
506 wtforms.validators.DataRequired()
507 ])
508 submit = wtforms.SubmitField('Comment', )
509 ignore_labels = ['submit']
510 css_classes = {'submit': 'btn btn-primary'}
550 body = wtforms.TextAreaField(
551 "Comment",
552 validators=[
553 wtforms.validators.Length(min=1),
554 wtforms.validators.DataRequired(),
555 ],
556 )
557 submit = wtforms.SubmitField("Comment")
558 ignore_labels = ["submit"]
559 css_classes = {"submit": "btn btn-primary"}
511 560
512 561
513 562 class EmailChannelCreateForm(ReactorForm):
514 email = wtforms.StringField(_('Email Address'),
515 filters=[strip_filter],
516 validators=[email_validator,
517 unique_alert_email_validator,
518 wtforms.validators.DataRequired()])
519 submit = wtforms.SubmitField('Add email channel', )
520 ignore_labels = ['submit']
521 css_classes = {'submit': 'btn btn-primary'}
563 email = wtforms.StringField(
564 _("Email Address"),
565 filters=[strip_filter],
566 validators=[
567 email_validator,
568 unique_alert_email_validator,
569 wtforms.validators.DataRequired(),
570 ],
571 )
572 submit = wtforms.SubmitField("Add email channel")
573 ignore_labels = ["submit"]
574 css_classes = {"submit": "btn btn-primary"}
522 575
523 576
524 577 def gen_user_profile_form():
525 578 class UserProfileForm(ReactorForm):
526 579 email = wtforms.StringField(
527 _('Email Address'),
528 validators=[email_validator, wtforms.validators.DataRequired()])
529 first_name = wtforms.StringField(_('First Name'))
530 last_name = wtforms.StringField(_('Last Name'))
531 company_name = wtforms.StringField(_('Company Name'))
532 company_address = wtforms.TextAreaField(_('Company Address'))
533 zip_code = wtforms.StringField(_('ZIP code'))
534 city = wtforms.StringField(_('City'))
535 notifications = wtforms.BooleanField('Account notifications',
536 false_values=FALSE_VALUES)
537 submit = wtforms.SubmitField(_('Update Account'))
538 ignore_labels = ['submit']
539 css_classes = {'submit': 'btn btn-primary'}
580 _("Email Address"),
581 validators=[email_validator, wtforms.validators.DataRequired()],
582 )
583 first_name = wtforms.StringField(_("First Name"))
584 last_name = wtforms.StringField(_("Last Name"))
585 company_name = wtforms.StringField(_("Company Name"))
586 company_address = wtforms.TextAreaField(_("Company Address"))
587 zip_code = wtforms.StringField(_("ZIP code"))
588 city = wtforms.StringField(_("City"))
589 notifications = wtforms.BooleanField(
590 "Account notifications", false_values=FALSE_VALUES
591 )
592 submit = wtforms.SubmitField(_("Update Account"))
593 ignore_labels = ["submit"]
594 css_classes = {"submit": "btn btn-primary"}
540 595
541 596 return UserProfileForm
542 597
543 598
544 599 class PurgeAppForm(ReactorForm):
545 600 resource_id = wtforms.HiddenField(
546 'App Id',
547 validators=[wtforms.validators.DataRequired()])
548 days = wtforms.IntegerField(
549 'Days',
550 validators=[wtforms.validators.DataRequired()])
601 "App Id", validators=[wtforms.validators.DataRequired()]
602 )
603 days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()])
551 604 password = wtforms.PasswordField(
552 'Admin Password',
553 validators=[old_password_validator, wtforms.validators.DataRequired()])
554 submit = wtforms.SubmitField(_('Purge Data'))
555 ignore_labels = ['submit']
556 css_classes = {'submit': 'btn btn-primary'}
605 "Admin Password",
606 validators=[old_password_validator, wtforms.validators.DataRequired()],
607 )
608 submit = wtforms.SubmitField(_("Purge Data"))
609 ignore_labels = ["submit"]
610 css_classes = {"submit": "btn btn-primary"}
557 611
558 612
559 613 class IntegrationRepoForm(ReactorForm):
560 host_name = wtforms.StringField("Service Host", default='')
614 host_name = wtforms.StringField("Service Host", default="")
561 615 user_name = wtforms.StringField(
562 616 "User Name",
563 617 filters=[strip_filter],
564 validators=[wtforms.validators.DataRequired(),
565 wtforms.validators.Length(min=1)])
618 validators=[
619 wtforms.validators.DataRequired(),
620 wtforms.validators.Length(min=1),
621 ],
622 )
566 623 repo_name = wtforms.StringField(
567 624 "Repo Name",
568 625 filters=[strip_filter],
569 validators=[wtforms.validators.DataRequired(),
570 wtforms.validators.Length(min=1)])
626 validators=[
627 wtforms.validators.DataRequired(),
628 wtforms.validators.Length(min=1),
629 ],
630 )
571 631
572 632
573 633 class IntegrationBitbucketForm(IntegrationRepoForm):
574 host_name = wtforms.StringField("Service Host",
575 default='https://bitbucket.org')
634 host_name = wtforms.StringField("Service Host", default="https://bitbucket.org")
576 635
577 636 def validate_user_name(self, field):
578 637 try:
579 638 request = pyramid.threadlocal.get_current_request()
580 639 client = BitbucketIntegration.create_client(
581 request,
582 self.user_name.data,
583 self.repo_name.data)
640 request, self.user_name.data, self.repo_name.data
641 )
584 642 client.get_assignees()
585 643 except IntegrationException as e:
586 644 raise wtforms.validators.ValidationError(str(e))
587 645
588 646
589 647 class IntegrationGithubForm(IntegrationRepoForm):
590 host_name = wtforms.StringField("Service Host",
591 default='https://github.com')
648 host_name = wtforms.StringField("Service Host", default="https://github.com")
592 649
593 650 def validate_user_name(self, field):
594 651 try:
595 652 request = pyramid.threadlocal.get_current_request()
596 653 client = GithubIntegration.create_client(
597 request,
598 self.user_name.data,
599 self.repo_name.data)
654 request, self.user_name.data, self.repo_name.data
655 )
600 656 client.get_assignees()
601 657 except IntegrationException as e:
602 658 raise wtforms.validators.ValidationError(str(e))
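
Among the fields touched above, CORSTextAreaField does real work in process_formdata: it splits the textarea by line, drops blanks, strips a leading "www.", and stores the survivors newline-joined. The same normalization compressed into a plain function:

    def normalize_cors_domains(raw):
        out = []
        for d in (line.strip() for line in raw.split("\n")):
            if not d:
                continue
            if d.startswith("www."):
                d = d[4:]  # treat www.example.com and example.com as one origin
            out.append(d)
        return "\n".join(out)

    print(normalize_cors_domains("www.example.com\n\napi.example.com"))
    # example.com
    # api.example.com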
@@ -605,25 +661,28 b' class IntegrationGithubForm(IntegrationRepoForm):'
605 661
606 662 def filter_rooms(data):
607 663 if data is not None:
608 rooms = data.split(',')
609 return ','.join([r.strip() for r in rooms])
664 rooms = data.split(",")
665 return ",".join([r.strip() for r in rooms])
610 666
611 667
612 668 class IntegrationCampfireForm(ReactorForm):
613 669 account = wtforms.StringField(
614 'Account',
670 "Account",
615 671 filters=[strip_filter],
616 validators=[wtforms.validators.DataRequired()])
672 validators=[wtforms.validators.DataRequired()],
673 )
617 674 api_token = wtforms.StringField(
618 'Api Token',
675 "Api Token",
619 676 filters=[strip_filter],
620 validators=[wtforms.validators.DataRequired()])
621 rooms = wtforms.StringField('Room ID list', filters=[filter_rooms])
677 validators=[wtforms.validators.DataRequired()],
678 )
679 rooms = wtforms.StringField("Room ID list", filters=[filter_rooms])
622 680
623 681 def validate_api_token(self, field):
624 682 try:
625 client = CampfireIntegration.create_client(self.api_token.data,
626 self.account.data)
683 client = CampfireIntegration.create_client(
684 self.api_token.data, self.account.data
685 )
627 686 client.get_account()
628 687 except IntegrationException as e:
629 688 raise wtforms.validators.ValidationError(str(e))
@@ -631,17 +690,18 b' class IntegrationCampfireForm(ReactorForm):'
631 690 def validate_rooms(self, field):
632 691 if not field.data:
633 692 return
634 client = CampfireIntegration.create_client(self.api_token.data,
635 self.account.data)
693 client = CampfireIntegration.create_client(
694 self.api_token.data, self.account.data
695 )
636 696
637 697 try:
638 room_list = [r['id'] for r in client.get_rooms()]
698 room_list = [r["id"] for r in client.get_rooms()]
639 699 except IntegrationException as e:
640 700 raise wtforms.validators.ValidationError(str(e))
641 701
642 rooms = field.data.split(',')
702 rooms = field.data.split(",")
643 703 if len(rooms) > 3:
644 msg = 'You can use up to 3 room ids'
704 msg = "You can use up to 3 room ids"
645 705 raise wtforms.validators.ValidationError(msg)
646 706 if rooms:
647 707 for room_id in rooms:
@@ -649,75 +709,78 b' class IntegrationCampfireForm(ReactorForm):'
649 709 msg = "Room %s doesn't exist"
650 710 raise wtforms.validators.ValidationError(msg % room_id)
651 711 if not room_id.strip().isdigit():
652 msg = 'You must use only integers for room ids'
712 msg = "You must use only integers for room ids"
653 713 raise wtforms.validators.ValidationError(msg)
654 714
655 submit = wtforms.SubmitField(_('Connect to Campfire'))
656 ignore_labels = ['submit']
657 css_classes = {'submit': 'btn btn-primary'}
715 submit = wtforms.SubmitField(_("Connect to Campfire"))
716 ignore_labels = ["submit"]
717 css_classes = {"submit": "btn btn-primary"}
658 718
659 719
660 720 def filter_rooms(data):
661 721 if data is not None:
662 rooms = data.split(',')
663 return ','.join([r.strip() for r in rooms])
722 rooms = data.split(",")
723 return ",".join([r.strip() for r in rooms])
664 724
665 725
666 726 class IntegrationHipchatForm(ReactorForm):
667 727 api_token = wtforms.StringField(
668 'Api Token',
728 "Api Token",
669 729 filters=[strip_filter],
670 validators=[wtforms.validators.DataRequired()])
730 validators=[wtforms.validators.DataRequired()],
731 )
671 732 rooms = wtforms.StringField(
672 'Room ID list',
733 "Room ID list",
673 734 filters=[filter_rooms],
674 validators=[wtforms.validators.DataRequired()])
735 validators=[wtforms.validators.DataRequired()],
736 )
675 737
676 738 def validate_rooms(self, field):
677 739 if not field.data:
678 740 return
679 741 client = HipchatIntegration.create_client(self.api_token.data)
680 rooms = field.data.split(',')
742 rooms = field.data.split(",")
681 743 if len(rooms) > 3:
682 msg = 'You can use up to 3 room ids'
744 msg = "You can use up to 3 room ids"
683 745 raise wtforms.validators.ValidationError(msg)
684 746 if rooms:
685 747 for room_id in rooms:
686 748 if not room_id.strip().isdigit():
687 msg = 'You must use only integers for room ids'
749 msg = "You must use only integers for room ids"
688 750 raise wtforms.validators.ValidationError(msg)
689 751 try:
690 client.send({
691 "message_format": 'text',
692 "message": "testing for room existence",
693 "from": "AppEnlight",
694 "room_id": room_id,
695 "color": "green"
696 })
752 client.send(
753 {
754 "message_format": "text",
755 "message": "testing for room existence",
756 "from": "AppEnlight",
757 "room_id": room_id,
758 "color": "green",
759 }
760 )
697 761 except IntegrationException as exc:
698 msg = 'Room id: %s exception: %s'
699 raise wtforms.validators.ValidationError(msg % (room_id,
700 exc))
762 msg = "Room id: %s exception: %s"
763 raise wtforms.validators.ValidationError(msg % (room_id, exc))
701 764
702 765
703 766 class IntegrationFlowdockForm(ReactorForm):
704 api_token = wtforms.StringField('API Token',
705 filters=[strip_filter],
706 validators=[
707 wtforms.validators.DataRequired()
708 ], )
767 api_token = wtforms.StringField(
768 "API Token",
769 filters=[strip_filter],
770 validators=[wtforms.validators.DataRequired()],
771 )
709 772
710 773 def validate_api_token(self, field):
711 774 try:
712 775 client = FlowdockIntegration.create_client(self.api_token.data)
713 776 registry = pyramid.threadlocal.get_current_registry()
714 777 payload = {
715 "source": registry.settings['mailing.from_name'],
716 "from_address": registry.settings['mailing.from_email'],
778 "source": registry.settings["mailing.from_name"],
779 "from_address": registry.settings["mailing.from_email"],
717 780 "subject": "Integration test",
718 781 "content": "If you can see this it was successful",
719 782 "tags": ["appenlight"],
720 "link": registry.settings['mailing.app_url']
783 "link": registry.settings["mailing.app_url"],
721 784 }
722 785 client.send_to_inbox(payload)
723 786 except IntegrationException as e:
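
The Campfire and Hipchat forms above share the same room-list rules: a comma-separated id list, at most three entries, integer ids only, and for Campfire the id must also exist on the account. The shape of that check with the live API call replaced by a plain set lookup:

    def check_rooms(raw, existing_ids):
        rooms = [r.strip() for r in raw.split(",") if r.strip()]
        if len(rooms) > 3:
            raise ValueError("You can use up to 3 room ids")
        for room_id in rooms:
            if not room_id.isdigit():
                raise ValueError("You must use only integers for room ids")
            if int(room_id) not in existing_ids:
                raise ValueError("Room %s doesn't exist" % room_id)
        return rooms

    print(check_rooms("12, 34", existing_ids={12, 34, 56}))  # ['12', '34']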
@@ -726,30 +789,35 b' class IntegrationFlowdockForm(ReactorForm):'
726 789
727 790 class IntegrationSlackForm(ReactorForm):
728 791 webhook_url = wtforms.StringField(
729 'Reports webhook',
792 "Reports webhook",
730 793 filters=[strip_filter],
731 validators=[wtforms.validators.DataRequired()])
794 validators=[wtforms.validators.DataRequired()],
795 )
732 796
733 797 def validate_webhook_url(self, field):
734 798 registry = pyramid.threadlocal.get_current_registry()
735 799 client = SlackIntegration.create_client(field.data)
736 link = "<%s|%s>" % (registry.settings['mailing.app_url'],
737 registry.settings['mailing.from_name'])
800 link = "<%s|%s>" % (
801 registry.settings["mailing.app_url"],
802 registry.settings["mailing.from_name"],
803 )
738 804 test_data = {
739 805 "username": "AppEnlight",
740 806 "icon_emoji": ":fire:",
741 807 "attachments": [
742 {"fallback": "Testing integration channel: %s" % link,
743 "pretext": "Testing integration channel: %s" % link,
744 "color": "good",
745 "fields": [
746 {
747 "title": "Status",
748 "value": "Integration is working fine",
749 "short": False
750 }
751 ]}
752 ]
808 {
809 "fallback": "Testing integration channel: %s" % link,
810 "pretext": "Testing integration channel: %s" % link,
811 "color": "good",
812 "fields": [
813 {
814 "title": "Status",
815 "value": "Integration is working fine",
816 "short": False,
817 }
818 ],
819 }
820 ],
753 821 }
754 822 try:
755 823 client.make_request(data=test_data)
@@ -759,44 +827,52 b' class IntegrationSlackForm(ReactorForm):'
759 827
760 828 class IntegrationWebhooksForm(ReactorForm):
761 829 reports_webhook = wtforms.StringField(
762 'Reports webhook',
830 "Reports webhook",
763 831 filters=[strip_filter],
764 validators=[wtforms.validators.DataRequired()])
832 validators=[wtforms.validators.DataRequired()],
833 )
765 834 alerts_webhook = wtforms.StringField(
766 'Alerts webhook',
835 "Alerts webhook",
767 836 filters=[strip_filter],
768 validators=[wtforms.validators.DataRequired()])
769 submit = wtforms.SubmitField(_('Setup webhooks'))
770 ignore_labels = ['submit']
771 css_classes = {'submit': 'btn btn-primary'}
837 validators=[wtforms.validators.DataRequired()],
838 )
839 submit = wtforms.SubmitField(_("Setup webhooks"))
840 ignore_labels = ["submit"]
841 css_classes = {"submit": "btn btn-primary"}
772 842
773 843
774 844 class IntegrationJiraForm(ReactorForm):
775 845 host_name = wtforms.StringField(
776 'Server URL',
846 "Server URL",
777 847 filters=[strip_filter],
778 validators=[wtforms.validators.DataRequired()])
848 validators=[wtforms.validators.DataRequired()],
849 )
779 850 user_name = wtforms.StringField(
780 'Username',
851 "Username",
781 852 filters=[strip_filter],
782 validators=[wtforms.validators.DataRequired()])
853 validators=[wtforms.validators.DataRequired()],
854 )
783 855 password = wtforms.PasswordField(
784 'Password',
856 "Password",
785 857 filters=[strip_filter],
786 validators=[wtforms.validators.DataRequired()])
858 validators=[wtforms.validators.DataRequired()],
859 )
787 860 project = wtforms.StringField(
788 'Project key',
861 "Project key",
789 862 filters=[uppercase_filter, strip_filter],
790 validators=[wtforms.validators.DataRequired()])
863 validators=[wtforms.validators.DataRequired()],
864 )
791 865
792 866 def validate_project(self, field):
793 867 if not field.data:
794 868 return
795 869 try:
796 client = JiraClient(self.user_name.data,
797 self.password.data,
798 self.host_name.data,
799 self.project.data)
870 client = JiraClient(
871 self.user_name.data,
872 self.password.data,
873 self.host_name.data,
874 self.project.data,
875 )
800 876 except Exception as exc:
801 877 raise wtforms.validators.ValidationError(str(exc))
802 878
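
validate_webhook_url proves a Slack URL by posting a test attachment through SlackIntegration. Outside this codebase the equivalent smoke test is a plain HTTP POST; a sketch with requests, using a placeholder URL (Slack incoming webhooks accept JSON bodies of this shape):

    import requests

    webhook_url = "https://hooks.slack.com/services/T000/B000/XXXX"  # placeholder
    test_data = {
        "username": "AppEnlight",
        "icon_emoji": ":fire:",
        "attachments": [{"fallback": "Testing integration channel", "color": "good"}],
    }
    # requests.post(webhook_url, json=test_data, timeout=5)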
@@ -809,88 +885,97 b' class IntegrationJiraForm(ReactorForm):'
809 885 def get_deletion_form(resource):
810 886 class F(ReactorForm):
811 887 application_name = wtforms.StringField(
812 'Application Name',
888 "Application Name",
813 889 filters=[strip_filter],
814 validators=[wtforms.validators.AnyOf([resource.resource_name])])
890 validators=[wtforms.validators.AnyOf([resource.resource_name])],
891 )
815 892 resource_id = wtforms.HiddenField(default=resource.resource_id)
816 submit = wtforms.SubmitField(_('Delete my application'))
817 ignore_labels = ['submit']
818 css_classes = {'submit': 'btn btn-danger'}
893 submit = wtforms.SubmitField(_("Delete my application"))
894 ignore_labels = ["submit"]
895 css_classes = {"submit": "btn btn-danger"}
819 896
820 897 return F
821 898
822 899
823 900 class ChangeApplicationOwnerForm(ReactorForm):
824 901 password = wtforms.PasswordField(
825 'Password',
902 "Password",
826 903 filters=[strip_filter],
827 validators=[old_password_validator,
828 wtforms.validators.DataRequired()])
904 validators=[old_password_validator, wtforms.validators.DataRequired()],
905 )
829 906
830 907 user_name = wtforms.StringField(
831 'New owners username',
908 "New owners username",
832 909 filters=[strip_filter],
833 validators=[found_username_validator,
834 wtforms.validators.DataRequired()])
835 submit = wtforms.SubmitField(_('Transfer ownership of application'))
836 ignore_labels = ['submit']
837 css_classes = {'submit': 'btn btn-danger'}
910 validators=[found_username_validator, wtforms.validators.DataRequired()],
911 )
912 submit = wtforms.SubmitField(_("Transfer ownership of application"))
913 ignore_labels = ["submit"]
914 css_classes = {"submit": "btn btn-danger"}
838 915
839 916
840 917 def default_filename():
841 return 'Invoice %s' % datetime.datetime.utcnow().strftime('%Y/%m')
918 return "Invoice %s" % datetime.datetime.utcnow().strftime("%Y/%m")
842 919
843 920
844 921 class FileUploadForm(ReactorForm):
845 title = wtforms.StringField('File Title',
846 default=default_filename,
847 validators=[wtforms.validators.DataRequired()])
848 file = wtforms.FileField('File')
922 title = wtforms.StringField(
923 "File Title",
924 default=default_filename,
925 validators=[wtforms.validators.DataRequired()],
926 )
927 file = wtforms.FileField("File")
849 928
850 929 def validate_file(self, field):
851 if not hasattr(field.data, 'file'):
852 raise wtforms.ValidationError('File is missing')
930 if not hasattr(field.data, "file"):
931 raise wtforms.ValidationError("File is missing")
853 932
854 submit = wtforms.SubmitField(_('Upload'))
933 submit = wtforms.SubmitField(_("Upload"))
855 934
856 935
857 936 def get_partition_deletion_form(es_indices, pg_indices):
858 937 class F(ReactorForm):
859 es_index = wtforms.SelectMultipleField('Elasticsearch',
860 choices=[(ix, '') for ix in
861 es_indices])
862 pg_index = wtforms.SelectMultipleField('pg',
863 choices=[(ix, '') for ix in
864 pg_indices])
865 confirm = wtforms.TextField('Confirm',
866 filters=[uppercase_filter, strip_filter],
867 validators=[
868 wtforms.validators.AnyOf(['CONFIRM']),
869 wtforms.validators.DataRequired()])
870 ignore_labels = ['submit']
871 css_classes = {'submit': 'btn btn-danger'}
938 es_index = wtforms.SelectMultipleField(
939 "Elasticsearch", choices=[(ix, "") for ix in es_indices]
940 )
941 pg_index = wtforms.SelectMultipleField(
942 "pg", choices=[(ix, "") for ix in pg_indices]
943 )
944 confirm = wtforms.TextField(
945 "Confirm",
946 filters=[uppercase_filter, strip_filter],
947 validators=[
948 wtforms.validators.AnyOf(["CONFIRM"]),
949 wtforms.validators.DataRequired(),
950 ],
951 )
952 ignore_labels = ["submit"]
953 css_classes = {"submit": "btn btn-danger"}
872 954
873 955 return F
874 956
875 957
876 958 class GroupCreateForm(ReactorForm):
877 959 group_name = wtforms.StringField(
878 _('Group Name'),
960 _("Group Name"),
879 961 filters=[strip_filter],
880 962 validators=[
881 963 wtforms.validators.Length(min=2, max=50),
882 964 unique_groupname_validator,
883 wtforms.validators.DataRequired()
884 ])
885 description = wtforms.StringField(_('Group description'))
965 wtforms.validators.DataRequired(),
966 ],
967 )
968 description = wtforms.StringField(_("Group description"))
886 969
887 970
888 time_choices = [(k, v['label'],) for k, v in h.time_deltas.items()]
971 time_choices = [(k, v["label"]) for k, v in h.time_deltas.items()]
889 972
890 973
891 974 class AuthTokenCreateForm(ReactorForm):
892 description = wtforms.StringField(_('Token description'))
893 expires = wtforms.SelectField('Expires',
894 coerce=lambda x: x,
895 choices=time_choices,
896 validators=[wtforms.validators.Optional()])
975 description = wtforms.StringField(_("Token description"))
976 expires = wtforms.SelectField(
977 "Expires",
978 coerce=lambda x: x,
979 choices=time_choices,
980 validators=[wtforms.validators.Optional()],
981 )
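
get_deletion_form and get_partition_deletion_form both build the form class inside a closure so a validator can capture per-request data (the resource name, the index lists). The pattern in miniature:

    import wtforms

    def get_confirm_form(expected_name):
        class F(wtforms.Form):
            name = wtforms.StringField(
                validators=[wtforms.validators.AnyOf([expected_name])]
            )
        return F

    FormCls = get_confirm_form("my-app")
    form = FormCls(data={"name": "wrong"})
    print(form.validate())  # False: the value must equal "my-app"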
@@ -24,20 +24,20 b' from appenlight_client.exceptions import get_current_traceback'
24 24
25 25
26 26 def generate_random_string(chars=10):
27 return ''.join(random.sample(string.ascii_letters * 2 + string.digits,
28 chars))
27 return "".join(random.sample(string.ascii_letters * 2 + string.digits, chars))
29 28
30 29
31 30 def to_integer_safe(input):
32 31 try:
33 32 return int(input)
34 except (TypeError, ValueError,):
33 except (TypeError, ValueError):
35 34 return None
36 35
37 36
38 37 def print_traceback(log):
39 traceback = get_current_traceback(skip=1, show_hidden_frames=True,
40 ignore_system_exceptions=True)
38 traceback = get_current_traceback(
39 skip=1, show_hidden_frames=True, ignore_system_exceptions=True
40 )
41 41 exception_text = traceback.exception
42 42 log.error(exception_text)
43 43 log.error(traceback.plaintext)
@@ -45,6 +45,5 b' def print_traceback(log):'
45 45
46 46
47 47 def get_callable(import_string):
48 import_module, indexer_callable = import_string.split(':')
49 return getattr(importlib.import_module(import_module),
50 indexer_callable)
48 import_module, indexer_callable = import_string.split(":")
49 return getattr(importlib.import_module(import_module), indexer_callable)
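
get_callable resolves "module:attribute" strings, the same convention setuptools entry points use. A standalone usage sketch:

    import importlib

    def get_callable(import_string):
        import_module, indexer_callable = import_string.split(":")
        return getattr(importlib.import_module(import_module), indexer_callable)

    join = get_callable("os.path:join")
    print(join("logs", "app"))  # logs/app on POSIX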
@@ -27,21 +27,18 b' log = logging.getLogger(__name__)'
27 27
28 28 def rate_limiting(request, resource, section, to_increment=1):
29 29 tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
30 key = REDIS_KEYS['rate_limits'][section].format(tsample,
31 resource.resource_id)
30 key = REDIS_KEYS["rate_limits"][section].format(tsample, resource.resource_id)
32 31 redis_pipeline = request.registry.redis_conn.pipeline()
33 32 redis_pipeline.incr(key, to_increment)
34 33 redis_pipeline.expire(key, 3600 * 24)
35 34 results = redis_pipeline.execute()
36 35 current_count = results[0]
37 config = ConfigService.by_key_and_section(section, 'global')
36 config = ConfigService.by_key_and_section(section, "global")
38 37 limit = config.value if config else 1000
39 38 if current_count > int(limit):
40 log.info('RATE LIMITING: {}: {}, {}'.format(
41 section, resource, current_count))
42 abort_msg = 'Rate limits are in effect for this application'
43 raise HTTPTooManyRequests(abort_msg,
44 headers={'X-AppEnlight': abort_msg})
39 log.info("RATE LIMITING: {}: {}, {}".format(section, resource, current_count))
40 abort_msg = "Rate limits are in effect for this application"
41 raise HTTPTooManyRequests(abort_msg, headers={"X-AppEnlight": abort_msg})
45 42
46 43
47 44 def check_cors(request, application, should_return=True):
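
rate_limiting is a fixed-window counter: one Redis key per (section, minute, resource), bumped with INCR and given a 24-hour TTL in a single pipeline round-trip. A standalone sketch with redis-py, assuming a local Redis server and a simplified key format:

    import datetime
    import redis

    def over_limit(conn, resource_id, limit, to_increment=1):
        # The window is the current UTC minute, like tsample above.
        tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
        key = "rate_limits:{}:{}".format(tsample.isoformat(), resource_id)
        pipe = conn.pipeline()
        pipe.incr(key, to_increment)
        pipe.expire(key, 3600 * 24)  # counters self-destruct after a day
        return pipe.execute()[0] > limit

    # conn = redis.Redis()
    # if over_limit(conn, resource_id=42, limit=1000):
    #     ...  # reject with HTTP 429, as the view code above does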
@@ -50,31 +47,34 b' def check_cors(request, application, should_return=True):'
50 47 application, otherwise return 403
51 48 """
52 49 origin_found = False
53 origin = request.headers.get('Origin')
50 origin = request.headers.get("Origin")
54 51 if should_return:
55 log.info('CORS for %s' % origin)
52 log.info("CORS for %s" % origin)
56 53 if not origin:
57 54 return False
58 for domain in application.domains.split('\n'):
55 for domain in application.domains.split("\n"):
59 56 if domain in origin:
60 57 origin_found = True
61 58 if origin_found:
62 request.response.headers.add('Access-Control-Allow-Origin', origin)
63 request.response.headers.add('XDomainRequestAllowed', '1')
64 request.response.headers.add('Access-Control-Allow-Methods',
65 'GET, POST, OPTIONS')
66 request.response.headers.add('Access-Control-Allow-Headers',
67 'Accept-Encoding, Accept-Language, '
68 'Content-Type, '
69 'Depth, User-Agent, X-File-Size, '
70 'X-Requested-With, If-Modified-Since, '
71 'X-File-Name, '
72 'Cache-Control, Host, Pragma, Accept, '
73 'Origin, Connection, '
74 'Referer, Cookie, '
75 'X-appenlight-public-api-key, '
76 'x-appenlight-public-api-key')
77 request.response.headers.add('Access-Control-Max-Age', '86400')
59 request.response.headers.add("Access-Control-Allow-Origin", origin)
60 request.response.headers.add("XDomainRequestAllowed", "1")
61 request.response.headers.add(
62 "Access-Control-Allow-Methods", "GET, POST, OPTIONS"
63 )
64 request.response.headers.add(
65 "Access-Control-Allow-Headers",
66 "Accept-Encoding, Accept-Language, "
67 "Content-Type, "
68 "Depth, User-Agent, X-File-Size, "
69 "X-Requested-With, If-Modified-Since, "
70 "X-File-Name, "
71 "Cache-Control, Host, Pragma, Accept, "
72 "Origin, Connection, "
73 "Referer, Cookie, "
74 "X-appenlight-public-api-key, "
75 "x-appenlight-public-api-key",
76 )
77 request.response.headers.add("Access-Control-Max-Age", "86400")
78 78 return request.response
79 79 else:
80 80 return HTTPForbidden()
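
check_cors grants an origin when any configured domain is a substring of the Origin header, then mirrors that origin back in Access-Control-Allow-Origin. The decision step in isolation (note it is a substring match, so example.com also covers app.example.com):

    def origin_allowed(origin, domains):
        # domains is the newline-separated list stored by CORSTextAreaField
        if not origin:
            return False
        return any(domain in origin for domain in domains.split("\n"))

    print(origin_allowed("https://app.example.com", "example.com\napi.other.io"))  # True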
@@ -42,23 +42,27 b' def hashgen(namespace, fn, to_str=compat.string_type):'
42 42 """
43 43
44 44 if namespace is None:
45 namespace = '%s:%s' % (fn.__module__, fn.__name__)
45 namespace = "%s:%s" % (fn.__module__, fn.__name__)
46 46 else:
47 namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
47 namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)
48 48
49 49 args = inspect.getargspec(fn)
50 has_self = args[0] and args[0][0] in ('self', 'cls')
50 has_self = args[0] and args[0][0] in ("self", "cls")
51 51
52 52 def generate_key(*args, **kw):
53 53 if kw:
54 54 raise ValueError(
55 55 "dogpile.cache's default key creation "
56 "function does not accept keyword arguments.")
56 "function does not accept keyword arguments."
57 )
57 58 if has_self:
58 59 args = args[1:]
59 60
60 return namespace + "|" + hashlib.sha1(
61 " ".join(map(to_str, args)).encode('utf8')).hexdigest()
61 return (
62 namespace
63 + "|"
64 + hashlib.sha1(" ".join(map(to_str, args)).encode("utf8")).hexdigest()
65 )
62 66
63 67 return generate_key
64 68
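
hashgen therefore produces keys shaped like "module:function|namespace" plus a SHA-1 over the stringified positional arguments, with self/cls dropped for methods. A quick sketch of the resulting key:

    import hashlib

    def demo_key(namespace, *args):
        # Same shape as generate_key() returns above.
        return (
            namespace
            + "|"
            + hashlib.sha1(" ".join(map(str, args)).encode("utf8")).hexdigest()
        )

    print(demo_key("appenlight.services:get_user", 42))
    # appenlight.services:get_user|<40-char sha1 hex digest>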
@@ -68,116 +72,97 b' class CacheRegions(object):'
68 72 config_redis = {"arguments": settings}
69 73
70 74 self.redis_min_1 = make_region(
71 function_key_generator=hashgen,
72 key_mangler=key_mangler).configure(
73 "dogpile.cache.redis",
74 expiration_time=60,
75 **copy.deepcopy(config_redis))
75 function_key_generator=hashgen, key_mangler=key_mangler
76 ).configure(
77 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
78 )
76 79 self.redis_min_5 = make_region(
77 function_key_generator=hashgen,
78 key_mangler=key_mangler).configure(
79 "dogpile.cache.redis",
80 expiration_time=300,
81 **copy.deepcopy(config_redis))
80 function_key_generator=hashgen, key_mangler=key_mangler
81 ).configure(
82 "dogpile.cache.redis", expiration_time=300, **copy.deepcopy(config_redis)
83 )
82 84
83 85 self.redis_min_10 = make_region(
84 function_key_generator=hashgen,
85 key_mangler=key_mangler).configure(
86 "dogpile.cache.redis",
87 expiration_time=60,
88 **copy.deepcopy(config_redis))
86 function_key_generator=hashgen, key_mangler=key_mangler
87 ).configure(
88 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
89 )
89 90
90 91 self.redis_min_60 = make_region(
91 function_key_generator=hashgen,
92 key_mangler=key_mangler).configure(
93 "dogpile.cache.redis",
94 expiration_time=3600,
95 **copy.deepcopy(config_redis))
92 function_key_generator=hashgen, key_mangler=key_mangler
93 ).configure(
94 "dogpile.cache.redis", expiration_time=3600, **copy.deepcopy(config_redis)
95 )
96 96
97 97 self.redis_sec_1 = make_region(
98 function_key_generator=hashgen,
99 key_mangler=key_mangler).configure(
100 "dogpile.cache.redis",
101 expiration_time=1,
102 **copy.deepcopy(config_redis))
98 function_key_generator=hashgen, key_mangler=key_mangler
99 ).configure(
100 "dogpile.cache.redis", expiration_time=1, **copy.deepcopy(config_redis)
101 )
103 102
104 103 self.redis_sec_5 = make_region(
105 function_key_generator=hashgen,
106 key_mangler=key_mangler).configure(
107 "dogpile.cache.redis",
108 expiration_time=5,
109 **copy.deepcopy(config_redis))
104 function_key_generator=hashgen, key_mangler=key_mangler
105 ).configure(
106 "dogpile.cache.redis", expiration_time=5, **copy.deepcopy(config_redis)
107 )
110 108
111 109 self.redis_sec_30 = make_region(
112 function_key_generator=hashgen,
113 key_mangler=key_mangler).configure(
114 "dogpile.cache.redis",
115 expiration_time=30,
116 **copy.deepcopy(config_redis))
110 function_key_generator=hashgen, key_mangler=key_mangler
111 ).configure(
112 "dogpile.cache.redis", expiration_time=30, **copy.deepcopy(config_redis)
113 )
117 114
118 115 self.redis_day_1 = make_region(
119 function_key_generator=hashgen,
120 key_mangler=key_mangler).configure(
121 "dogpile.cache.redis",
122 expiration_time=86400,
123 **copy.deepcopy(config_redis))
116 function_key_generator=hashgen, key_mangler=key_mangler
117 ).configure(
118 "dogpile.cache.redis", expiration_time=86400, **copy.deepcopy(config_redis)
119 )
124 120
125 121 self.redis_day_7 = make_region(
126 function_key_generator=hashgen,
127 key_mangler=key_mangler).configure(
122 function_key_generator=hashgen, key_mangler=key_mangler
123 ).configure(
128 124 "dogpile.cache.redis",
129 125 expiration_time=86400 * 7,
130 **copy.deepcopy(config_redis))
126 **copy.deepcopy(config_redis)
127 )
131 128
132 129 self.redis_day_30 = make_region(
133 function_key_generator=hashgen,
134 key_mangler=key_mangler).configure(
130 function_key_generator=hashgen, key_mangler=key_mangler
131 ).configure(
135 132 "dogpile.cache.redis",
136 133 expiration_time=86400 * 30,
137 **copy.deepcopy(config_redis))
134 **copy.deepcopy(config_redis)
135 )
138 136
139 137 self.memory_day_1 = make_region(
140 function_key_generator=hashgen,
141 key_mangler=key_mangler).configure(
142 "dogpile.cache.memory",
143 expiration_time=86400,
144 **copy.deepcopy(config_redis))
138 function_key_generator=hashgen, key_mangler=key_mangler
139 ).configure(
140 "dogpile.cache.memory", expiration_time=86400, **copy.deepcopy(config_redis)
141 )
145 142
146 143 self.memory_sec_1 = make_region(
147 function_key_generator=hashgen,
148 key_mangler=key_mangler).configure(
149 "dogpile.cache.memory",
150 expiration_time=1)
144 function_key_generator=hashgen, key_mangler=key_mangler
145 ).configure("dogpile.cache.memory", expiration_time=1)
151 146
152 147 self.memory_sec_5 = make_region(
153 function_key_generator=hashgen,
154 key_mangler=key_mangler).configure(
155 "dogpile.cache.memory",
156 expiration_time=5)
148 function_key_generator=hashgen, key_mangler=key_mangler
149 ).configure("dogpile.cache.memory", expiration_time=5)
157 150
158 151 self.memory_min_1 = make_region(
159 function_key_generator=hashgen,
160 key_mangler=key_mangler).configure(
161 "dogpile.cache.memory",
162 expiration_time=60)
152 function_key_generator=hashgen, key_mangler=key_mangler
153 ).configure("dogpile.cache.memory", expiration_time=60)
163 154
164 155 self.memory_min_5 = make_region(
165 function_key_generator=hashgen,
166 key_mangler=key_mangler).configure(
167 "dogpile.cache.memory",
168 expiration_time=300)
156 function_key_generator=hashgen, key_mangler=key_mangler
157 ).configure("dogpile.cache.memory", expiration_time=300)
169 158
170 159 self.memory_min_10 = make_region(
171 function_key_generator=hashgen,
172 key_mangler=key_mangler).configure(
173 "dogpile.cache.memory",
174 expiration_time=600)
160 function_key_generator=hashgen, key_mangler=key_mangler
161 ).configure("dogpile.cache.memory", expiration_time=600)
175 162
176 163 self.memory_min_60 = make_region(
177 function_key_generator=hashgen,
178 key_mangler=key_mangler).configure(
179 "dogpile.cache.memory",
180 expiration_time=3600)
164 function_key_generator=hashgen, key_mangler=key_mangler
165 ).configure("dogpile.cache.memory", expiration_time=3600)
181 166
182 167
183 168 def get_region(region):
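
Every attribute configured above is an ordinary dogpile.cache region, so callers decorate functions with cache_on_arguments(). A runnable sketch against an in-memory region (the redis_* regions behave the same but need a server):

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=60)

    @region.cache_on_arguments()
    def expensive(x):
        print("computing", x)
        return x * 2

    expensive(3)  # prints "computing 3", returns 6
    expensive(3)  # cache hit within 60s: returns 6 without printing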
@@ -5,6 +5,7 b' from pyramid.config import Configurator'
5 5
6 6 log = logging.getLogger(__name__)
7 7
8
8 9 class InspectProxy(object):
9 10 """
10 11 Proxy to the `inspect` module that allows us to use the pyramid include
@@ -17,7 +18,7 b' class InspectProxy(object):'
17 18 """
18 19 if inspect.ismethod(cyfunction):
19 20 cyfunction = cyfunction.im_func
20 return getattr(cyfunction, 'func_code')
21 return getattr(cyfunction, "func_code")
21 22
22 23 def getmodule(self, *args, **kwds):
23 24 """
@@ -40,14 +41,14 b' class InspectProxy(object):'
40 41 """
41 42 # Check if it's called to look up the source file that contains the
42 43 # magic pyramid `includeme` callable.
43 if getattr(obj, '__name__') == 'includeme':
44 if getattr(obj, "__name__") == "includeme":
44 45 try:
45 46 return inspect.getfile(obj)
46 47 except TypeError as e:
47 48 # Cython functions are not recognized as functions by the
48 49 # inspect module. We have to unpack the func_code attribute
49 50 # ourself.
50 if 'cyfunction' in e.message:
51 if "cyfunction" in e.message:
51 52 obj = self._get_cyfunction_func_code(obj)
52 53 return inspect.getfile(obj)
53 54 raise
@@ -60,33 +61,32 b' class CythonCompatConfigurator(Configurator):'
60 61 Customized configurator to replace the inspect class attribute with
61 62 a custom one that is cython compatible.
62 63 """
64
63 65 inspect = InspectProxy()
64 66
65 67
66 68 def register_appenlight_plugin(config, plugin_name, plugin_config):
67 69 def register():
68 log.warning('Registering plugin: {}'.format(plugin_name))
70 log.warning("Registering plugin: {}".format(plugin_name))
69 71 if plugin_name not in config.registry.appenlight_plugins:
70 72 config.registry.appenlight_plugins[plugin_name] = {
71 'javascript': None,
72 'static': None,
73 'css': None,
74 'celery_tasks': None,
75 'celery_beats': None,
76 'fulltext_indexer': None,
77 'sqlalchemy_migrations': None,
78 'default_values_setter': None,
79 'header_html': None,
80 'resource_types': [],
81 'url_gen': None
73 "javascript": None,
74 "static": None,
75 "css": None,
76 "celery_tasks": None,
77 "celery_beats": None,
78 "fulltext_indexer": None,
79 "sqlalchemy_migrations": None,
80 "default_values_setter": None,
81 "header_html": None,
82 "resource_types": [],
83 "url_gen": None,
82 84 }
83 config.registry.appenlight_plugins[plugin_name].update(
84 plugin_config)
85 config.registry.appenlight_plugins[plugin_name].update(plugin_config)
85 86 # inform AE what kind of resource types we have available
86 87 # so we can avoid failing when a plugin is removed but data
87 88 # is still present in the db
88 if plugin_config.get('resource_types'):
89 config.registry.resource_types.extend(
90 plugin_config['resource_types'])
89 if plugin_config.get("resource_types"):
90 config.registry.resource_types.extend(plugin_config["resource_types"])
91 91
92 config.action('appenlight_plugin={}'.format(plugin_name), register)
92 config.action("appenlight_plugin={}".format(plugin_name), register)
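
For context, a plugin would normally call this registration from its own `includeme`. A sketch with an entirely hypothetical plugin, assuming `register_appenlight_plugin` is exposed as a configurator directive (the `config.action` discriminator above suggests deferred, conflict-checked registration):

    def includeme(config):
        # hypothetical plugin wiring; keys mirror the defaults dict above
        config.register_appenlight_plugin(
            "my_plugin",
            {"celery_tasks": "my_plugin.celery", "resource_types": ["my_resource"]},
        )
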
@@ -23,20 +23,20 b' ENCRYPTION_SECRET = None'
23 23 def encrypt_fernet(value):
24 24 # avoid double encryption
25 25 # not sure if this is needed but it won't hurt too much to have this
26 if value.startswith('enc$fernet$'):
26 if value.startswith("enc$fernet$"):
27 27 return value
28 28 f = Fernet(ENCRYPTION_SECRET)
29 return 'enc$fernet${}'.format(f.encrypt(value.encode('utf8')).decode('utf8'))
29 return "enc$fernet${}".format(f.encrypt(value.encode("utf8")).decode("utf8"))
30 30
31 31
32 32 def decrypt_fernet(value):
33 parts = value.split('$', 3)
33 parts = value.split("$", 3)
34 34 if not len(parts) == 3:
35 35 # not encrypted values
36 36 return value
37 37 else:
38 38 f = Fernet(ENCRYPTION_SECRET)
39 decrypted_data = f.decrypt(parts[2].encode('utf8')).decode('utf8')
39 decrypted_data = f.decrypt(parts[2].encode("utf8")).decode("utf8")
40 40 return decrypted_data
41 41
42 42
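
A round-trip sketch of the `enc$fernet$` scheme; the key here is generated ad hoc, whereas the application reads `ENCRYPTION_SECRET` from configuration:

    from cryptography.fernet import Fernet

    f = Fernet(Fernet.generate_key())  # illustrative key, not the app's secret
    token = "enc$fernet$" + f.encrypt(b"hunter2").decode("utf8")
    prefix, scheme, payload = token.split("$", 2)  # mirrors decrypt_fernet
    assert f.decrypt(payload.encode("utf8")) == b"hunter2"
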
@@ -1,4 +1,5 b''
1 1 import collections
2
2 3 # -*- coding: utf-8 -*-
3 4
4 5 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
@@ -20,13 +21,14 b' class StupidEnum(object):'
20 21 @classmethod
21 22 def set_inverse(cls):
22 23 cls._inverse_values = dict(
23 (y, x) for x, y in vars(cls).items() if
24 not x.startswith('_') and not callable(y)
24 (y, x)
25 for x, y in vars(cls).items()
26 if not x.startswith("_") and not callable(y)
25 27 )
26 28
27 29 @classmethod
28 30 def key_from_value(cls, value):
29 if not hasattr(cls, '_inverse_values'):
31 if not hasattr(cls, "_inverse_values"):
30 32 cls.set_inverse()
31 33 return cls._inverse_values.get(value)
32 34
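
Usage sketch of the inverse-lookup pattern with a hypothetical subclass:

    class LogLevel(StupidEnum):
        DEBUG = 10
        INFO = 20

    LogLevel.key_from_value(20)  # -> "INFO", inverse map built lazily
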
@@ -25,7 +25,7 b' import functools'
25 25 import decimal
26 26 import imp
27 27
28 __all__ = ['json', 'simplejson', 'stdlibjson']
28 __all__ = ["json", "simplejson", "stdlibjson"]
29 29
30 30
31 31 def _is_aware(value):
@@ -35,8 +35,7 b' def _is_aware(value):'
35 35 The logic is described in Python's docs:
36 36 http://docs.python.org/library/datetime.html#datetime.tzinfo
37 37 """
38 return (value.tzinfo is not None
39 and value.tzinfo.utcoffset(value) is not None)
38 return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None
40 39
41 40
42 41 def _obj_dump(obj):
@@ -55,8 +54,8 b' def _obj_dump(obj):'
55 54 r = obj.isoformat()
56 55 # if obj.microsecond:
57 56 # r = r[:23] + r[26:]
58 if r.endswith('+00:00'):
59 r = r[:-6] + 'Z'
57 if r.endswith("+00:00"):
58 r = r[:-6] + "Z"
60 59 return r
61 60 elif isinstance(obj, datetime.date):
62 61 return obj.isoformat()
@@ -71,7 +70,7 b' def _obj_dump(obj):'
71 70 return r
72 71 elif isinstance(obj, set):
73 72 return list(obj)
74 elif hasattr(obj, '__json__'):
73 elif hasattr(obj, "__json__"):
75 74 if callable(obj.__json__):
76 75 return obj.__json__()
77 76 else:
@@ -83,8 +82,7 b' def _obj_dump(obj):'
83 82 # Import simplejson
84 83 try:
85 84 # import simplejson initially
86 _sj = imp.load_module('_sj', *imp.find_module('simplejson'))
87
85 _sj = imp.load_module("_sj", *imp.find_module("simplejson"))
88 86
89 87 def extended_encode(obj):
90 88 try:
@@ -93,22 +91,21 b' try:'
93 91 pass
94 92 raise TypeError("%r is not JSON serializable" % (obj,))
95 93
96
97 94 # we handle decimals on our own; it makes the behavior of json and
98 95 # simplejson uniform
99 sj_version = [int(x) for x in _sj.__version__.split('.')]
96 sj_version = [int(x) for x in _sj.__version__.split(".")]
100 97 major, minor = sj_version[0], sj_version[1]
101 98 if major < 2 or (major == 2 and minor < 1):
102 99 # simplejson < 2.1 doesn't support use_decimal
103 _sj.dumps = functools.partial(
104 _sj.dumps, default=extended_encode)
105 _sj.dump = functools.partial(
106 _sj.dump, default=extended_encode)
100 _sj.dumps = functools.partial(_sj.dumps, default=extended_encode)
101 _sj.dump = functools.partial(_sj.dump, default=extended_encode)
107 102 else:
108 103 _sj.dumps = functools.partial(
109 _sj.dumps, default=extended_encode, use_decimal=False)
104 _sj.dumps, default=extended_encode, use_decimal=False
105 )
110 106 _sj.dump = functools.partial(
111 _sj.dump, default=extended_encode, use_decimal=False)
107 _sj.dump, default=extended_encode, use_decimal=False
108 )
112 109 simplejson = _sj
113 110
114 111 except ImportError:
@@ -117,8 +114,7 b' except ImportError:'
117 114
118 115 try:
119 116 # simplejson not found try out regular json module
120 _json = imp.load_module('_json', *imp.find_module('json'))
121
117 _json = imp.load_module("_json", *imp.find_module("json"))
122 118
123 119 # extended JSON encoder for json
124 120 class ExtendedEncoder(_json.JSONEncoder):
@@ -129,7 +125,6 b' try:'
129 125 pass
130 126 raise TypeError("%r is not JSON serializable" % (obj,))
131 127
132
133 128 # monkey-patch JSON encoder to use extended version
134 129 _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
135 130 _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)
@@ -145,4 +140,4 b' if simplejson:'
145 140 elif _json:
146 141 json = _json
147 142 else:
148 raise ImportError('Could not find any json modules')
143 raise ImportError("Could not find any json modules")
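
Whichever backend wins, the net effect is the same: aware UTC datetimes serialize with a `Z` suffix and objects exposing `__json__` serialize through it. A sketch (the import path appears elsewhere in this changeset):

    import datetime
    from appenlight.lib.ext_json import json

    json.dumps({"at": datetime.datetime(2017, 1, 1, tzinfo=datetime.timezone.utc)})
    # -> '{"at": "2017-01-01T00:00:00Z"}'
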
@@ -26,94 +26,135 b' _ = lambda x: x'
26 26
27 27 time_deltas = OrderedDict()
28 28
29 time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1),
30 'label': '1 minute', 'minutes': 1}
31
32 time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5),
33 'label': '5 minutes', 'minutes': 5}
34 time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30),
35 'label': '30 minutes', 'minutes': 30}
36 time_deltas['1h'] = {'delta': datetime.timedelta(hours=1),
37 'label': '60 minutes', 'minutes': 60}
38 time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours',
39 'minutes': 60 * 4}
40 time_deltas['12h'] = {'delta': datetime.timedelta(hours=12),
41 'label': '12 hours', 'minutes': 60 * 12}
42 time_deltas['24h'] = {'delta': datetime.timedelta(hours=24),
43 'label': '24 hours', 'minutes': 60 * 24}
44 time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days',
45 'minutes': 60 * 24 * 3}
46 time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days',
47 'minutes': 60 * 24 * 7}
48 time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days',
49 'minutes': 60 * 24 * 14}
50 time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days',
51 'minutes': 60 * 24 * 31}
52 time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3),
53 'label': '3 months',
54 'minutes': 60 * 24 * 31 * 3}
55 time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6),
56 'label': '6 months',
57 'minutes': 60 * 24 * 31 * 6}
58 time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12),
59 'label': '12 months',
60 'minutes': 60 * 24 * 31 * 12}
29 time_deltas["1m"] = {
30 "delta": datetime.timedelta(minutes=1),
31 "label": "1 minute",
32 "minutes": 1,
33 }
34
35 time_deltas["5m"] = {
36 "delta": datetime.timedelta(minutes=5),
37 "label": "5 minutes",
38 "minutes": 5,
39 }
40 time_deltas["30m"] = {
41 "delta": datetime.timedelta(minutes=30),
42 "label": "30 minutes",
43 "minutes": 30,
44 }
45 time_deltas["1h"] = {
46 "delta": datetime.timedelta(hours=1),
47 "label": "60 minutes",
48 "minutes": 60,
49 }
50 time_deltas["4h"] = {
51 "delta": datetime.timedelta(hours=4),
52 "label": "4 hours",
53 "minutes": 60 * 4,
54 }
55 time_deltas["12h"] = {
56 "delta": datetime.timedelta(hours=12),
57 "label": "12 hours",
58 "minutes": 60 * 12,
59 }
60 time_deltas["24h"] = {
61 "delta": datetime.timedelta(hours=24),
62 "label": "24 hours",
63 "minutes": 60 * 24,
64 }
65 time_deltas["3d"] = {
66 "delta": datetime.timedelta(days=3),
67 "label": "3 days",
68 "minutes": 60 * 24 * 3,
69 }
70 time_deltas["1w"] = {
71 "delta": datetime.timedelta(days=7),
72 "label": "7 days",
73 "minutes": 60 * 24 * 7,
74 }
75 time_deltas["2w"] = {
76 "delta": datetime.timedelta(days=14),
77 "label": "14 days",
78 "minutes": 60 * 24 * 14,
79 }
80 time_deltas["1M"] = {
81 "delta": datetime.timedelta(days=31),
82 "label": "31 days",
83 "minutes": 60 * 24 * 31,
84 }
85 time_deltas["3M"] = {
86 "delta": datetime.timedelta(days=31 * 3),
87 "label": "3 months",
88 "minutes": 60 * 24 * 31 * 3,
89 }
90 time_deltas["6M"] = {
91 "delta": datetime.timedelta(days=31 * 6),
92 "label": "6 months",
93 "minutes": 60 * 24 * 31 * 6,
94 }
95 time_deltas["12M"] = {
96 "delta": datetime.timedelta(days=31 * 12),
97 "label": "12 months",
98 "minutes": 60 * 24 * 31 * 12,
99 }
61 100
62 101 # used in json representation
63 time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']})
64 for k, v in time_deltas.items()])
65 FlashMsg = namedtuple('FlashMsg', ['msg', 'level'])
102 time_options = dict(
103 [
104 (k, {"label": v["label"], "minutes": v["minutes"]})
105 for k, v in time_deltas.items()
106 ]
107 )
108 FlashMsg = namedtuple("FlashMsg", ["msg", "level"])
66 109
67 110
68 111 def get_flash(request):
69 112 messages = []
70 113 messages.extend(
71 [FlashMsg(msg, 'error')
72 for msg in request.session.peek_flash('error')])
73 messages.extend([FlashMsg(msg, 'warning')
74 for msg in request.session.peek_flash('warning')])
114 [FlashMsg(msg, "error") for msg in request.session.peek_flash("error")]
115 )
75 116 messages.extend(
76 [FlashMsg(msg, 'notice') for msg in request.session.peek_flash()])
117 [FlashMsg(msg, "warning") for msg in request.session.peek_flash("warning")]
118 )
119 messages.extend([FlashMsg(msg, "notice") for msg in request.session.peek_flash()])
77 120 return messages
78 121
79 122
80 123 def clear_flash(request):
81 request.session.pop_flash('error')
82 request.session.pop_flash('warning')
124 request.session.pop_flash("error")
125 request.session.pop_flash("warning")
83 126 request.session.pop_flash()
84 127
85 128
86 129 def get_type_formatted_flash(request):
87 return [{'msg': message.msg, 'type': message.level}
88 for message in get_flash(request)]
130 return [
131 {"msg": message.msg, "type": message.level} for message in get_flash(request)
132 ]
89 133
90 134
91 135 def gen_pagination_headers(request, paginator):
92 136 headers = {
93 'x-total-count': str(paginator.item_count),
94 'x-current-page': str(paginator.page),
95 'x-items-per-page': str(paginator.items_per_page)
137 "x-total-count": str(paginator.item_count),
138 "x-current-page": str(paginator.page),
139 "x-items-per-page": str(paginator.items_per_page),
96 140 }
97 141 params_dict = request.GET.dict_of_lists()
98 142 last_page_params = copy.deepcopy(params_dict)
99 last_page_params['page'] = paginator.last_page or 1
143 last_page_params["page"] = paginator.last_page or 1
100 144 first_page_params = copy.deepcopy(params_dict)
101 first_page_params.pop('page', None)
145 first_page_params.pop("page", None)
102 146 next_page_params = copy.deepcopy(params_dict)
103 next_page_params['page'] = paginator.next_page or paginator.last_page or 1
147 next_page_params["page"] = paginator.next_page or paginator.last_page or 1
104 148 prev_page_params = copy.deepcopy(params_dict)
105 prev_page_params['page'] = paginator.previous_page or 1
149 prev_page_params["page"] = paginator.previous_page or 1
106 150 lp_url = request.current_route_url(_query=last_page_params)
107 151 fp_url = request.current_route_url(_query=first_page_params)
108 links = [
109 'rel="last", <{}>'.format(lp_url),
110 'rel="first", <{}>'.format(fp_url),
111 ]
152 links = ['rel="last", <{}>'.format(lp_url), 'rel="first", <{}>'.format(fp_url)]
112 153 if first_page_params != prev_page_params:
113 154 prev_url = request.current_route_url(_query=prev_page_params)
114 155 links.append('rel="prev", <{}>'.format(prev_url))
115 156 if last_page_params != next_page_params:
116 157 next_url = request.current_route_url(_query=next_page_params)
117 158 links.append('rel="next", <{}>'.format(next_url))
118 headers['link'] = '; '.join(links)
159 headers["link"] = "; ".join(links)
119 160 return headers
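
The assembled `link` header is a semicolon-joined list of `rel`/URL pairs; a sketch with illustrative URLs:

    links = [
        'rel="last", <https://example.com/logs?page=9>',
        'rel="first", <https://example.com/logs>',
    ]
    "; ".join(links)
    # -> 'rel="last", <https://example.com/logs?page=9>; rel="first", <https://example.com/logs>'
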
@@ -18,17 +18,21 b' import re'
18 18 from appenlight.lib.ext_json import json
19 19 from jinja2 import Markup, escape, evalcontextfilter
20 20
21 _paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
21 _paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")
22 22
23 23
24 24 @evalcontextfilter
25 25 def nl2br(eval_ctx, value):
26 26 if eval_ctx.autoescape:
27 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br>\n'))
28 for p in _paragraph_re.split(escape(value)))
27 result = "\n\n".join(
28 "<p>%s</p>" % p.replace("\n", Markup("<br>\n"))
29 for p in _paragraph_re.split(escape(value))
30 )
29 31 else:
30 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n')
31 for p in _paragraph_re.split(escape(value)))
32 result = "\n\n".join(
33 "<p>%s</p>" % p.replace("\n", "<br>\n")
34 for p in _paragraph_re.split(escape(value))
35 )
32 36 if eval_ctx.autoescape:
33 37 result = Markup(result)
34 38 return result
@@ -36,11 +40,14 b' def nl2br(eval_ctx, value):'
36 40
37 41 @evalcontextfilter
38 42 def toJSONUnsafe(eval_ctx, value):
39 encoded = json.dumps(value).replace('&', '\\u0026') \
40 .replace('<', '\\u003c') \
41 .replace('>', '\\u003e') \
42 .replace('>', '\\u003e') \
43 .replace('"', '\\u0022') \
44 .replace("'", '\\u0027') \
45 .replace(r'\n', '/\\\n')
43 encoded = (
44 json.dumps(value)
45 .replace("&", "\\u0026")
46 .replace("<", "\\u003c")
47 .replace(">", "\\u003e")
48 .replace(">", "\\u003e")
49 .replace('"', "\\u0022")
50 .replace("'", "\\u0027")
51 .replace(r"\n", "/\\\n")
52 )
46 53 return Markup("'%s'" % encoded)
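
The chained replaces make the JSON safe to inline inside an HTML `<script>` block (the second `>` replace, carried over from the original, is a harmless no-op). A reduced sketch of the escaping:

    import json

    encoded = (
        json.dumps({"msg": "<script>alert('x')</script>"})
        .replace("&", "\\u0026")
        .replace("<", "\\u003c")
        .replace(">", "\\u003e")
        .replace('"', "\\u0022")
        .replace("'", "\\u0027")
    )
    # all HTML-significant characters are now \uXXXX escapes
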
@@ -17,11 +17,30 b''
17 17 import json
18 18 import logging
19 19
20 ignored_keys = ['args', 'asctime', 'created', 'exc_info', 'exc_text',
21 'filename', 'funcName', 'levelname', 'levelno', 'lineno',
22 'message', 'module', 'msecs', 'msg', 'name', 'pathname',
23 'process', 'processName', 'relativeCreated', 'stack_info',
24 'thread', 'threadName']
20 ignored_keys = [
21 "args",
22 "asctime",
23 "created",
24 "exc_info",
25 "exc_text",
26 "filename",
27 "funcName",
28 "levelname",
29 "levelno",
30 "lineno",
31 "message",
32 "module",
33 "msecs",
34 "msg",
35 "name",
36 "pathname",
37 "process",
38 "processName",
39 "relativeCreated",
40 "stack_info",
41 "thread",
42 "threadName",
43 ]
25 44
26 45
27 46 class JSONFormatter(logging.Formatter):
@@ -41,7 +60,7 b' class JSONFormatter(logging.Formatter):'
41 60 record.message = record.getMessage()
42 61 log_dict = vars(record)
43 62 keys = [k for k in log_dict.keys() if k not in ignored_keys]
44 payload = {'message': record.message}
63 payload = {"message": record.message}
45 64 payload.update({k: log_dict[k] for k in keys})
46 65 record.message = json.dumps(payload, default=lambda x: str(x))
47 66
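
A wiring sketch for the formatter; the exact output depends on parts of `format` not shown in this hunk, so take the emitted line as the intent rather than a guarantee:

    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(JSONFormatter("%(message)s"))
    demo_log = logging.getLogger("json-demo")
    demo_log.addHandler(handler)
    demo_log.warning("user logged in", extra={"user_id": 42})
    # intended payload: {"message": "user logged in", "user_id": 42}
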
@@ -14,52 +14,56 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 BASE = 'appenlight:data:{}'
17 BASE = "appenlight:data:{}"
18 18
19 19 REDIS_KEYS = {
20 'tasks': {
21 'add_reports_lock': BASE.format('add_reports_lock:{}'),
22 'add_logs_lock': BASE.format('add_logs_lock:{}'),
20 "tasks": {
21 "add_reports_lock": BASE.format("add_reports_lock:{}"),
22 "add_logs_lock": BASE.format("add_logs_lock:{}"),
23 23 },
24 'counters': {
25 'events_per_minute_per_user': BASE.format(
26 'events_per_minute_per_user:{}:{}'),
27 'reports_per_minute': BASE.format('reports_per_minute:{}'),
28 'reports_per_hour_per_app': BASE.format(
29 'reports_per_hour_per_app:{}:{}'),
30 'reports_per_type': BASE.format('reports_per_type:{}'),
31 'logs_per_minute': BASE.format('logs_per_minute:{}'),
32 'logs_per_hour_per_app': BASE.format(
33 'logs_per_hour_per_app:{}:{}'),
34 'metrics_per_minute': BASE.format('metrics_per_minute:{}'),
35 'metrics_per_hour_per_app': BASE.format(
36 'metrics_per_hour_per_app:{}:{}'),
37 'report_group_occurences': BASE.format('report_group_occurences:{}'),
38 'report_group_occurences_alerting': BASE.format(
39 'report_group_occurences_alerting:{}'),
40 'report_group_occurences_10th': BASE.format(
41 'report_group_occurences_10th:{}'),
42 'report_group_occurences_100th': BASE.format(
43 'report_group_occurences_100th:{}'),
24 "counters": {
25 "events_per_minute_per_user": BASE.format("events_per_minute_per_user:{}:{}"),
26 "reports_per_minute": BASE.format("reports_per_minute:{}"),
27 "reports_per_hour_per_app": BASE.format("reports_per_hour_per_app:{}:{}"),
28 "reports_per_type": BASE.format("reports_per_type:{}"),
29 "logs_per_minute": BASE.format("logs_per_minute:{}"),
30 "logs_per_hour_per_app": BASE.format("logs_per_hour_per_app:{}:{}"),
31 "metrics_per_minute": BASE.format("metrics_per_minute:{}"),
32 "metrics_per_hour_per_app": BASE.format("metrics_per_hour_per_app:{}:{}"),
33 "report_group_occurences": BASE.format("report_group_occurences:{}"),
34 "report_group_occurences_alerting": BASE.format(
35 "report_group_occurences_alerting:{}"
36 ),
37 "report_group_occurences_10th": BASE.format("report_group_occurences_10th:{}"),
38 "report_group_occurences_100th": BASE.format(
39 "report_group_occurences_100th:{}"
40 ),
44 41 },
45 'rate_limits': {
46 'per_application_reports_rate_limit': BASE.format(
47 'per_application_reports_limit:{}:{}'),
48 'per_application_logs_rate_limit': BASE.format(
49 'per_application_logs_rate_limit:{}:{}'),
50 'per_application_metrics_rate_limit': BASE.format(
51 'per_application_metrics_rate_limit:{}:{}'),
42 "rate_limits": {
43 "per_application_reports_rate_limit": BASE.format(
44 "per_application_reports_limit:{}:{}"
45 ),
46 "per_application_logs_rate_limit": BASE.format(
47 "per_application_logs_rate_limit:{}:{}"
48 ),
49 "per_application_metrics_rate_limit": BASE.format(
50 "per_application_metrics_rate_limit:{}:{}"
51 ),
52 52 },
53 'apps_that_got_new_data_per_hour': BASE.format('apps_that_got_new_data_per_hour:{}'),
54 'apps_that_had_reports': BASE.format('apps_that_had_reports'),
55 'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'),
56 'apps_that_had_reports_alerting': BASE.format(
57 'apps_that_had_reports_alerting'),
58 'apps_that_had_error_reports_alerting': BASE.format(
59 'apps_that_had_error_reports_alerting'),
60 'reports_to_notify_per_type_per_app': BASE.format(
61 'reports_to_notify_per_type_per_app:{}:{}'),
62 'reports_to_notify_per_type_per_app_alerting': BASE.format(
63 'reports_to_notify_per_type_per_app_alerting:{}:{}'),
64 'seen_tag_list': BASE.format('seen_tag_list')
53 "apps_that_got_new_data_per_hour": BASE.format(
54 "apps_that_got_new_data_per_hour:{}"
55 ),
56 "apps_that_had_reports": BASE.format("apps_that_had_reports"),
57 "apps_that_had_error_reports": BASE.format("apps_that_had_error_reports"),
58 "apps_that_had_reports_alerting": BASE.format("apps_that_had_reports_alerting"),
59 "apps_that_had_error_reports_alerting": BASE.format(
60 "apps_that_had_error_reports_alerting"
61 ),
62 "reports_to_notify_per_type_per_app": BASE.format(
63 "reports_to_notify_per_type_per_app:{}:{}"
64 ),
65 "reports_to_notify_per_type_per_app_alerting": BASE.format(
66 "reports_to_notify_per_type_per_app_alerting:{}:{}"
67 ),
68 "seen_tag_list": BASE.format("seen_tag_list"),
65 69 }
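
Each entry is a two-stage template: `BASE` stamps the key family once at import time and the remaining `{}` slots are filled per use. For example:

    REDIS_KEYS["counters"]["reports_per_minute"].format("2017-05-01T10:00")
    # -> 'appenlight:data:reports_per_minute:2017-05-01T10:00'
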
@@ -54,11 +54,11 b' def unsafe_json_body(request):'
54 54 try:
55 55 return request.json_body
56 56 except ValueError:
57 raise JSONException('Incorrect JSON')
57 raise JSONException("Incorrect JSON")
58 58
59 59
60 60 def get_user(request):
61 if not request.path_info.startswith('/static'):
61 if not request.path_info.startswith("/static"):
62 62 user_id = unauthenticated_userid(request)
63 63 try:
64 64 user_id = int(user_id)
@@ -68,8 +68,10 b' def get_user(request):'
68 68 if user_id:
69 69 user = UserService.by_id(user_id)
70 70 if user:
71 request.environ['appenlight.username'] = '%d:%s' % (
72 user_id, user.user_name)
71 request.environ["appenlight.username"] = "%d:%s" % (
72 user_id,
73 user.user_name,
74 )
73 75 return user
74 76 else:
75 77 return None
@@ -85,7 +87,7 b' def add_flash_to_headers(request, clear=True):'
85 87 flash queue
86 88 """
87 89 flash_msgs = helpers.get_type_formatted_flash(request)
88 request.response.headers['x-flash-messages'] = json.dumps(flash_msgs)
90 request.response.headers["x-flash-messages"] = json.dumps(flash_msgs)
89 91 helpers.clear_flash(request)
90 92
91 93
@@ -94,42 +96,36 b' def get_authomatic(request):'
94 96 # authomatic social auth
95 97 authomatic_conf = {
96 98 # callback http://yourapp.com/social_auth/twitter
97 'twitter': {
98 'class_': oauth1.Twitter,
99 'consumer_key': settings.get('authomatic.pr.twitter.key', ''),
100 'consumer_secret': settings.get('authomatic.pr.twitter.secret',
101 ''),
99 "twitter": {
100 "class_": oauth1.Twitter,
101 "consumer_key": settings.get("authomatic.pr.twitter.key", ""),
102 "consumer_secret": settings.get("authomatic.pr.twitter.secret", ""),
102 103 },
103 104 # callback http://yourapp.com/social_auth/facebook
104 'facebook': {
105 'class_': oauth2.Facebook,
106 'consumer_key': settings.get('authomatic.pr.facebook.app_id', ''),
107 'consumer_secret': settings.get('authomatic.pr.facebook.secret',
108 ''),
109 'scope': ['email'],
105 "facebook": {
106 "class_": oauth2.Facebook,
107 "consumer_key": settings.get("authomatic.pr.facebook.app_id", ""),
108 "consumer_secret": settings.get("authomatic.pr.facebook.secret", ""),
109 "scope": ["email"],
110 110 },
111 111 # callback http://yourapp.com/social_auth/google
112 'google': {
113 'class_': oauth2.Google,
114 'consumer_key': settings.get('authomatic.pr.google.key', ''),
115 'consumer_secret': settings.get(
116 'authomatic.pr.google.secret', ''),
117 'scope': ['profile', 'email'],
112 "google": {
113 "class_": oauth2.Google,
114 "consumer_key": settings.get("authomatic.pr.google.key", ""),
115 "consumer_secret": settings.get("authomatic.pr.google.secret", ""),
116 "scope": ["profile", "email"],
118 117 },
119 'github': {
120 'class_': oauth2.GitHub,
121 'consumer_key': settings.get('authomatic.pr.github.key', ''),
122 'consumer_secret': settings.get(
123 'authomatic.pr.github.secret', ''),
124 'scope': ['repo', 'public_repo', 'user:email'],
125 'access_headers': {'User-Agent': 'AppEnlight'},
118 "github": {
119 "class_": oauth2.GitHub,
120 "consumer_key": settings.get("authomatic.pr.github.key", ""),
121 "consumer_secret": settings.get("authomatic.pr.github.secret", ""),
122 "scope": ["repo", "public_repo", "user:email"],
123 "access_headers": {"User-Agent": "AppEnlight"},
124 },
125 "bitbucket": {
126 "class_": oauth1.Bitbucket,
127 "consumer_key": settings.get("authomatic.pr.bitbucket.key", ""),
128 "consumer_secret": settings.get("authomatic.pr.bitbucket.secret", ""),
126 129 },
127 'bitbucket': {
128 'class_': oauth1.Bitbucket,
129 'consumer_key': settings.get('authomatic.pr.bitbucket.key', ''),
130 'consumer_secret': settings.get(
131 'authomatic.pr.bitbucket.secret', '')
132 }
133 130 }
134 return Authomatic(
135 config=authomatic_conf, secret=settings['authomatic.secret'])
131 return Authomatic(config=authomatic_conf, secret=settings["authomatic.secret"])
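
The factory reads provider credentials from deployment settings; a sketch of the keys it expects, with illustrative values:

    settings = {
        "authomatic.secret": "long-random-string",
        "authomatic.pr.github.key": "oauth-client-id",
        "authomatic.pr.github.secret": "oauth-client-secret",
        # twitter/facebook/google/bitbucket follow the same naming pattern
    }
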
@@ -52,13 +52,13 b' class RuleBase(object):'
52 52 :param field_name:
53 53 :return:
54 54 """
55 parts = field_name.split(':') if field_name else []
55 parts = field_name.split(":") if field_name else []
56 56 found = struct
57 57 while parts:
58 58 current_key = parts.pop(0)
59 59 found = found.get(current_key)
60 60 if not found and parts:
61 raise KeyNotFoundException('Key not found in structure')
61 raise KeyNotFoundException("Key not found in structure")
62 62 return found
63 63
64 64 @classmethod
@@ -72,13 +72,13 b' class RuleBase(object):'
72 72 :param field_name:
73 73 :return:
74 74 """
75 parts = field_name.split(':')
75 parts = field_name.split(":")
76 76 found = struct
77 77 while parts:
78 78 current_key = parts.pop(0)
79 79 found = getattr(found, current_key, None)
80 80 if not found and parts:
81 raise KeyNotFoundException('Key not found in structure')
81 raise KeyNotFoundException("Key not found in structure")
82 82 return found
83 83
84 84 def normalized_type(self, field, value):
@@ -89,28 +89,32 b' class RuleBase(object):'
89 89 """
90 90 f_type = self.type_matrix.get(field)
91 91 if f_type:
92 cast_to = f_type['type']
92 cast_to = f_type["type"]
93 93 else:
94 raise UnknownTypeException('Unknown type')
94 raise UnknownTypeException("Unknown type")
95 95
96 96 if value is None:
97 97 return None
98 98
99 99 try:
100 if cast_to == 'int':
100 if cast_to == "int":
101 101 return int(value)
102 elif cast_to == 'float':
102 elif cast_to == "float":
103 103 return float(value)
104 elif cast_to == 'unicode':
104 elif cast_to == "unicode":
105 105 return str(value)
106 106 except ValueError as exc:
107 107 raise InvalidValueException(exc)
108 108
109 109
110 110 class Rule(RuleBase):
111 def __init__(self, config, type_matrix,
112 struct_getter=RuleBase.default_dict_struct_getter,
113 config_manipulator=None):
111 def __init__(
112 self,
113 config,
114 type_matrix,
115 struct_getter=RuleBase.default_dict_struct_getter,
116 config_manipulator=None,
117 ):
114 118 """
115 119
116 120 :param config: dict - contains rule configuration
@@ -159,8 +163,9 b' class Rule(RuleBase):'
159 163 config_manipulator(self)
160 164
161 165 def subrule_check(self, rule_config, struct):
162 rule = Rule(rule_config, self.type_matrix,
163 config_manipulator=self.config_manipulator)
166 rule = Rule(
167 rule_config, self.type_matrix, config_manipulator=self.config_manipulator
168 )
164 169 return rule.match(struct)
165 170
166 171 def match(self, struct):
@@ -169,32 +174,41 b' class Rule(RuleBase):'
169 174 First tries report value, then tests tags in not found, then finally
170 175 report group
171 176 """
172 field_name = self.config.get('field')
173 test_value = self.config.get('value')
177 field_name = self.config.get("field")
178 test_value = self.config.get("value")
174 179
175 180 if not field_name:
176 181 return False
177 182
178 if field_name == '__AND__':
179 rule = AND(self.config['rules'], self.type_matrix,
180 config_manipulator=self.config_manipulator)
183 if field_name == "__AND__":
184 rule = AND(
185 self.config["rules"],
186 self.type_matrix,
187 config_manipulator=self.config_manipulator,
188 )
181 189 return rule.match(struct)
182 elif field_name == '__OR__':
183 rule = OR(self.config['rules'], self.type_matrix,
184 config_manipulator=self.config_manipulator)
190 elif field_name == "__OR__":
191 rule = OR(
192 self.config["rules"],
193 self.type_matrix,
194 config_manipulator=self.config_manipulator,
195 )
185 196 return rule.match(struct)
186 elif field_name == '__NOT__':
187 rule = NOT(self.config['rules'], self.type_matrix,
188 config_manipulator=self.config_manipulator)
197 elif field_name == "__NOT__":
198 rule = NOT(
199 self.config["rules"],
200 self.type_matrix,
201 config_manipulator=self.config_manipulator,
202 )
189 203 return rule.match(struct)
190 204
191 205 if test_value is None:
192 206 return False
193 207
194 208 try:
195 struct_value = self.normalized_type(field_name,
196 self.struct_getter(struct,
197 field_name))
209 struct_value = self.normalized_type(
210 field_name, self.struct_getter(struct, field_name)
211 )
198 212 except (UnknownTypeException, InvalidValueException) as exc:
199 213 log.error(str(exc))
200 214 return False
@@ -205,24 +219,23 b' class Rule(RuleBase):'
205 219 log.error(str(exc))
206 220 return False
207 221
208 if self.config['op'] not in ('startswith', 'endswith', 'contains'):
222 if self.config["op"] not in ("startswith", "endswith", "contains"):
209 223 try:
210 return getattr(operator,
211 self.config['op'])(struct_value, test_value)
224 return getattr(operator, self.config["op"])(struct_value, test_value)
212 225 except TypeError:
213 226 return False
214 elif self.config['op'] == 'startswith':
227 elif self.config["op"] == "startswith":
215 228 return struct_value.startswith(test_value)
216 elif self.config['op'] == 'endswith':
229 elif self.config["op"] == "endswith":
217 230 return struct_value.endswith(test_value)
218 elif self.config['op'] == 'contains':
231 elif self.config["op"] == "contains":
219 232 return test_value in struct_value
220 raise BadConfigException('Invalid configuration, '
221 'unknown operator: {}'.format(self.config))
233 raise BadConfigException(
234 "Invalid configuration, " "unknown operator: {}".format(self.config)
235 )
222 236
223 237 def __repr__(self):
224 return '<Rule {} {}>'.format(self.config.get('field'),
225 self.config.get('value'))
238 return "<Rule {} {}>".format(self.config.get("field"), self.config.get("value"))
226 239
227 240
228 241 class AND(Rule):
@@ -231,8 +244,7 b' class AND(Rule):'
231 244 self.rules = rules
232 245
233 246 def match(self, struct):
234 return all([self.subrule_check(r_conf, struct) for r_conf
235 in self.rules])
247 return all([self.subrule_check(r_conf, struct) for r_conf in self.rules])
236 248
237 249
238 250 class NOT(Rule):
@@ -241,8 +253,7 b' class NOT(Rule):'
241 253 self.rules = rules
242 254
243 255 def match(self, struct):
244 return all([not self.subrule_check(r_conf, struct) for r_conf
245 in self.rules])
256 return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules])
246 257
247 258
248 259 class OR(Rule):
@@ -251,14 +262,12 b' class OR(Rule):'
251 262 self.rules = rules
252 263
253 264 def match(self, struct):
254 return any([self.subrule_check(r_conf, struct) for r_conf
255 in self.rules])
265 return any([self.subrule_check(r_conf, struct) for r_conf in self.rules])
256 266
257 267
258 268 class RuleService(object):
259 269 @staticmethod
260 def rule_from_config(config, field_mappings, labels_dict,
261 manipulator_func=None):
270 def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None):
262 271 """
263 272 Returns modified rule with manipulator function
264 273 By default manipulator function replaces field id from labels_dict
@@ -270,28 +279,33 b' class RuleService(object):'
270 279 """
271 280 rev_map = {}
272 281 for k, v in labels_dict.items():
273 rev_map[(v['agg'], v['key'],)] = k
282 rev_map[(v["agg"], v["key"])] = k
274 283
275 284 if manipulator_func is None:
285
276 286 def label_rewriter_func(rule):
277 field = rule.config.get('field')
278 if not field or rule.config['field'] in ['__OR__',
279 '__AND__', '__NOT__']:
287 field = rule.config.get("field")
288 if not field or rule.config["field"] in [
289 "__OR__",
290 "__AND__",
291 "__NOT__",
292 ]:
280 293 return
281 294
282 to_map = field_mappings.get(rule.config['field'])
295 to_map = field_mappings.get(rule.config["field"])
283 296
284 297 # we need to replace the series field with _AE_NOT_FOUND_ so we don't
285 298 # accidentally match some other field which happens to have the series that
286 299 # was used when the alert was created
287 300 if to_map:
288 to_replace = rev_map.get((to_map['agg'], to_map['key'],),
289 '_AE_NOT_FOUND_')
301 to_replace = rev_map.get(
302 (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_"
303 )
290 304 else:
291 to_replace = '_AE_NOT_FOUND_'
305 to_replace = "_AE_NOT_FOUND_"
292 306
293 rule.config['field'] = to_replace
294 rule.type_matrix[to_replace] = {"type": 'float'}
307 rule.config["field"] = to_replace
308 rule.type_matrix[to_replace] = {"type": "float"}
295 309
296 310 manipulator_func = label_rewriter_func
297 311
@@ -14,8 +14,9 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 from ziggurat_foundations.models.services.external_identity import \
18 ExternalIdentityService
17 from ziggurat_foundations.models.services.external_identity import (
18 ExternalIdentityService,
19 )
19 20 from appenlight.models.external_identity import ExternalIdentity
20 21
21 22
@@ -24,37 +25,38 b' def handle_social_data(request, user, social_data):'
24 25 update_identity = False
25 26
26 27 extng_id = ExternalIdentityService.by_external_id_and_provider(
27 social_data['user']['id'],
28 social_data['credentials'].provider_name
28 social_data["user"]["id"], social_data["credentials"].provider_name
29 29 )
30 30
31 31 # fix legacy accounts with wrong google ID
32 if not extng_id and social_data['credentials'].provider_name == 'google':
32 if not extng_id and social_data["credentials"].provider_name == "google":
33 33 extng_id = ExternalIdentityService.by_external_id_and_provider(
34 social_data['user']['email'],
35 social_data['credentials'].provider_name
34 social_data["user"]["email"], social_data["credentials"].provider_name
36 35 )
37 36
38 37 if extng_id:
39 38 extng_id.delete()
40 39 update_identity = True
41 40
42 if not social_data['user']['id']:
41 if not social_data["user"]["id"]:
43 42 request.session.flash(
44 'No external user id found? Perhaps permissions for '
45 'authentication are set incorrectly', 'error')
43 "No external user id found? Perhaps permissions for "
44 "authentication are set incorrectly",
45 "error",
46 )
46 47 return False
47 48
48 49 if not extng_id or update_identity:
49 50 if not update_identity:
50 request.session.flash('Your external identity is now '
51 'connected with your account')
51 request.session.flash(
52 "Your external identity is now " "connected with your account"
53 )
52 54 ex_identity = ExternalIdentity()
53 ex_identity.external_id = social_data['user']['id']
54 ex_identity.external_user_name = social_data['user']['user_name']
55 ex_identity.provider_name = social_data['credentials'].provider_name
56 ex_identity.access_token = social_data['credentials'].token
57 ex_identity.token_secret = social_data['credentials'].token_secret
58 ex_identity.alt_token = social_data['credentials'].refresh_token
55 ex_identity.external_id = social_data["user"]["id"]
56 ex_identity.external_user_name = social_data["user"]["user_name"]
57 ex_identity.provider_name = social_data["credentials"].provider_name
58 ex_identity.access_token = social_data["credentials"].token
59 ex_identity.token_secret = social_data["credentials"].token_secret
60 ex_identity.alt_token = social_data["credentials"].refresh_token
59 61 user.external_identities.append(ex_identity)
60 request.session.pop('zigg.social_auth', None)
62 request.session.pop("zigg.social_auth", None)
@@ -28,9 +28,7 b' from collections import namedtuple'
28 28 from datetime import timedelta, datetime, date
29 29 from dogpile.cache.api import NO_VALUE
30 30 from appenlight.models import Datastores
31 from appenlight.validators import (LogSearchSchema,
32 TagListSchema,
33 accepted_search_params)
31 from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params
34 32 from itsdangerous import TimestampSigner
35 33 from ziggurat_foundations.permissions import ALL_PERMISSIONS
36 34 from ziggurat_foundations.models.services.user import UserService
@@ -40,21 +38,20 b' from dateutil.rrule import rrule, MONTHLY, DAILY'
40 38 log = logging.getLogger(__name__)
41 39
42 40
43 Stat = namedtuple('Stat', 'start_interval value')
41 Stat = namedtuple("Stat", "start_interval value")
44 42
45 43
46 44 def default_extractor(item):
47 45 """
48 46 :param item - item to extract date from
49 47 """
50 if hasattr(item, 'start_interval'):
48 if hasattr(item, "start_interval"):
51 49 return item.start_interval
52 return item['start_interval']
50 return item["start_interval"]
53 51
54 52
55 53 # fast gap generator
56 def gap_gen_default(start, step, itemiterator, end_time=None,
57 iv_extractor=None):
54 def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None):
58 55 """ generates a list of time/value items based on step and itemiterator
59 56 if there are entries missing from iterator time/None will be returned
60 57 instead
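
The generator body sits outside this hunk; an illustrative stand-in for the contract the docstring describes (names hypothetical):

    def fill_gaps(start, step, items, end_time):
        # one entry per step; intervals missing from "items" get value=None
        have = {i["start_interval"]: i for i in items}
        current = start
        while current < end_time:
            yield have.get(current, {"start_interval": current, "value": None})
            current += step
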
@@ -100,27 +97,31 b' class DateTimeEncoder(json.JSONEncoder):'
100 97 return json.JSONEncoder.default(self, obj)
101 98
102 99
103 def channelstream_request(secret, endpoint, payload, throw_exceptions=False,
104 servers=None):
100 def channelstream_request(
101 secret, endpoint, payload, throw_exceptions=False, servers=None
102 ):
105 103 responses = []
106 104 if not servers:
107 105 servers = []
108 106
109 107 signer = TimestampSigner(secret)
110 108 sig_for_server = signer.sign(endpoint)
111 for secret, server in [(s['secret'], s['server']) for s in servers]:
109 for secret, server in [(s["secret"], s["server"]) for s in servers]:
112 110 response = {}
113 secret_headers = {'x-channelstream-secret': sig_for_server,
114 'x-channelstream-endpoint': endpoint,
115 'Content-Type': 'application/json'}
116 url = '%s%s' % (server, endpoint)
111 secret_headers = {
112 "x-channelstream-secret": sig_for_server,
113 "x-channelstream-endpoint": endpoint,
114 "Content-Type": "application/json",
115 }
116 url = "%s%s" % (server, endpoint)
117 117 try:
118 response = requests.post(url,
119 data=json.dumps(payload,
120 cls=DateTimeEncoder),
121 headers=secret_headers,
122 verify=False,
123 timeout=2).json()
118 response = requests.post(
119 url,
120 data=json.dumps(payload, cls=DateTimeEncoder),
121 headers=secret_headers,
122 verify=False,
123 timeout=2,
124 ).json()
124 125 except requests.exceptions.RequestException as e:
125 126 if throw_exceptions:
126 127 raise
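
Only the endpoint path is signed, so the receiving channelstream server can check both authenticity and freshness. A sketch of the itsdangerous primitive involved:

    from itsdangerous import TimestampSigner

    signer = TimestampSigner("secret")
    sig = signer.sign("/message")   # value + timestamp + MAC
    signer.unsign(sig, max_age=60)  # -> b'/message'; raises if stale or forged
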
@@ -130,13 +131,15 b' def channelstream_request(secret, endpoint, payload, throw_exceptions=False,'
130 131
131 132 def add_cors_headers(response):
132 133 # allow CORS
133 response.headers.add('Access-Control-Allow-Origin', '*')
134 response.headers.add('XDomainRequestAllowed', '1')
135 response.headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
134 response.headers.add("Access-Control-Allow-Origin", "*")
135 response.headers.add("XDomainRequestAllowed", "1")
136 response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
136 137 # response.headers.add('Access-Control-Allow-Credentials', 'true')
137 response.headers.add('Access-Control-Allow-Headers',
138 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie')
139 response.headers.add('Access-Control-Max-Age', '86400')
138 response.headers.add(
139 "Access-Control-Allow-Headers",
140 "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie",
141 )
142 response.headers.add("Access-Control-Max-Age", "86400")
140 143
141 144
142 145 from sqlalchemy.sql import compiler
@@ -145,6 +148,7 b' from psycopg2.extensions import adapt as sqlescape'
145 148
146 149 # or use the appropriate escape function from your db driver
147 150
151
148 152 def compile_query(query):
149 153 dialect = query.session.bind.dialect
150 154 statement = query.statement
@@ -166,22 +170,23 b' def convert_es_type(input_data):'
166 170 return str(input_data)
167 171
168 172
169 ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch'])
173 ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])
170 174
171 175
172 176 def parse_proto(input_data):
173 177 try:
174 parts = [int(x) for x in input_data.split('.')]
178 parts = [int(x) for x in input_data.split(".")]
175 179 while len(parts) < 3:
176 180 parts.append(0)
177 181 return ProtoVersion(*parts)
178 182 except Exception as e:
179 log.info('Unknown protocol version: %s' % e)
183 log.info("Unknown protocol version: %s" % e)
180 184 return ProtoVersion(99, 99, 99)
181 185
182 186
183 def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
184 ixtypes=None):
187 def es_index_name_limiter(
188 start_date=None, end_date=None, months_in_past=6, ixtypes=None
189 ):
185 190 """
186 191 This function limits the search to 6 months by default so we don't have to
187 192 query 300 elasticsearch indices for 20 years of historical data for example
@@ -189,23 +194,23 b' def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,'
189 194
190 195 # should be cached later
191 196 def get_possible_names():
192 return list(Datastores.es.indices.get_alias('*'))
197 return list(Datastores.es.indices.get_alias("*"))
193 198
194 199 possible_names = get_possible_names()
195 200 es_index_types = []
196 201 if not ixtypes:
197 ixtypes = ['reports', 'metrics', 'logs']
202 ixtypes = ["reports", "metrics", "logs"]
198 203 for t in ixtypes:
199 if t == 'reports':
200 es_index_types.append('rcae_r_%s')
201 elif t == 'logs':
202 es_index_types.append('rcae_l_%s')
203 elif t == 'metrics':
204 es_index_types.append('rcae_m_%s')
205 elif t == 'uptime':
206 es_index_types.append('rcae_u_%s')
207 elif t == 'slow_calls':
208 es_index_types.append('rcae_sc_%s')
204 if t == "reports":
205 es_index_types.append("rcae_r_%s")
206 elif t == "logs":
207 es_index_types.append("rcae_l_%s")
208 elif t == "metrics":
209 es_index_types.append("rcae_m_%s")
210 elif t == "uptime":
211 es_index_types.append("rcae_u_%s")
212 elif t == "slow_calls":
213 es_index_types.append("rcae_sc_%s")
209 214
210 215 if start_date:
211 216 start_date = copy.copy(start_date)
@@ -217,26 +222,34 b' def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,'
217 222 if not end_date:
218 223 end_date = start_date + relativedelta(months=months_in_past)
219 224
220 index_dates = list(rrule(MONTHLY,
221 dtstart=start_date.date().replace(day=1),
222 until=end_date.date(),
223 count=36))
225 index_dates = list(
226 rrule(
227 MONTHLY,
228 dtstart=start_date.date().replace(day=1),
229 until=end_date.date(),
230 count=36,
231 )
232 )
224 233 index_names = []
225 234 for ix_type in es_index_types:
226 to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates
227 if ix_type % d.strftime('%Y_%m') in possible_names]
235 to_extend = [
236 ix_type % d.strftime("%Y_%m")
237 for d in index_dates
238 if ix_type % d.strftime("%Y_%m") in possible_names
239 ]
228 240 index_names.extend(to_extend)
229 for day in list(rrule(DAILY, dtstart=start_date.date(),
230 until=end_date.date(), count=366)):
231 ix_name = ix_type % day.strftime('%Y_%m_%d')
241 for day in list(
242 rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366)
243 ):
244 ix_name = ix_type % day.strftime("%Y_%m_%d")
232 245 if ix_name in possible_names:
233 246 index_names.append(ix_name)
234 247 return index_names
235 248
236 249
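
The fan-out above maps a date range onto concrete index names, monthly first, then daily partitions. A sketch for the reports pattern:

    from datetime import datetime
    from dateutil.relativedelta import relativedelta
    from dateutil.rrule import rrule, MONTHLY

    start = datetime(2017, 1, 15).replace(day=1)
    until = start + relativedelta(months=2)
    ["rcae_r_%s" % d.strftime("%Y_%m") for d in rrule(MONTHLY, dtstart=start, until=until)]
    # -> ['rcae_r_2017_01', 'rcae_r_2017_02', 'rcae_r_2017_03']
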
237 250 def build_filter_settings_from_query_dict(
238 request, params=None, override_app_ids=None,
239 resource_permissions=None):
251 request, params=None, override_app_ids=None, resource_permissions=None
252 ):
240 253 """
241 254 Builds list of normalized search terms for ES from query params
242 255 ensuring application list is restricted to only applications user
@@ -249,11 +262,12 b' def build_filter_settings_from_query_dict('
249 262 params = copy.deepcopy(params)
250 263 applications = []
251 264 if not resource_permissions:
252 resource_permissions = ['view']
265 resource_permissions = ["view"]
253 266
254 267 if request.user:
255 268 applications = UserService.resources_with_perms(
256 request.user, resource_permissions, resource_types=['application'])
269 request.user, resource_permissions, resource_types=["application"]
270 )
257 271
258 272 # CRITICAL - this ensures our resultset is limited to only the ones
259 273 # the user has view permissions for
@@ -273,11 +287,11 b' def build_filter_settings_from_query_dict('
273 287 for k, v in list(filter_settings.items()):
274 288 if k in accepted_search_params:
275 289 continue
276 tag_list.append({"name": k, "value": v, "op": 'eq'})
290 tag_list.append({"name": k, "value": v, "op": "eq"})
277 291 # remove the key from filter_settings
278 292 filter_settings.pop(k, None)
279 293 tags = tag_schema.deserialize(tag_list)
280 filter_settings['tags'] = tags
294 filter_settings["tags"] = tags
281 295 return filter_settings
282 296
283 297
@@ -299,26 +313,36 b' def permission_tuple_to_dict(data):'
299 313 "resource_type": None,
300 314 "resource_id": None,
301 315 "group_name": None,
302 "group_id": None
316 "group_id": None,
303 317 }
304 318 if data.user:
305 319 out["user_name"] = data.user.user_name
306 320 if data.perm_name == ALL_PERMISSIONS:
307 out['perm_name'] = '__all_permissions__'
321 out["perm_name"] = "__all_permissions__"
308 322 if data.resource:
309 out['resource_name'] = data.resource.resource_name
310 out['resource_type'] = data.resource.resource_type
311 out['resource_id'] = data.resource.resource_id
323 out["resource_name"] = data.resource.resource_name
324 out["resource_type"] = data.resource.resource_type
325 out["resource_id"] = data.resource.resource_id
312 326 if data.group:
313 out['group_name'] = data.group.group_name
314 out['group_id'] = data.group.id
327 out["group_name"] = data.group.group_name
328 out["group_id"] = data.group.id
315 329 return out
316 330
317 331
318 def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
319 gap_gen=None, db_session=None, step_interval=None,
320 iv_extractor=None,
321 rerange=False, *args, **kwargs):
332 def get_cached_buckets(
333 request,
334 stats_since,
335 end_time,
336 fn,
337 cache_key,
338 gap_gen=None,
339 db_session=None,
340 step_interval=None,
341 iv_extractor=None,
342 rerange=False,
343 *args,
344 **kwargs
345 ):
322 346 """ Takes "fn" that should return some data and tries to load the data
323 347 dividing it into daily buckets - if the stats_since and end time give a
324 348 delta bigger than 24 hours, then only "today's" data is computed on the fly
@@ -360,25 +384,28 b' def get_cached_buckets(request, stats_since, end_time, fn, cache_key,'
360 384 # do not use custom interval if total time range with new iv would exceed
361 385 # end time
362 386 if not step_interval or stats_since + step_interval >= end_time:
363 if delta < h.time_deltas.get('12h')['delta']:
387 if delta < h.time_deltas.get("12h")["delta"]:
364 388 step_interval = timedelta(seconds=60)
365 elif delta < h.time_deltas.get('3d')['delta']:
389 elif delta < h.time_deltas.get("3d")["delta"]:
366 390 step_interval = timedelta(seconds=60 * 5)
367 elif delta > h.time_deltas.get('2w')['delta']:
391 elif delta > h.time_deltas.get("2w")["delta"]:
368 392 step_interval = timedelta(days=1)
369 393 else:
370 394 step_interval = timedelta(minutes=60)
371 395
372 396 if step_interval >= timedelta(minutes=60):
373 log.info('cached_buckets:{}: adjusting start time '
374 'for hourly or daily intervals'.format(cache_key))
397 log.info(
398 "cached_buckets:{}: adjusting start time "
399 "for hourly or daily intervals".format(cache_key)
400 )
375 401 stats_since = stats_since.replace(hour=0, minute=0)
376 402
377 ranges = [i.start_interval for i in list(gap_gen(stats_since,
378 step_interval, [],
379 end_time=end_time))]
403 ranges = [
404 i.start_interval
405 for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time))
406 ]
380 407 buckets = {}
381 storage_key = 'buckets:' + cache_key + '{}|{}'
408 storage_key = "buckets:" + cache_key + "{}|{}"
382 409 # this means we basically cache per hour in 3-14 day intervals but i think
383 410 # it's fine at this point - will be faster than db access anyway
384 411
@@ -391,45 +418,67 b' def get_cached_buckets(request, stats_since, end_time, fn, cache_key,'
391 418 k = storage_key.format(step_interval.total_seconds(), r)
392 419 value = request.registry.cache_regions.redis_day_30.get(k)
393 420 # last buckets are never loaded from cache
394 is_last_result = (
395 r >= end_time - timedelta(hours=6) or r in last_ranges)
421 is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges
396 422 if value is not NO_VALUE and not is_last_result:
397 log.info("cached_buckets:{}: "
398 "loading range {} from cache".format(cache_key, r))
423 log.info(
424 "cached_buckets:{}: "
425 "loading range {} from cache".format(cache_key, r)
426 )
399 427 buckets[r] = value
400 428 else:
401 log.info("cached_buckets:{}: "
402 "loading range {} from storage".format(cache_key, r))
429 log.info(
430 "cached_buckets:{}: "
431 "loading range {} from storage".format(cache_key, r)
432 )
403 433 range_size = step_interval
404 if (step_interval == timedelta(minutes=60) and
405 not is_last_result and rerange):
434 if (
435 step_interval == timedelta(minutes=60)
436 and not is_last_result
437 and rerange
438 ):
406 439 range_size = timedelta(days=1)
407 440 r = r.replace(hour=0, minute=0)
408 log.info("cached_buckets:{}: "
409 "loading collapsed "
410 "range {} {}".format(cache_key, r,
411 r + range_size))
441 log.info(
442 "cached_buckets:{}: "
443 "loading collapsed "
444 "range {} {}".format(cache_key, r, r + range_size)
445 )
412 446 bucket_data = fn(
413 request, r, r + range_size, step_interval,
414 gap_gen, bucket_count=len(ranges), *args, **kwargs)
447 request,
448 r,
449 r + range_size,
450 step_interval,
451 gap_gen,
452 bucket_count=len(ranges),
453 *args,
454 **kwargs
455 )
415 456 for b in bucket_data:
416 457 b_iv = iv_extractor(b)
417 458 buckets[b_iv] = b
418 k2 = storage_key.format(
419 step_interval.total_seconds(), b_iv)
459 k2 = storage_key.format(step_interval.total_seconds(), b_iv)
420 460 request.registry.cache_regions.redis_day_30.set(k2, b)
421 461 log.info("cached_buckets:{}: saving cache".format(cache_key))
422 462 else:
423 463 # bucket count is 1 for short time ranges <= 24h from now
424 bucket_data = fn(request, stats_since, end_time, step_interval,
425 gap_gen, bucket_count=1, *args, **kwargs)
464 bucket_data = fn(
465 request,
466 stats_since,
467 end_time,
468 step_interval,
469 gap_gen,
470 bucket_count=1,
471 *args,
472 **kwargs
473 )
426 474 for b in bucket_data:
427 475 buckets[iv_extractor(b)] = b
428 476 return buckets
429 477
430 478
431 def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
432 db_session=None, *args, **kwargs):
479 def get_cached_split_data(
480 request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs
481 ):
433 482 """ Takes "fn" that should return some data and tries to load the data
434 483 dividing it into 2 buckets - cached "since_from" bucket and "today"
435 484 bucket - then the data can be reduced into a single value
@@ -442,43 +491,51 b' def get_cached_split_data(request, stats_since, end_time, fn, cache_key,'
442 491 delta = end_time - stats_since
443 492
444 493 if delta >= timedelta(minutes=60):
445 log.info('cached_split_data:{}: adjusting start time '
446 'for hourly or daily intervals'.format(cache_key))
494 log.info(
495 "cached_split_data:{}: adjusting start time "
496 "for hourly or daily intervals".format(cache_key)
497 )
447 498 stats_since = stats_since.replace(hour=0, minute=0)
448 499
449 storage_key = 'buckets_split_data:' + cache_key + ':{}|{}'
500 storage_key = "buckets_split_data:" + cache_key + ":{}|{}"
450 501 old_end_time = end_time.replace(hour=0, minute=0)
451 502
452 final_storage_key = storage_key.format(delta.total_seconds(),
453 old_end_time)
503 final_storage_key = storage_key.format(delta.total_seconds(), old_end_time)
454 504 older_data = None
455 505
456 cdata = request.registry.cache_regions.redis_day_7.get(
457 final_storage_key)
506 cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key)
458 507
459 508 if cdata:
460 log.info("cached_split_data:{}: found old "
461 "bucket data".format(cache_key))
509 log.info("cached_split_data:{}: found old " "bucket data".format(cache_key))
462 510 older_data = cdata
463 511
464 if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and
465 not cdata):
466 log.info("cached_split_data:{}: didn't find the "
467 "start bucket in cache so load older data".format(cache_key))
512 if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata:
513 log.info(
514 "cached_split_data:{}: didn't find the "
515 "start bucket in cache so load older data".format(cache_key)
516 )
468 517 recent_stats_since = old_end_time
469 older_data = fn(request, stats_since, recent_stats_since,
470 db_session=db_session, *args, **kwargs)
471 request.registry.cache_regions.redis_day_7.set(final_storage_key,
472 older_data)
473 elif stats_since < end_time - h.time_deltas.get('24h')['delta']:
518 older_data = fn(
519 request,
520 stats_since,
521 recent_stats_since,
522 db_session=db_session,
523 *args,
524 **kwargs
525 )
526 request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data)
527 elif stats_since < end_time - h.time_deltas.get("24h")["delta"]:
474 528 recent_stats_since = old_end_time
475 529 else:
476 530 recent_stats_since = stats_since
477 531
478 log.info("cached_split_data:{}: loading fresh "
479 "data bucksts from last 24h ".format(cache_key))
480 todays_data = fn(request, recent_stats_since, end_time,
481 db_session=db_session, *args, **kwargs)
532 log.info(
533 "cached_split_data:{}: loading fresh "
534 "data bucksts from last 24h ".format(cache_key)
535 )
536 todays_data = fn(
537 request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs
538 )
482 539 return older_data, todays_data
483 540
484 541
@@ -488,4 +545,4 b' def in_batches(seq, size):'
488 545 :param seq (iterable)
489 546 :param size integer
490 547 """
491 return (seq[pos:pos + size] for pos in range(0, len(seq), size))
548 return (seq[pos : pos + size] for pos in range(0, len(seq), size))
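
A quick usage note for the batch slicer:

    list(in_batches([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]
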
@@ -24,119 +24,138 b' log = logging.getLogger(__name__)'
24 24
25 25 def parse_airbrake_xml(request):
26 26 root = request.context.airbrake_xml_etree
27 error = root.find('error')
28 notifier = root.find('notifier')
29 server_env = root.find('server-environment')
30 request_data = root.find('request')
31 user = root.find('current-user')
27 error = root.find("error")
28 notifier = root.find("notifier")
29 server_env = root.find("server-environment")
30 request_data = root.find("request")
31 user = root.find("current-user")
32 32 if request_data is not None:
33 cgi_data = request_data.find('cgi-data')
33 cgi_data = request_data.find("cgi-data")
34 34 if cgi_data is None:
35 35 cgi_data = []
36 36
37 37 error_dict = {
38 'class_name': error.findtext('class') or '',
39 'error': error.findtext('message') or '',
38 "class_name": error.findtext("class") or "",
39 "error": error.findtext("message") or "",
40 40 "occurences": 1,
41 41 "http_status": 500,
42 42 "priority": 5,
43 "server": 'unknown',
44 'url': 'unknown', 'request': {}
43 "server": "unknown",
44 "url": "unknown",
45 "request": {},
45 46 }
46 47 if user is not None:
47 error_dict['username'] = user.findtext('username') or \
48 user.findtext('id')
48 error_dict["username"] = user.findtext("username") or user.findtext("id")
49 49 if notifier is not None:
50 error_dict['client'] = notifier.findtext('name')
50 error_dict["client"] = notifier.findtext("name")
51 51
52 52 if server_env is not None:
53 error_dict["server"] = server_env.findtext('hostname', 'unknown')
53 error_dict["server"] = server_env.findtext("hostname", "unknown")
54 54
55 whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME',
56 'CONTENT_TYPE', 'HTTP_REFERER']
55 whitelist_environ = [
56 "REMOTE_USER",
57 "REMOTE_ADDR",
58 "SERVER_NAME",
59 "CONTENT_TYPE",
60 "HTTP_REFERER",
61 ]
57 62
58 63 if request_data is not None:
59 error_dict['url'] = request_data.findtext('url', 'unknown')
60 component = request_data.findtext('component')
61 action = request_data.findtext('action')
64 error_dict["url"] = request_data.findtext("url", "unknown")
65 component = request_data.findtext("component")
66 action = request_data.findtext("action")
62 67 if component and action:
63 error_dict['view_name'] = '%s:%s' % (component, action)
68 error_dict["view_name"] = "%s:%s" % (component, action)
64 69 for node in cgi_data:
65 key = node.get('key')
66 if key.startswith('HTTP') or key in whitelist_environ:
67 error_dict['request'][key] = node.text
68 elif 'query_parameters' in key:
69 error_dict['request']['GET'] = {}
70 key = node.get("key")
71 if key.startswith("HTTP") or key in whitelist_environ:
72 error_dict["request"][key] = node.text
73 elif "query_parameters" in key:
74 error_dict["request"]["GET"] = {}
70 75 for x in node:
71 error_dict['request']['GET'][x.get('key')] = x.text
72 elif 'request_parameters' in key:
73 error_dict['request']['POST'] = {}
76 error_dict["request"]["GET"][x.get("key")] = x.text
77 elif "request_parameters" in key:
78 error_dict["request"]["POST"] = {}
74 79 for x in node:
75 error_dict['request']['POST'][x.get('key')] = x.text
76 elif key.endswith('cookie'):
77 error_dict['request']['COOKIE'] = {}
80 error_dict["request"]["POST"][x.get("key")] = x.text
81 elif key.endswith("cookie"):
82 error_dict["request"]["COOKIE"] = {}
78 83 for x in node:
79 error_dict['request']['COOKIE'][x.get('key')] = x.text
80 elif key.endswith('request_id'):
81 error_dict['request_id'] = node.text
82 elif key.endswith('session'):
83 error_dict['request']['SESSION'] = {}
84 error_dict["request"]["COOKIE"][x.get("key")] = x.text
85 elif key.endswith("request_id"):
86 error_dict["request_id"] = node.text
87 elif key.endswith("session"):
88 error_dict["request"]["SESSION"] = {}
84 89 for x in node:
85 error_dict['request']['SESSION'][x.get('key')] = x.text
90 error_dict["request"]["SESSION"][x.get("key")] = x.text
86 91 else:
87 if key in ['rack.session.options']:
92 if key in ["rack.session.options"]:
88 93 # skip secret configs
89 94 continue
90 95 try:
91 96 if len(node):
92 error_dict['request'][key] = dict(
93 [(x.get('key'), x.text,) for x in node])
97 error_dict["request"][key] = dict(
98 [(x.get("key"), x.text) for x in node]
99 )
94 100 else:
95 error_dict['request'][key] = node.text
101 error_dict["request"][key] = node.text
96 102 except Exception as e:
97 log.warning('Airbrake integration exception: %s' % e)
103 log.warning("Airbrake integration exception: %s" % e)
98 104
99 error_dict['request'].pop('HTTP_COOKIE', '')
105 error_dict["request"].pop("HTTP_COOKIE", "")
100 106
101 error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '')
102 error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '')
103 if 'request_id' not in error_dict:
104 error_dict['request_id'] = str(uuid.uuid4())
107 error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "")
108 error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "")
109 if "request_id" not in error_dict:
110 error_dict["request_id"] = str(uuid.uuid4())
105 111 if request.context.possibly_public:
106 112 # set ip for reports that come from airbrake js client
107 113 error_dict["timestamp"] = datetime.utcnow()
108 114 if request.environ.get("HTTP_X_FORWARDED_FOR"):
109 ip = request.environ.get("HTTP_X_FORWARDED_FOR", '')
110 first_ip = ip.split(',')[0]
115 ip = request.environ.get("HTTP_X_FORWARDED_FOR", "")
116 first_ip = ip.split(",")[0]
111 117 remote_addr = first_ip.strip()
112 118 else:
113 remote_addr = (request.environ.get("HTTP_X_REAL_IP") or
114 request.environ.get('REMOTE_ADDR'))
119 remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get(
120 "REMOTE_ADDR"
121 )
115 122 error_dict["ip"] = remote_addr
116 123
117 blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
118 'session', 'test']
124 blacklist = [
125 "password",
126 "passwd",
127 "pwd",
128 "auth_tkt",
129 "secret",
130 "csrf",
131 "session",
132 "test",
133 ]
119 134
120 135 lines = []
121 for l in error.find('backtrace'):
122 lines.append({'file': l.get("file", ""),
123 'line': l.get("number", ""),
124 'fn': l.get("method", ""),
125 'module': l.get("module", ""),
126 'cline': l.get("method", ""),
127 'vars': {}})
128 error_dict['traceback'] = list(reversed(lines))
136 for l in error.find("backtrace"):
137 lines.append(
138 {
139 "file": l.get("file", ""),
140 "line": l.get("number", ""),
141 "fn": l.get("method", ""),
142 "module": l.get("module", ""),
143 "cline": l.get("method", ""),
144 "vars": {},
145 }
146 )
147 error_dict["traceback"] = list(reversed(lines))
129 148 # filtering is not provided by airbrake
130 149 keys_to_check = (
131 error_dict['request'].get('COOKIE'),
132 error_dict['request'].get('COOKIES'),
133 error_dict['request'].get('POST'),
134 error_dict['request'].get('SESSION'),
150 error_dict["request"].get("COOKIE"),
151 error_dict["request"].get("COOKIES"),
152 error_dict["request"].get("POST"),
153 error_dict["request"].get("SESSION"),
135 154 )
136 155 for source in [_f for _f in keys_to_check if _f]:
137 156 for k in source.keys():
138 157 for bad_key in blacklist:
139 158 if bad_key in k.lower():
140 source[k] = '***'
159 source[k] = "***"
141 160
142 161 return error_dict
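Reviewer note: since Airbrake does no client-side filtering, the final loop above is the only scrubbing these payloads get. A standalone sketch of the same substring rule (dict contents are illustrative, not from the module):

    blacklist = ["password", "passwd", "pwd", "auth_tkt", "secret", "csrf", "session", "test"]

    def scrub(source):
        # mask any value whose key contains a blacklisted substring, case-insensitively
        return {k: "***" if any(bad in k.lower() for bad in blacklist) else v
                for k, v in source.items()}

    print(scrub({"user_password": "hunter2", "page": "1"}))
    # {'user_password': '***', 'page': '1'}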
@@ -22,12 +22,12 b' log = logging.getLogger(__name__)'
22 22
23 23
24 24 def to_relativedelta(time_delta):
25 return relativedelta(seconds=int(time_delta.total_seconds()),
26 microseconds=time_delta.microseconds)
25 return relativedelta(
26 seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds
27 )
27 28
28 29
29 def convert_date(date_str, return_utcnow_if_wrong=True,
30 normalize_future=False):
30 def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):
31 31 utcnow = datetime.utcnow()
32 32 if isinstance(date_str, datetime):
33 33 # get rid of tzinfo
@@ -36,21 +36,21 b' def convert_date(date_str, return_utcnow_if_wrong=True,'
36 36 return utcnow
37 37 try:
38 38 try:
39 if 'Z' in date_str:
40 date_str = date_str[:date_str.index('Z')]
41 if '.' in date_str:
42 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%f')
39 if "Z" in date_str:
40 date_str = date_str[: date_str.index("Z")]
41 if "." in date_str:
42 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f")
43 43 else:
44 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S')
44 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
45 45 except Exception:
46 46 # bw compat with old client
47 date = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S,%f')
47 date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
48 48 except Exception:
49 49 if return_utcnow_if_wrong:
50 50 date = utcnow
51 51 else:
52 52 date = None
53 53 if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
54 log.warning('time %s in future + 3 min, normalizing' % date)
54 log.warning("time %s in future + 3 min, normalizing" % date)
55 55 return utcnow
56 56 return date
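For reference, the input formats convert_date accepts (import path assumed; results shown as comments):

    convert_date("2014-10-13T23:47:38.295159Z")  # datetime(2014, 10, 13, 23, 47, 38, 295159)
    convert_date("2014-10-13T23:47:38")          # datetime(2014, 10, 13, 23, 47, 38)
    convert_date("2014-10-13 23:47:38,295")      # bw-compat client format -> microsecond=295000
    convert_date("not-a-date", return_utcnow_if_wrong=False)  # None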
@@ -19,45 +19,68 b' from datetime import timedelta'
19 19 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
20 20
21 21 EXCLUDED_LOG_VARS = [
22 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
23 'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs',
24 'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated',
25 'thread', 'threadName']
22 "args",
23 "asctime",
24 "created",
25 "exc_info",
26 "exc_text",
27 "filename",
28 "funcName",
29 "levelname",
30 "levelno",
31 "lineno",
32 "message",
33 "module",
34 "msecs",
35 "msg",
36 "name",
37 "pathname",
38 "process",
39 "processName",
40 "relativeCreated",
41 "thread",
42 "threadName",
43 ]
26 44
27 45 EXCLUDE_SENTRY_KEYS = [
28 'csp',
29 'culprit',
30 'event_id',
31 'exception',
32 'extra',
33 'level',
34 'logentry',
35 'logger',
36 'message',
37 'modules',
38 'platform',
39 'query',
40 'release',
41 'request',
42 'sentry.interfaces.Csp', 'sentry.interfaces.Exception',
43 'sentry.interfaces.Http', 'sentry.interfaces.Message',
44 'sentry.interfaces.Query',
45 'sentry.interfaces.Stacktrace',
46 'sentry.interfaces.Template', 'sentry.interfaces.User',
47 'sentry.interfaces.csp.Csp',
48 'sentry.interfaces.exception.Exception',
49 'sentry.interfaces.http.Http',
50 'sentry.interfaces.message.Message',
51 'sentry.interfaces.query.Query',
52 'sentry.interfaces.stacktrace.Stacktrace',
53 'sentry.interfaces.template.Template',
54 'sentry.interfaces.user.User', 'server_name',
55 'stacktrace',
56 'tags',
57 'template',
58 'time_spent',
59 'timestamp',
60 'user']
46 "csp",
47 "culprit",
48 "event_id",
49 "exception",
50 "extra",
51 "level",
52 "logentry",
53 "logger",
54 "message",
55 "modules",
56 "platform",
57 "query",
58 "release",
59 "request",
60 "sentry.interfaces.Csp",
61 "sentry.interfaces.Exception",
62 "sentry.interfaces.Http",
63 "sentry.interfaces.Message",
64 "sentry.interfaces.Query",
65 "sentry.interfaces.Stacktrace",
66 "sentry.interfaces.Template",
67 "sentry.interfaces.User",
68 "sentry.interfaces.csp.Csp",
69 "sentry.interfaces.exception.Exception",
70 "sentry.interfaces.http.Http",
71 "sentry.interfaces.message.Message",
72 "sentry.interfaces.query.Query",
73 "sentry.interfaces.stacktrace.Stacktrace",
74 "sentry.interfaces.template.Template",
75 "sentry.interfaces.user.User",
76 "server_name",
77 "stacktrace",
78 "tags",
79 "template",
80 "time_spent",
81 "timestamp",
82 "user",
83 ]
61 84
62 85
63 86 def get_keys(list_of_keys, json_body):
@@ -67,30 +90,32 b' def get_keys(list_of_keys, json_body):'
67 90
68 91
69 92 def get_logentry(json_body):
70 key_names = ['logentry',
71 'sentry.interfaces.message.Message',
72 'sentry.interfaces.Message'
73 ]
93 key_names = [
94 "logentry",
95 "sentry.interfaces.message.Message",
96 "sentry.interfaces.Message",
97 ]
74 98 logentry = get_keys(key_names, json_body)
75 99 return logentry
76 100
77 101
78 102 def get_exception(json_body):
79 103 parsed_exception = {}
80 key_names = ['exception',
81 'sentry.interfaces.exception.Exception',
82 'sentry.interfaces.Exception'
83 ]
104 key_names = [
105 "exception",
106 "sentry.interfaces.exception.Exception",
107 "sentry.interfaces.Exception",
108 ]
84 109 exception = get_keys(key_names, json_body) or {}
85 110 if exception:
86 111 if isinstance(exception, dict):
87 exception = exception['values'][0]
112 exception = exception["values"][0]
88 113 else:
89 114 exception = exception[0]
90 115
91 parsed_exception['type'] = exception.get('type')
92 parsed_exception['value'] = exception.get('value')
93 parsed_exception['module'] = exception.get('module')
116 parsed_exception["type"] = exception.get("type")
117 parsed_exception["value"] = exception.get("value")
118 parsed_exception["module"] = exception.get("module")
94 119 parsed_stacktrace = get_stacktrace(exception) or {}
95 120 parsed_exception = exception or {}
96 121 return parsed_exception, parsed_stacktrace
@@ -98,41 +123,45 b' def get_exception(json_body):'
98 123
99 124 def get_stacktrace(json_body):
100 125 parsed_stacktrace = []
101 key_names = ['stacktrace',
102 'sentry.interfaces.stacktrace.Stacktrace',
103 'sentry.interfaces.Stacktrace'
104 ]
126 key_names = [
127 "stacktrace",
128 "sentry.interfaces.stacktrace.Stacktrace",
129 "sentry.interfaces.Stacktrace",
130 ]
105 131 stacktrace = get_keys(key_names, json_body)
106 132 if stacktrace:
107 for frame in stacktrace['frames']:
133 for frame in stacktrace["frames"]:
108 134 parsed_stacktrace.append(
109 {"cline": frame.get('context_line', ''),
110 "file": frame.get('filename', ''),
111 "module": frame.get('module', ''),
112 "fn": frame.get('function', ''),
113 "line": frame.get('lineno', ''),
114 "vars": list(frame.get('vars', {}).items())
115 }
135 {
136 "cline": frame.get("context_line", ""),
137 "file": frame.get("filename", ""),
138 "module": frame.get("module", ""),
139 "fn": frame.get("function", ""),
140 "line": frame.get("lineno", ""),
141 "vars": list(frame.get("vars", {}).items()),
142 }
116 143 )
117 144 return parsed_stacktrace
118 145
119 146
120 147 def get_template(json_body):
121 148 parsed_template = []
122 key_names = ['template',
123 'sentry.interfaces.template.Template',
124 'sentry.interfaces.Template'
125 ]
149 key_names = [
150 "template",
151 "sentry.interfaces.template.Template",
152 "sentry.interfaces.Template",
153 ]
126 154 template = get_keys(key_names, json_body)
127 155 if template:
128 for frame in template['frames']:
156 for frame in template["frames"]:
129 157 parsed_template.append(
130 {"cline": frame.get('context_line', ''),
131 "file": frame.get('filename', ''),
132 "fn": '',
133 "line": frame.get('lineno', ''),
134 "vars": []
135 }
158 {
159 "cline": frame.get("context_line", ""),
160 "file": frame.get("filename", ""),
161 "fn": "",
162 "line": frame.get("lineno", ""),
163 "vars": [],
164 }
136 165 )
137 166
138 167 return parsed_template
@@ -140,16 +169,13 b' def get_template(json_body):'
140 169
141 170 def get_request(json_body):
142 171 parsed_http = {}
143 key_names = ['request',
144 'sentry.interfaces.http.Http',
145 'sentry.interfaces.Http'
146 ]
172 key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"]
147 173 http = get_keys(key_names, json_body) or {}
148 174 for k, v in http.items():
149 if k == 'headers':
150 parsed_http['headers'] = {}
151 for sk, sv in http['headers'].items():
152 parsed_http['headers'][sk.title()] = sv
175 if k == "headers":
176 parsed_http["headers"] = {}
177 for sk, sv in http["headers"].items():
178 parsed_http["headers"][sk.title()] = sv
153 179 else:
154 180 parsed_http[k.lower()] = v
155 181 return parsed_http
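Note that str.title() is doing the header normalization here; it capitalizes each alphabetic run, which matches conventional HTTP header casing:

    print("content-type".title())       # Content-Type
    print("x-forwarded-for".title())    # X-Forwarded-For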
@@ -157,53 +183,47 b' def get_request(json_body):'
157 183
158 184 def get_user(json_body):
159 185 parsed_user = {}
160 key_names = ['user',
161 'sentry.interfaces.user.User',
162 'sentry.interfaces.User'
163 ]
186 key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"]
164 187 user = get_keys(key_names, json_body)
165 188 if user:
166 parsed_user['id'] = user.get('id')
167 parsed_user['username'] = user.get('username')
168 parsed_user['email'] = user.get('email')
169 parsed_user['ip_address'] = user.get('ip_address')
189 parsed_user["id"] = user.get("id")
190 parsed_user["username"] = user.get("username")
191 parsed_user["email"] = user.get("email")
192 parsed_user["ip_address"] = user.get("ip_address")
170 193
171 194 return parsed_user
172 195
173 196
174 197 def get_query(json_body):
175 198 query = None
176 key_name = ['query',
177 'sentry.interfaces.query.Query',
178 'sentry.interfaces.Query'
179 ]
199 key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"]
180 200 query = get_keys(key_name, json_body)
181 201 return query
182 202
183 203
184 204 def parse_sentry_event(json_body):
185 request_id = json_body.get('event_id')
205 request_id = json_body.get("event_id")
186 206
187 207 # required
188 message = json_body.get('message')
189 log_timestamp = json_body.get('timestamp')
190 level = json_body.get('level')
208 message = json_body.get("message")
209 log_timestamp = json_body.get("timestamp")
210 level = json_body.get("level")
191 211 if isinstance(level, int):
192 212 level = LogLevelPython.key_from_value(level)
193 213
194 namespace = json_body.get('logger')
195 language = json_body.get('platform')
214 namespace = json_body.get("logger")
215 language = json_body.get("platform")
196 216
197 217 # optional
198 server_name = json_body.get('server_name')
199 culprit = json_body.get('culprit')
200 release = json_body.get('release')
218 server_name = json_body.get("server_name")
219 culprit = json_body.get("culprit")
220 release = json_body.get("release")
201 221
202 tags = json_body.get('tags', {})
203 if hasattr(tags, 'items'):
222 tags = json_body.get("tags", {})
223 if hasattr(tags, "items"):
204 224 tags = list(tags.items())
205 extra = json_body.get('extra', {})
206 if hasattr(extra, 'items'):
225 extra = json_body.get("extra", {})
226 if hasattr(extra, "items"):
207 227 extra = list(extra.items())
208 228
209 229 parsed_req = get_request(json_body)
@@ -212,12 +232,13 b' def parse_sentry_event(json_body):'
212 232 query = get_query(json_body)
213 233
214 234 # other unidentified keys found
215 other_keys = [(k, json_body[k]) for k in json_body.keys()
216 if k not in EXCLUDE_SENTRY_KEYS]
235 other_keys = [
236 (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS
237 ]
217 238
218 239 logentry = get_logentry(json_body)
219 240 if logentry:
220 message = logentry['message']
241 message = logentry["message"]
221 242
222 243 exception, stacktrace = get_exception(json_body)
223 244
@@ -227,70 +248,70 b' def parse_sentry_event(json_body):'
227 248 event_type = ParsedSentryEventType.LOG
228 249
229 250 event_dict = {
230 'log_level': level,
231 'message': message,
232 'namespace': namespace,
233 'request_id': request_id,
234 'server': server_name,
235 'date': log_timestamp,
236 'tags': tags
251 "log_level": level,
252 "message": message,
253 "namespace": namespace,
254 "request_id": request_id,
255 "server": server_name,
256 "date": log_timestamp,
257 "tags": tags,
237 258 }
238 event_dict['tags'].extend(
239 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS])
259 event_dict["tags"].extend(
260 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS]
261 )
240 262
241 263 # other keys can be various object types
242 event_dict['tags'].extend([(k, v) for k, v in other_keys
243 if isinstance(v, str)])
264 event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)])
244 265 if culprit:
245 event_dict['tags'].append(('sentry_culprit', culprit))
266 event_dict["tags"].append(("sentry_culprit", culprit))
246 267 if language:
247 event_dict['tags'].append(('sentry_language', language))
268 event_dict["tags"].append(("sentry_language", language))
248 269 if release:
249 event_dict['tags'].append(('sentry_release', release))
270 event_dict["tags"].append(("sentry_release", release))
250 271
251 272 if exception or stacktrace or alt_stacktrace or template:
252 273 event_type = ParsedSentryEventType.ERROR_REPORT
253 274 event_dict = {
254 'client': 'sentry',
255 'error': message,
256 'namespace': namespace,
257 'request_id': request_id,
258 'server': server_name,
259 'start_time': log_timestamp,
260 'end_time': None,
261 'tags': tags,
262 'extra': extra,
263 'language': language,
264 'view_name': json_body.get('culprit'),
265 'http_status': None,
266 'username': None,
267 'url': parsed_req.get('url'),
268 'ip': None,
269 'user_agent': None,
270 'request': None,
271 'slow_calls': None,
272 'request_stats': None,
273 'traceback': None
275 "client": "sentry",
276 "error": message,
277 "namespace": namespace,
278 "request_id": request_id,
279 "server": server_name,
280 "start_time": log_timestamp,
281 "end_time": None,
282 "tags": tags,
283 "extra": extra,
284 "language": language,
285 "view_name": json_body.get("culprit"),
286 "http_status": None,
287 "username": None,
288 "url": parsed_req.get("url"),
289 "ip": None,
290 "user_agent": None,
291 "request": None,
292 "slow_calls": None,
293 "request_stats": None,
294 "traceback": None,
274 295 }
275 296
276 event_dict['extra'].extend(other_keys)
297 event_dict["extra"].extend(other_keys)
277 298 if release:
278 event_dict['tags'].append(('sentry_release', release))
279 event_dict['request'] = parsed_req
280 if 'headers' in parsed_req:
281 event_dict['user_agent'] = parsed_req['headers'].get('User-Agent')
282 if 'env' in parsed_req:
283 event_dict['ip'] = parsed_req['env'].get('REMOTE_ADDR')
284 ts_ms = int(json_body.get('time_spent') or 0)
299 event_dict["tags"].append(("sentry_release", release))
300 event_dict["request"] = parsed_req
301 if "headers" in parsed_req:
302 event_dict["user_agent"] = parsed_req["headers"].get("User-Agent")
303 if "env" in parsed_req:
304 event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR")
305 ts_ms = int(json_body.get("time_spent") or 0)
285 306 if ts_ms > 0:
286 event_dict['end_time'] = event_dict['start_time'] + \
287 timedelta(milliseconds=ts_ms)
307 event_dict["end_time"] = event_dict["start_time"] + timedelta(
308 milliseconds=ts_ms
309 )
288 310 if stacktrace or alt_stacktrace or template:
289 event_dict['traceback'] = stacktrace or alt_stacktrace or template
311 event_dict["traceback"] = stacktrace or alt_stacktrace or template
290 312 for k in list(event_dict.keys()):
291 313 if event_dict[k] is None:
292 314 del event_dict[k]
293 315 if user:
294 event_dict['username'] = user['username'] or user['id'] \
295 or user['email']
316 event_dict["username"] = user["username"] or user["id"] or user["email"]
296 317 return event_dict, event_type
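End-to-end, the parser classifies a bare message as a LOG and promotes it to ERROR_REPORT once exception, stacktrace, or template data is present. A minimal sketch (field values are illustrative and the import path is assumed):

    event = {
        "event_id": "abc123",
        "message": "boom",
        "timestamp": "2014-10-13T23:47:38",
        "level": "error",
        "logger": "root",
        "platform": "python",
        "exception": {"values": [{"type": "ValueError", "value": "boom", "module": "app"}]},
    }
    event_dict, event_type = parse_sentry_event(event)
    # event_type == ParsedSentryEventType.ERROR_REPORT
    # drop the "exception" key and the same payload comes back as ParsedSentryEventType.LOG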
@@ -13,5 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
17
@@ -41,7 +41,7 b' target_metadata = MetaData(naming_convention=NAMING_CONVENTION)'
41 41 # my_important_option = config.get_main_option("my_important_option")
42 42 # ... etc.
43 43
44 VERSION_TABLE_NAME = 'alembic_appenlight_version'
44 VERSION_TABLE_NAME = "alembic_appenlight_version"
45 45
46 46
47 47 def run_migrations_offline():
@@ -57,9 +57,12 b' def run_migrations_offline():'
57 57
58 58 """
59 59 url = config.get_main_option("sqlalchemy.url")
60 context.configure(url=url, target_metadata=target_metadata,
61 transaction_per_migration=True,
62 version_table=VERSION_TABLE_NAME)
60 context.configure(
61 url=url,
62 target_metadata=target_metadata,
63 transaction_per_migration=True,
64 version_table=VERSION_TABLE_NAME,
65 )
63 66
64 67 with context.begin_transaction():
65 68 context.run_migrations()
@@ -74,15 +77,16 b' def run_migrations_online():'
74 77 """
75 78 engine = engine_from_config(
76 79 config.get_section(config.config_ini_section),
77 prefix='sqlalchemy.',
78 poolclass=pool.NullPool)
80 prefix="sqlalchemy.",
81 poolclass=pool.NullPool,
82 )
79 83
80 84 connection = engine.connect()
81 85 context.configure(
82 86 connection=connection,
83 87 target_metadata=target_metadata,
84 88 transaction_per_migration=True,
85 version_table=VERSION_TABLE_NAME
89 version_table=VERSION_TABLE_NAME,
86 90 )
87 91
88 92 try:
@@ -23,7 +23,7 b' Create Date: 2014-10-13 23:47:38.295159'
23 23 """
24 24
25 25 # revision identifiers, used by Alembic.
26 revision = '55b6e612672f'
26 revision = "55b6e612672f"
27 27 down_revision = None
28 28
29 29 from alembic import op
@@ -31,348 +31,514 b' import sqlalchemy as sa'
31 31
32 32
33 33 def upgrade():
34 op.add_column('users', sa.Column('first_name', sa.Unicode(25)))
35 op.add_column('users', sa.Column('last_name', sa.Unicode(50)))
36 op.add_column('users', sa.Column('company_name', sa.Unicode(255)))
37 op.add_column('users', sa.Column('company_address', sa.Unicode(255)))
38 op.add_column('users', sa.Column('phone1', sa.Unicode(25)))
39 op.add_column('users', sa.Column('phone2', sa.Unicode(25)))
40 op.add_column('users', sa.Column('zip_code', sa.Unicode(25)))
41 op.add_column('users', sa.Column('default_report_sort', sa.Unicode(20), nullable=False, server_default="newest"))
42 op.add_column('users', sa.Column('city', sa.Unicode(128)))
43 op.add_column('users', sa.Column('notes', sa.UnicodeText, server_default=''))
44 op.add_column('users', sa.Column('notifications', sa.Boolean(), nullable=False, server_default='true'))
45 op.add_column('users', sa.Column('registration_ip', sa.Unicode(40), nullable=False, server_default=''))
34 op.add_column("users", sa.Column("first_name", sa.Unicode(25)))
35 op.add_column("users", sa.Column("last_name", sa.Unicode(50)))
36 op.add_column("users", sa.Column("company_name", sa.Unicode(255)))
37 op.add_column("users", sa.Column("company_address", sa.Unicode(255)))
38 op.add_column("users", sa.Column("phone1", sa.Unicode(25)))
39 op.add_column("users", sa.Column("phone2", sa.Unicode(25)))
40 op.add_column("users", sa.Column("zip_code", sa.Unicode(25)))
41 op.add_column(
42 "users",
43 sa.Column(
44 "default_report_sort",
45 sa.Unicode(20),
46 nullable=False,
47 server_default="newest",
48 ),
49 )
50 op.add_column("users", sa.Column("city", sa.Unicode(128)))
51 op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default=""))
52 op.add_column(
53 "users",
54 sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"),
55 )
56 op.add_column(
57 "users",
58 sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""),
59 )
46 60
47 61 op.create_table(
48 'integrations',
49 sa.Column('id', sa.Integer(), primary_key=True),
50 sa.Column('resource_id', sa.Integer(),
51 sa.ForeignKey('resources.resource_id', onupdate='cascade',
52 ondelete='cascade')),
53 sa.Column('integration_name', sa.Unicode(64)),
54 sa.Column('config', sa.dialects.postgresql.JSON, nullable=False),
55 sa.Column('modified_date', sa.DateTime(), nullable=False, server_default=sa.func.now()),
56 sa.Column('external_id', sa.Unicode(255)),
57 sa.Column('external_id2', sa.Unicode(255))
62 "integrations",
63 sa.Column("id", sa.Integer(), primary_key=True),
64 sa.Column(
65 "resource_id",
66 sa.Integer(),
67 sa.ForeignKey(
68 "resources.resource_id", onupdate="cascade", ondelete="cascade"
69 ),
70 ),
71 sa.Column("integration_name", sa.Unicode(64)),
72 sa.Column("config", sa.dialects.postgresql.JSON, nullable=False),
73 sa.Column(
74 "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now()
75 ),
76 sa.Column("external_id", sa.Unicode(255)),
77 sa.Column("external_id2", sa.Unicode(255)),
58 78 )
59 79
60 80 op.create_table(
61 'alert_channels',
62 sa.Column('owner_id', sa.Integer(),
63 sa.ForeignKey('users.id', onupdate='cascade',
64 ondelete='cascade'), nullable=False),
65 sa.Column('channel_name', sa.Unicode(25), nullable=False),
66 sa.Column('channel_value', sa.Unicode(80), nullable=False),
67 sa.Column('channel_json_conf', sa.dialects.postgresql.JSON, nullable=False),
68 sa.Column('channel_validated', sa.Boolean, nullable=False, server_default='False'),
69 sa.Column('send_alerts', sa.Boolean, nullable=False, server_default='True'),
70 sa.Column('notify_only_first', sa.Boolean, nullable=False, server_default='False'),
71 sa.Column('daily_digest', sa.Boolean, nullable=False, server_default='True'),
72 sa.Column('pkey', sa.Integer(), primary_key=True),
73 sa.Column('integration_id', sa.Integer,
74 sa.ForeignKey('integrations.id', onupdate='cascade',
75 ondelete='cascade')),
76 )
77 op.create_unique_constraint('uq_alert_channels', 'alert_channels',
78 ["owner_id", "channel_name", "channel_value"])
81 "alert_channels",
82 sa.Column(
83 "owner_id",
84 sa.Integer(),
85 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
86 nullable=False,
87 ),
88 sa.Column("channel_name", sa.Unicode(25), nullable=False),
89 sa.Column("channel_value", sa.Unicode(80), nullable=False),
90 sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False),
91 sa.Column(
92 "channel_validated", sa.Boolean, nullable=False, server_default="False"
93 ),
94 sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"),
95 sa.Column(
96 "notify_only_first", sa.Boolean, nullable=False, server_default="False"
97 ),
98 sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"),
99 sa.Column("pkey", sa.Integer(), primary_key=True),
100 sa.Column(
101 "integration_id",
102 sa.Integer,
103 sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"),
104 ),
105 )
106 op.create_unique_constraint(
107 "uq_alert_channels",
108 "alert_channels",
109 ["owner_id", "channel_name", "channel_value"],
110 )
79 111
80 112 op.create_table(
81 'alert_channels_actions',
82 sa.Column('owner_id', sa.Integer(), nullable=False),
83 sa.Column('resource_id', sa.Integer(),
84 sa.ForeignKey('resources.resource_id', onupdate='cascade',
85 ondelete='cascade')),
86 sa.Column('pkey', sa.Integer(), primary_key=True),
87 sa.Column('action', sa.Unicode(10), nullable=False, server_default='always'),
88 sa.Column('rule', sa.dialects.postgresql.JSON),
89 sa.Column('type', sa.Unicode(10), index=True),
90 sa.Column('other_id', sa.Unicode(40), index=True),
91 sa.Column('config', sa.dialects.postgresql.JSON),
92 sa.Column('name', sa.Unicode(255), server_default='')
113 "alert_channels_actions",
114 sa.Column("owner_id", sa.Integer(), nullable=False),
115 sa.Column(
116 "resource_id",
117 sa.Integer(),
118 sa.ForeignKey(
119 "resources.resource_id", onupdate="cascade", ondelete="cascade"
120 ),
121 ),
122 sa.Column("pkey", sa.Integer(), primary_key=True),
123 sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"),
124 sa.Column("rule", sa.dialects.postgresql.JSON),
125 sa.Column("type", sa.Unicode(10), index=True),
126 sa.Column("other_id", sa.Unicode(40), index=True),
127 sa.Column("config", sa.dialects.postgresql.JSON),
128 sa.Column("name", sa.Unicode(255), server_default=""),
93 129 )
94 130
95
96 131 op.create_table(
97 'application_postprocess_conf',
98 sa.Column('pkey', sa.Integer(), primary_key=True),
99 sa.Column('do', sa.Unicode(25), nullable=False),
100 sa.Column('new_value', sa.UnicodeText(), nullable=False, server_default=''),
101 sa.Column('resource_id', sa.Integer(),
102 sa.ForeignKey('resources.resource_id',
103 onupdate='cascade',
104 ondelete='cascade'), nullable=False),
105 sa.Column('rule', sa.dialects.postgresql.JSON),
132 "application_postprocess_conf",
133 sa.Column("pkey", sa.Integer(), primary_key=True),
134 sa.Column("do", sa.Unicode(25), nullable=False),
135 sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""),
136 sa.Column(
137 "resource_id",
138 sa.Integer(),
139 sa.ForeignKey(
140 "resources.resource_id", onupdate="cascade", ondelete="cascade"
141 ),
142 nullable=False,
143 ),
144 sa.Column("rule", sa.dialects.postgresql.JSON),
106 145 )
107 146
108 147 op.create_table(
109 'applications',
110 sa.Column('resource_id', sa.Integer(),
111 sa.ForeignKey('resources.resource_id', onupdate='cascade',
112 ondelete='cascade'), nullable=False,
113 primary_key=True, autoincrement=False),
114 sa.Column('domains', sa.UnicodeText, nullable=False),
115 sa.Column('api_key', sa.Unicode(32), nullable=False, index=True),
116 sa.Column('default_grouping', sa.Unicode(20), nullable=False, server_default='url_type'),
117 sa.Column('public_key', sa.Unicode(32), nullable=False, index=True),
118 sa.Column('error_report_threshold', sa.Integer(), server_default='10', nullable=False),
119 sa.Column('slow_report_threshold', sa.Integer(), server_default='10', nullable=False),
120 sa.Column('apdex_threshold', sa.Float(), server_default='0.7', nullable=False),
121 sa.Column('allow_permanent_storage', sa.Boolean(), server_default="false", nullable=False),
122 )
123 op.create_unique_constraint(None, 'applications',
124 ["public_key"])
125 op.create_unique_constraint(None, 'applications',
126 ["api_key"])
148 "applications",
149 sa.Column(
150 "resource_id",
151 sa.Integer(),
152 sa.ForeignKey(
153 "resources.resource_id", onupdate="cascade", ondelete="cascade"
154 ),
155 nullable=False,
156 primary_key=True,
157 autoincrement=False,
158 ),
159 sa.Column("domains", sa.UnicodeText, nullable=False),
160 sa.Column("api_key", sa.Unicode(32), nullable=False, index=True),
161 sa.Column(
162 "default_grouping",
163 sa.Unicode(20),
164 nullable=False,
165 server_default="url_type",
166 ),
167 sa.Column("public_key", sa.Unicode(32), nullable=False, index=True),
168 sa.Column(
169 "error_report_threshold", sa.Integer(), server_default="10", nullable=False
170 ),
171 sa.Column(
172 "slow_report_threshold", sa.Integer(), server_default="10", nullable=False
173 ),
174 sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False),
175 sa.Column(
176 "allow_permanent_storage",
177 sa.Boolean(),
178 server_default="false",
179 nullable=False,
180 ),
181 )
182 op.create_unique_constraint(None, "applications", ["public_key"])
183 op.create_unique_constraint(None, "applications", ["api_key"])
127 184
128 185 op.create_table(
129 'metrics',
130 sa.Column('pkey', sa.types.BigInteger, nullable=False, primary_key=True),
131 sa.Column('resource_id', sa.Integer(),
132 sa.ForeignKey('resources.resource_id',
133 onupdate='cascade',
134 ondelete='cascade')),
135 sa.Column('timestamp', sa.DateTime),
136 sa.Column('namespace', sa.Unicode(255)),
137 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}")
186 "metrics",
187 sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True),
188 sa.Column(
189 "resource_id",
190 sa.Integer(),
191 sa.ForeignKey(
192 "resources.resource_id", onupdate="cascade", ondelete="cascade"
193 ),
194 ),
195 sa.Column("timestamp", sa.DateTime),
196 sa.Column("namespace", sa.Unicode(255)),
197 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
138 198 )
139 199
140 200 op.create_table(
141 'events',
142 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
143 sa.Column('start_date', sa.DateTime, nullable=False, index=True),
144 sa.Column('end_date', sa.DateTime),
145 sa.Column('status', sa.Integer(), nullable=False, index=True),
146 sa.Column('event_type', sa.Integer(), nullable=False, index=True),
147 sa.Column('origin_user_id', sa.Integer()),
148 sa.Column('target_user_id', sa.Integer()),
149 sa.Column('resource_id', sa.Integer(), index=True),
150 sa.Column('text', sa.UnicodeText, server_default=''),
151 sa.Column('values', sa.dialects.postgresql.JSON),
152 sa.Column('target_id', sa.Integer()),
153 sa.Column('target_uuid', sa.Unicode(40), index=True)
201 "events",
202 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
203 sa.Column("start_date", sa.DateTime, nullable=False, index=True),
204 sa.Column("end_date", sa.DateTime),
205 sa.Column("status", sa.Integer(), nullable=False, index=True),
206 sa.Column("event_type", sa.Integer(), nullable=False, index=True),
207 sa.Column("origin_user_id", sa.Integer()),
208 sa.Column("target_user_id", sa.Integer()),
209 sa.Column("resource_id", sa.Integer(), index=True),
210 sa.Column("text", sa.UnicodeText, server_default=""),
211 sa.Column("values", sa.dialects.postgresql.JSON),
212 sa.Column("target_id", sa.Integer()),
213 sa.Column("target_uuid", sa.Unicode(40), index=True),
154 214 )
155 215
156 216 op.create_table(
157 'logs',
158 sa.Column('log_id', sa.types.BigInteger, nullable=False, primary_key=True),
159 sa.Column('resource_id', sa.Integer(),
160 sa.ForeignKey('resources.resource_id',
161 onupdate='cascade',
162 ondelete='cascade')),
163 sa.Column('log_level', sa.SmallInteger(), nullable=False),
164 sa.Column('primary_key', sa.Unicode(128), nullable=True),
165 sa.Column('message', sa.UnicodeText, nullable=False, server_default=''),
166 sa.Column('timestamp', sa.DateTime),
167 sa.Column('namespace', sa.Unicode(255)),
168 sa.Column('request_id', sa.Unicode(40)),
169 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}"),
170 sa.Column('permanent', sa.Boolean(), server_default="false",
171 nullable=False)
217 "logs",
218 sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True),
219 sa.Column(
220 "resource_id",
221 sa.Integer(),
222 sa.ForeignKey(
223 "resources.resource_id", onupdate="cascade", ondelete="cascade"
224 ),
225 ),
226 sa.Column("log_level", sa.SmallInteger(), nullable=False),
227 sa.Column("primary_key", sa.Unicode(128), nullable=True),
228 sa.Column("message", sa.UnicodeText, nullable=False, server_default=""),
229 sa.Column("timestamp", sa.DateTime),
230 sa.Column("namespace", sa.Unicode(255)),
231 sa.Column("request_id", sa.Unicode(40)),
232 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
233 sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False),
172 234 )
173 235
174 236 op.create_table(
175 'reports_groups',
176 sa.Column('id', sa.types.BigInteger, primary_key=True),
177 sa.Column('resource_id', sa.Integer,
178 sa.ForeignKey('resources.resource_id', onupdate='cascade',
179 ondelete='cascade'), nullable=False),
180 sa.Column('priority', sa.Integer, nullable=False, server_default="5"),
181 sa.Column('first_timestamp', sa.DateTime(), nullable=False, server_default=sa.func.now()),
182 sa.Column('last_timestamp', sa.DateTime()),
183 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
184 sa.Column('grouping_hash', sa.Unicode(40), nullable=False, server_default=""),
185 sa.Column('triggered_postprocesses_ids', sa.dialects.postgresql.JSON, nullable=False, server_default="[]"),
186 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
187 sa.Column('total_reports', sa.Integer, nullable=False, server_default="0"),
188 sa.Column('last_report', sa.Integer, nullable=False, server_default="0"),
189 sa.Column('occurences', sa.Integer, nullable=False, server_default="1"),
190 sa.Column('average_duration', sa.Float(), nullable=False, server_default="0"),
191 sa.Column('summed_duration', sa.Float(), nullable=False, server_default="0"),
192 sa.Column('notified', sa.Boolean, nullable=False, server_default="False"),
193 sa.Column('fixed', sa.Boolean, nullable=False, server_default="False"),
194 sa.Column('public', sa.Boolean, nullable=False, server_default="False"),
195 sa.Column('read', sa.Boolean, nullable=False, server_default="False"),
237 "reports_groups",
238 sa.Column("id", sa.types.BigInteger, primary_key=True),
239 sa.Column(
240 "resource_id",
241 sa.Integer,
242 sa.ForeignKey(
243 "resources.resource_id", onupdate="cascade", ondelete="cascade"
244 ),
245 nullable=False,
246 ),
247 sa.Column("priority", sa.Integer, nullable=False, server_default="5"),
248 sa.Column(
249 "first_timestamp",
250 sa.DateTime(),
251 nullable=False,
252 server_default=sa.func.now(),
253 ),
254 sa.Column("last_timestamp", sa.DateTime()),
255 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
256 sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""),
257 sa.Column(
258 "triggered_postprocesses_ids",
259 sa.dialects.postgresql.JSON,
260 nullable=False,
261 server_default="[]",
262 ),
263 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
264 sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"),
265 sa.Column("last_report", sa.Integer, nullable=False, server_default="0"),
266 sa.Column("occurences", sa.Integer, nullable=False, server_default="1"),
267 sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"),
268 sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"),
269 sa.Column("notified", sa.Boolean, nullable=False, server_default="False"),
270 sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"),
271 sa.Column("public", sa.Boolean, nullable=False, server_default="False"),
272 sa.Column("read", sa.Boolean, nullable=False, server_default="False"),
196 273 )
197 274
198 275 op.create_table(
199 'reports',
200 sa.Column('id', sa.types.BigInteger, primary_key=True),
201 sa.Column('group_id', sa.types.BigInteger,
202 sa.ForeignKey('reports_groups.id', onupdate='cascade',
203 ondelete='cascade'), nullable=False, index=True),
204 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
205 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
206 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
207 sa.Column('extra', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
208 sa.Column('request', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
209 sa.Column('tags', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
210 sa.Column('ip', sa.Unicode(39), nullable=False, server_default=""),
211 sa.Column('username', sa.Unicode(255), nullable=False, server_default=""),
212 sa.Column('user_agent', sa.Unicode(512), nullable=False, server_default=""),
213 sa.Column('url', sa.UnicodeText, nullable=False, server_default=""),
214 sa.Column('request_id', sa.Unicode(40), nullable=False, server_default=""),
215 sa.Column('request_stats', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
216 sa.Column('traceback', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
217 sa.Column('traceback_hash', sa.Unicode(40), nullable=False, server_default=""),
218 sa.Column('start_time', sa.DateTime(), nullable=False, server_default=sa.func.now()),
219 sa.Column('end_time', sa.DateTime()),
220 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
221 sa.Column('duration', sa.Float(), nullable=False, server_default="0"),
222 sa.Column('http_status', sa.Integer, index=True),
223 sa.Column('url_domain', sa.Unicode(128)),
224 sa.Column('url_path', sa.UnicodeText),
225 sa.Column('language', sa.Integer, server_default="0"),
226 )
227 op.create_index(None, 'reports',
228 [sa.text("(tags ->> 'server_name')")])
229 op.create_index(None, 'reports',
230 [sa.text("(tags ->> 'view_name')")])
276 "reports",
277 sa.Column("id", sa.types.BigInteger, primary_key=True),
278 sa.Column(
279 "group_id",
280 sa.types.BigInteger,
281 sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"),
282 nullable=False,
283 index=True,
284 ),
285 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
286 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
287 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
288 sa.Column(
289 "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
290 ),
291 sa.Column(
292 "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
293 ),
294 sa.Column(
295 "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
296 ),
297 sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""),
298 sa.Column("username", sa.Unicode(255), nullable=False, server_default=""),
299 sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""),
300 sa.Column("url", sa.UnicodeText, nullable=False, server_default=""),
301 sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""),
302 sa.Column(
303 "request_stats",
304 sa.dialects.postgresql.JSON,
305 nullable=False,
306 server_default="{}",
307 ),
308 sa.Column(
309 "traceback",
310 sa.dialects.postgresql.JSON,
311 nullable=False,
312 server_default="{}",
313 ),
314 sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""),
315 sa.Column(
316 "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now()
317 ),
318 sa.Column("end_time", sa.DateTime()),
319 sa.Column(
320 "report_group_time",
321 sa.DateTime,
322 index=True,
323 nullable=False,
324 server_default=sa.func.now(),
325 ),
326 sa.Column("duration", sa.Float(), nullable=False, server_default="0"),
327 sa.Column("http_status", sa.Integer, index=True),
328 sa.Column("url_domain", sa.Unicode(128)),
329 sa.Column("url_path", sa.UnicodeText),
330 sa.Column("language", sa.Integer, server_default="0"),
331 )
332 op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")])
333 op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")])
231 334
232 335 op.create_table(
233 'reports_assignments',
234 sa.Column('group_id', sa.types.BigInteger, nullable=False, primary_key=True),
235 sa.Column('owner_id', sa.Integer,
236 sa.ForeignKey('users.id', onupdate='cascade',ondelete='cascade'),
237 nullable=False, primary_key=True),
238 sa.Column('report_time', sa.DateTime, nullable=False)
239 )
336 "reports_assignments",
337 sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True),
338 sa.Column(
339 "owner_id",
340 sa.Integer,
341 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
342 nullable=False,
343 primary_key=True,
344 ),
345 sa.Column("report_time", sa.DateTime, nullable=False),
346 )
240 347
241 348 op.create_table(
242 'reports_comments',
243 sa.Column('comment_id', sa.Integer, primary_key=True),
244 sa.Column('body', sa.UnicodeText, nullable=False, server_default=''),
245 sa.Column('owner_id', sa.Integer,
246 sa.ForeignKey('users.id', onupdate='cascade',
247 ondelete='set null'), nullable=True),
248 sa.Column('created_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
249 sa.Column('report_time', sa.DateTime, nullable=False),
250 sa.Column('group_id', sa.types.BigInteger, nullable=False)
349 "reports_comments",
350 sa.Column("comment_id", sa.Integer, primary_key=True),
351 sa.Column("body", sa.UnicodeText, nullable=False, server_default=""),
352 sa.Column(
353 "owner_id",
354 sa.Integer,
355 sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"),
356 nullable=True,
357 ),
358 sa.Column(
359 "created_timestamp",
360 sa.DateTime,
361 nullable=False,
362 server_default=sa.func.now(),
363 ),
364 sa.Column("report_time", sa.DateTime, nullable=False),
365 sa.Column("group_id", sa.types.BigInteger, nullable=False),
251 366 )
252 367
253 368 op.create_table(
254 'reports_stats',
255 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
256 sa.Column('start_interval', sa.DateTime, nullable=False, index=True),
257 sa.Column('group_id', sa.types.BigInteger, index=True),
258 sa.Column('occurences', sa.Integer, nullable=False, server_default='0', index=True),
259 sa.Column('owner_user_id', sa.Integer),
260 sa.Column('type', sa.Integer, index=True, nullable=False),
261 sa.Column('duration', sa.Float(), server_default='0'),
262 sa.Column('server_name', sa.Unicode(128),
263 server_default=''),
264 sa.Column('view_name', sa.Unicode(128),
265 server_default=''),
266 sa.Column('id', sa.BigInteger(), nullable=False, primary_key=True),
267 )
268 op.create_index('ix_reports_stats_start_interval_group_id', 'reports_stats',
269 ["start_interval", "group_id"])
369 "reports_stats",
370 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
371 sa.Column("start_interval", sa.DateTime, nullable=False, index=True),
372 sa.Column("group_id", sa.types.BigInteger, index=True),
373 sa.Column(
374 "occurences", sa.Integer, nullable=False, server_default="0", index=True
375 ),
376 sa.Column("owner_user_id", sa.Integer),
377 sa.Column("type", sa.Integer, index=True, nullable=False),
378 sa.Column("duration", sa.Float(), server_default="0"),
379 sa.Column("server_name", sa.Unicode(128), server_default=""),
380 sa.Column("view_name", sa.Unicode(128), server_default=""),
381 sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True),
382 )
383 op.create_index(
384 "ix_reports_stats_start_interval_group_id",
385 "reports_stats",
386 ["start_interval", "group_id"],
387 )
270 388
271 389 op.create_table(
272 'slow_calls',
273 sa.Column('id', sa.types.BigInteger, primary_key=True),
274 sa.Column('report_id', sa.types.BigInteger, sa.ForeignKey('reports.id', onupdate='cascade', ondelete='cascade'),
275 nullable=False, index=True),
276 sa.Column('duration', sa.Float(), nullable=False, server_default="0", index=True),
277 sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now(), index=True),
278 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
279 sa.Column('type', sa.Unicode(16), nullable=False, index=True),
280 sa.Column('statement', sa.UnicodeText, nullable=False, server_default=''),
281 sa.Column('parameters', sa.dialects.postgresql.JSON, nullable=False),
282 sa.Column('location', sa.UnicodeText, server_default=''),
283 sa.Column('subtype', sa.Unicode(16), nullable=False, index=True),
284 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
285 sa.Column('statement_hash', sa.Unicode(60), index=True)
390 "slow_calls",
391 sa.Column("id", sa.types.BigInteger, primary_key=True),
392 sa.Column(
393 "report_id",
394 sa.types.BigInteger,
395 sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"),
396 nullable=False,
397 index=True,
398 ),
399 sa.Column(
400 "duration", sa.Float(), nullable=False, server_default="0", index=True
401 ),
402 sa.Column(
403 "timestamp",
404 sa.DateTime,
405 nullable=False,
406 server_default=sa.func.now(),
407 index=True,
408 ),
409 sa.Column(
410 "report_group_time",
411 sa.DateTime,
412 index=True,
413 nullable=False,
414 server_default=sa.func.now(),
415 ),
416 sa.Column("type", sa.Unicode(16), nullable=False, index=True),
417 sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""),
418 sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False),
419 sa.Column("location", sa.UnicodeText, server_default=""),
420 sa.Column("subtype", sa.Unicode(16), nullable=False, index=True),
421 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
422 sa.Column("statement_hash", sa.Unicode(60), index=True),
286 423 )
287 424
288 425 op.create_table(
289 'tags',
290 sa.Column('id', sa.types.BigInteger, primary_key=True),
291 sa.Column('resource_id', sa.Integer,
292 sa.ForeignKey('resources.resource_id', onupdate='cascade',
293 ondelete='cascade')),
294 sa.Column('first_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
295 sa.Column('last_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
296 sa.Column('name', sa.Unicode(32), nullable=False),
297 sa.Column('value', sa.dialects.postgresql.JSON, nullable=False),
298 sa.Column('times_seen', sa.Integer, nullable=False, server_default='1')
426 "tags",
427 sa.Column("id", sa.types.BigInteger, primary_key=True),
428 sa.Column(
429 "resource_id",
430 sa.Integer,
431 sa.ForeignKey(
432 "resources.resource_id", onupdate="cascade", ondelete="cascade"
433 ),
434 ),
435 sa.Column(
436 "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
437 ),
438 sa.Column(
439 "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
440 ),
441 sa.Column("name", sa.Unicode(32), nullable=False),
442 sa.Column("value", sa.dialects.postgresql.JSON, nullable=False),
443 sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"),
299 444 )
300 445
301 446 op.create_table(
302 'auth_tokens',
303 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
304 sa.Column('token', sa.Unicode),
305 sa.Column('creation_date', sa.DateTime, nullable=False, server_default=sa.func.now()),
306 sa.Column('expires', sa.DateTime),
307 sa.Column('owner_id', sa.Integer,
308 sa.ForeignKey('users.id', onupdate='cascade',
309 ondelete='cascade')),
310 sa.Column('description', sa.Unicode),
447 "auth_tokens",
448 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
449 sa.Column("token", sa.Unicode),
450 sa.Column(
451 "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now()
452 ),
453 sa.Column("expires", sa.DateTime),
454 sa.Column(
455 "owner_id",
456 sa.Integer,
457 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
458 ),
459 sa.Column("description", sa.Unicode),
311 460 )
312 461
313 462 op.create_table(
314 'channels_actions',
315 sa.Column('channel_pkey', sa.Integer,
316 sa.ForeignKey('alert_channels.pkey',
317 ondelete='CASCADE', onupdate='CASCADE')),
318 sa.Column('action_pkey', sa.Integer,
319 sa.ForeignKey('alert_channels_actions.pkey',
320 ondelete='CASCADE', onupdate='CASCADE'))
463 "channels_actions",
464 sa.Column(
465 "channel_pkey",
466 sa.Integer,
467 sa.ForeignKey(
468 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
469 ),
470 ),
471 sa.Column(
472 "action_pkey",
473 sa.Integer,
474 sa.ForeignKey(
475 "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE"
476 ),
477 ),
321 478 )
322 479
323 480 op.create_table(
324 'config',
325 sa.Column('key', sa.Unicode(128), primary_key=True),
326 sa.Column('section', sa.Unicode(128), primary_key=True),
327 sa.Column('value', sa.dialects.postgresql.JSON,
328 server_default="{}")
481 "config",
482 sa.Column("key", sa.Unicode(128), primary_key=True),
483 sa.Column("section", sa.Unicode(128), primary_key=True),
484 sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"),
329 485 )
330 486
331 487 op.create_table(
332 'plugin_configs',
333 sa.Column('id', sa.Integer, primary_key=True),
334 sa.Column('plugin_name', sa.Unicode(128)),
335 sa.Column('section', sa.Unicode(128)),
336 sa.Column('config', sa.dialects.postgresql.JSON,
337 server_default="{}"),
338 sa.Column('resource_id', sa.Integer(),
339 sa.ForeignKey('resources.resource_id', onupdate='cascade',
340 ondelete='cascade')),
341 sa.Column('owner_id', sa.Integer(),
342 sa.ForeignKey('users.id', onupdate='cascade',
343 ondelete='cascade')))
488 "plugin_configs",
489 sa.Column("id", sa.Integer, primary_key=True),
490 sa.Column("plugin_name", sa.Unicode(128)),
491 sa.Column("section", sa.Unicode(128)),
492 sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"),
493 sa.Column(
494 "resource_id",
495 sa.Integer(),
496 sa.ForeignKey(
497 "resources.resource_id", onupdate="cascade", ondelete="cascade"
498 ),
499 ),
500 sa.Column(
501 "owner_id",
502 sa.Integer(),
503 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
504 ),
505 )
344 506
345 507 op.create_table(
346 'rc_versions',
347 sa.Column('name', sa.Unicode(40), primary_key=True),
348 sa.Column('value', sa.Unicode(40)),
508 "rc_versions",
509 sa.Column("name", sa.Unicode(40), primary_key=True),
510 sa.Column("value", sa.Unicode(40)),
511 )
512 version_table = sa.table(
513 "rc_versions",
514 sa.Column("name", sa.Unicode(40)),
515 sa.Column("value", sa.Unicode(40)),
349 516 )
350 version_table = sa.table('rc_versions',
351 sa.Column('name', sa.Unicode(40)),
352 sa.Column('value', sa.Unicode(40)))
353 517
354 insert = version_table.insert().values(name='es_reports')
518 insert = version_table.insert().values(name="es_reports")
355 519 op.execute(insert)
356 insert = version_table.insert().values(name='es_reports_groups')
520 insert = version_table.insert().values(name="es_reports_groups")
357 521 op.execute(insert)
358 insert = version_table.insert().values(name='es_reports_stats')
522 insert = version_table.insert().values(name="es_reports_stats")
359 523 op.execute(insert)
360 insert = version_table.insert().values(name='es_logs')
524 insert = version_table.insert().values(name="es_logs")
361 525 op.execute(insert)
362 insert = version_table.insert().values(name='es_metrics')
526 insert = version_table.insert().values(name="es_metrics")
363 527 op.execute(insert)
364 insert = version_table.insert().values(name='es_slow_calls')
528 insert = version_table.insert().values(name="es_slow_calls")
365 529 op.execute(insert)
366 530
367
368 op.execute('''
531 op.execute(
532 """
369 533 CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
370 534 RETURNS timestamp without time zone AS
371 535 $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
372 536 LANGUAGE sql VOLATILE;
373 ''')
537 """
538 )
374 539
375 op.execute('''
540 op.execute(
541 """
376 542 CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger
377 543 LANGUAGE plpgsql SECURITY DEFINER
378 544 AS $$
@@ -426,13 +592,17 b' def upgrade():'
426 592 RETURN NULL;
427 593 END
428 594 $$;
429 ''')
595 """
596 )
430 597
431 op.execute('''
598 op.execute(
599 """
432 600 CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
433 ''')
601 """
602 )
434 603
435 op.execute('''
604 op.execute(
605 """
436 606 CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
437 607 LANGUAGE plpgsql SECURITY DEFINER
438 608 AS $$
@@ -463,13 +633,17 b' def upgrade():'
463 633 RETURN NULL;
464 634 END
465 635 $$;
466 ''')
636 """
637 )
467 638
468 op.execute('''
639 op.execute(
640 """
469 641 CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
470 ''')
642 """
643 )
471 644
472 op.execute('''
645 op.execute(
646 """
473 647 CREATE FUNCTION partition_reports_stats() RETURNS trigger
474 648 LANGUAGE plpgsql SECURITY DEFINER
475 649 AS $$
@@ -499,13 +673,17 b' def upgrade():'
499 673 RETURN NULL;
500 674 END
501 675 $$;
502 ''')
676 """
677 )
503 678
504 op.execute('''
679 op.execute(
680 """
505 681 CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
506 ''')
682 """
683 )
507 684
508 op.execute('''
685 op.execute(
686 """
509 687 CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
510 688 LANGUAGE plpgsql SECURITY DEFINER
511 689 AS $$
@@ -533,13 +711,17 b' def upgrade():'
533 711 RETURN NULL;
534 712 END
535 713 $$;
536 ''')
714 """
715 )
537 716
538 op.execute('''
717 op.execute(
718 """
539 719 CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
540 ''')
720 """
721 )
541 722
542 op.execute('''
723 op.execute(
724 """
543 725 CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
544 726 LANGUAGE plpgsql SECURITY DEFINER
545 727 AS $$
@@ -573,14 +755,17 b' def upgrade():'
573 755 RETURN NULL;
574 756 END
575 757 $$;
576 ''')
758 """
759 )
577 760
578 op.execute('''
761 op.execute(
762 """
579 763 CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
580 ''')
581
764 """
765 )
582 766
583 op.execute('''
767 op.execute(
768 """
584 769 CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
585 770 LANGUAGE plpgsql SECURITY DEFINER
586 771 AS $$
@@ -614,11 +799,15 b' def upgrade():'
614 799 RETURN NULL;
615 800 END
616 801 $$;
617 ''')
802 """
803 )
618 804
619 op.execute('''
805 op.execute(
806 """
620 807 CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
621 ''')
808 """
809 )
810
622 811
623 812 def downgrade():
624 813 pass
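
For reference, the floor_time_5min SQL helper reformatted above is what buckets metric timestamps into the five-minute slots the partition triggers rely on. A minimal Python sketch of the same rounding rule (illustrative only; the authoritative logic is the SQL function in the hunk):

    from datetime import datetime

    def floor_time_5min(ts):
        # Round minutes down to the nearest multiple of five and drop
        # seconds/microseconds -- equivalent to the SQL expression
        # date_trunc('hour', ts) + INTERVAL '5 min' * FLOOR(minute / 5.0).
        return ts.replace(minute=ts.minute - ts.minute % 5, second=0, microsecond=0)

    assert floor_time_5min(datetime(2018, 2, 28, 13, 52, 50)) == datetime(2018, 2, 28, 13, 50)
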
@@ -7,8 +7,8 b' Create Date: 2018-02-28 13:52:50.717217'
7 7 """
8 8
9 9 # revision identifiers, used by Alembic.
10 revision = 'e9fcfbdd9498'
11 down_revision = '55b6e612672f'
10 revision = "e9fcfbdd9498"
11 down_revision = "55b6e612672f"
12 12
13 13 from alembic import op
14 14 import sqlalchemy as sa
@@ -16,17 +16,25 b' import sqlalchemy as sa'
16 16
17 17 def upgrade():
18 18 op.create_table(
19 'channels_resources',
20 sa.Column('channel_pkey', sa.Integer,
21 sa.ForeignKey('alert_channels.pkey',
22 ondelete='CASCADE', onupdate='CASCADE'),
23 primary_key=True),
24 sa.Column('resource_id', sa.Integer,
25 sa.ForeignKey('resources.resource_id',
26 ondelete='CASCADE', onupdate='CASCADE'),
27 primary_key=True)
19 "channels_resources",
20 sa.Column(
21 "channel_pkey",
22 sa.Integer,
23 sa.ForeignKey(
24 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
25 ),
26 primary_key=True,
27 ),
28 sa.Column(
29 "resource_id",
30 sa.Integer,
31 sa.ForeignKey(
32 "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE"
33 ),
34 primary_key=True,
35 ),
28 36 )
29 37
30 38
31 39 def downgrade():
32 op.drop_table('channels_resources')
40 op.drop_table("channels_resources")
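
The quote and wrapping changes in this hunk, like everywhere else in the diff, are plain black output. To reproduce one locally (a sketch using black's public Python API; exact names can vary between black releases):

    import black

    # Feed black the pre-change line from this hunk and compare.
    src = "op.drop_table('channels_resources')\n"
    formatted = black.format_str(src, mode=black.FileMode())
    assert formatted == 'op.drop_table("channels_resources")\n'
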
@@ -29,11 +29,11 b' log = logging.getLogger(__name__)'
29 29 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
30 30
31 31 NAMING_CONVENTION = {
32 "ix": 'ix_%(column_0_label)s',
32 "ix": "ix_%(column_0_label)s",
33 33 "uq": "uq_%(table_name)s_%(column_0_name)s",
34 34 "ck": "ck_%(table_name)s_%(constraint_name)s",
35 35 "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
36 "pk": "pk_%(table_name)s"
36 "pk": "pk_%(table_name)s",
37 37 }
38 38
39 39 metadata = MetaData(naming_convention=NAMING_CONVENTION)
@@ -59,23 +59,24 b' class SliceableESQuery(object):'
59 59 self.query = query
60 60 self.sort_query = sort_query
61 61 self.aggregations = aggregations
62 self.items_per_page = kwconfig.pop('items_per_page', 10)
63 self.page = kwconfig.pop('page', 1)
62 self.items_per_page = kwconfig.pop("items_per_page", 10)
63 self.page = kwconfig.pop("page", 1)
64 64 self.kwconfig = kwconfig
65 65 self.result = None
66 66
67 67 def __getitem__(self, index):
68 68 config = self.kwconfig.copy()
69 config['from_'] = index.start
69 config["from_"] = index.start
70 70 query = self.query.copy()
71 71 if self.sort_query:
72 72 query.update(self.sort_query)
73 self.result = Datastores.es.search(body=query, size=self.items_per_page,
74 **config)
73 self.result = Datastores.es.search(
74 body=query, size=self.items_per_page, **config
75 )
75 76 if self.aggregations:
76 self.items = self.result.get('aggregations')
77 self.items = self.result.get("aggregations")
77 78 else:
78 self.items = self.result['hits']['hits']
79 self.items = self.result["hits"]["hits"]
79 80
80 81 return self.items
81 82
@@ -85,14 +86,15 b' class SliceableESQuery(object):'
85 86 def __len__(self):
86 87 config = self.kwconfig.copy()
87 88 query = self.query.copy()
88 self.result = Datastores.es.search(body=query, size=self.items_per_page,
89 **config)
89 self.result = Datastores.es.search(
90 body=query, size=self.items_per_page, **config
91 )
90 92 if self.aggregations:
91 self.items = self.result.get('aggregations')
93 self.items = self.result.get("aggregations")
92 94 else:
93 self.items = self.result['hits']['hits']
95 self.items = self.result["hits"]["hits"]
94 96
95 count = int(self.result['hits']['total'])
97 count = int(self.result["hits"]["total"])
96 98 return count if count < 5000 else 5000
97 99
98 100
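
SliceableESQuery exists so an Elasticsearch result set can be handed to a slice-based paginator: __len__ reports the (capped) total and obj[start:stop] fetches one page. A toy stand-in showing that contract (no Elasticsearch involved; purely illustrative):

    class FakePages:
        # Anything with __len__ and a slice-aware __getitem__ satisfies
        # a typical paginator, which is all SliceableESQuery provides.
        def __init__(self, hits):
            self.hits = hits

        def __len__(self):
            return min(len(self.hits), 5000)  # same 5000 cap as above

        def __getitem__(self, index):
            return self.hits[index.start:index.stop]

    pages = FakePages(list(range(23)))
    page, per_page = 2, 10
    assert pages[(page - 1) * per_page:page * per_page] == list(range(10, 20))
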
@@ -102,8 +104,7 b' from appenlight.models.user import User'
102 104 from appenlight.models.alert_channel import AlertChannel
103 105 from appenlight.models.alert_channel_action import AlertChannelAction
104 106 from appenlight.models.metric import Metric
105 from appenlight.models.application_postprocess_conf import \
106 ApplicationPostprocessConf
107 from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
107 108 from appenlight.models.auth_token import AuthToken
108 109 from appenlight.models.event import Event
109 110 from appenlight.models.external_identity import ExternalIdentity
@@ -124,7 +125,15 b' from appenlight.models.user_permission import UserPermission'
124 125 from appenlight.models.user_resource_permission import UserResourcePermission
125 126 from ziggurat_foundations import ziggurat_model_init
126 127
127 ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission,
128 UserResourcePermission, GroupResourcePermission,
129 Resource,
130 ExternalIdentity, passwordmanager=None)
128 ziggurat_model_init(
129 User,
130 Group,
131 UserGroup,
132 GroupPermission,
133 UserPermission,
134 UserResourcePermission,
135 GroupResourcePermission,
136 Resource,
137 ExternalIdentity,
138 passwordmanager=None,
139 )
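
The NAMING_CONVENTION dict reformatted above is SQLAlchemy's standard mechanism for deterministic constraint names. A self-contained sketch of what it buys, using two entries from that convention (demo table, not project code):

    import sqlalchemy as sa
    from sqlalchemy.schema import CreateTable

    convention = {
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "pk": "pk_%(table_name)s",
    }
    metadata = sa.MetaData(naming_convention=convention)
    users = sa.Table(
        "users",
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("email", sa.Unicode(255), unique=True),
    )
    # The emitted DDL names the constraints "pk_users" and "uq_users_email",
    # so later Alembic migrations can drop or alter them by name instead of
    # guessing backend-generated identifiers.
    print(CreateTable(users))
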
@@ -27,126 +27,125 b' log = logging.getLogger(__name__)'
27 27
28 28 #
29 29 channel_rules_m2m_table = sa.Table(
30 'channels_actions', Base.metadata,
31 sa.Column('channel_pkey', sa.Integer,
32 sa.ForeignKey('alert_channels.pkey')),
33 sa.Column('action_pkey', sa.Integer,
34 sa.ForeignKey('alert_channels_actions.pkey'))
30 "channels_actions",
31 Base.metadata,
32 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
33 sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")),
35 34 )
36 35
37 36 channel_resources_m2m_table = sa.Table(
38 'channels_resources', Base.metadata,
39 sa.Column('channel_pkey', sa.Integer,
40 sa.ForeignKey('alert_channels.pkey')),
41 sa.Column('resource_id', sa.Integer,
42 sa.ForeignKey('resources.resource_id'))
37 "channels_resources",
38 Base.metadata,
39 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
40 sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")),
43 41 )
44 42
45 DATE_FRMT = '%Y-%m-%dT%H:%M'
43 DATE_FRMT = "%Y-%m-%dT%H:%M"
46 44
47 45
48 46 class AlertChannel(Base, BaseModel):
49 47 """
50 48 Stores information about possible alerting options
51 49 """
52 __tablename__ = 'alert_channels'
53 __possible_channel_names__ = ['email']
50
51 __tablename__ = "alert_channels"
52 __possible_channel_names__ = ["email"]
54 53 __mapper_args__ = {
55 'polymorphic_on': 'channel_name',
56 'polymorphic_identity': 'integration'
54 "polymorphic_on": "channel_name",
55 "polymorphic_identity": "integration",
57 56 }
58 57
59 owner_id = sa.Column(sa.Unicode(30),
60 sa.ForeignKey('users.id', onupdate='CASCADE',
61 ondelete='CASCADE'))
58 owner_id = sa.Column(
59 sa.Unicode(30),
60 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
61 )
62 62 channel_name = sa.Column(sa.Unicode(25), nullable=False)
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default='')
64 channel_json_conf = sa.Column(JSON(), nullable=False, default='')
65 channel_validated = sa.Column(sa.Boolean, nullable=False,
66 default=False)
67 send_alerts = sa.Column(sa.Boolean, nullable=False,
68 default=True)
69 daily_digest = sa.Column(sa.Boolean, nullable=False,
70 default=True)
71 integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'),
72 nullable=True)
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default="")
64 channel_json_conf = sa.Column(JSON(), nullable=False, default="")
65 channel_validated = sa.Column(sa.Boolean, nullable=False, default=False)
66 send_alerts = sa.Column(sa.Boolean, nullable=False, default=True)
67 daily_digest = sa.Column(sa.Boolean, nullable=False, default=True)
68 integration_id = sa.Column(
69 sa.Integer, sa.ForeignKey("integrations.id"), nullable=True
70 )
73 71 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
74 72
75 channel_actions = sa.orm.relationship('AlertChannelAction',
76 cascade="all",
77 passive_deletes=True,
78 passive_updates=True,
79 secondary=channel_rules_m2m_table,
80 backref='channels')
81 resources = sa.orm.relationship('Resource',
82 cascade="all",
83 passive_deletes=True,
84 passive_updates=True,
85 secondary=channel_resources_m2m_table,
86 backref='resources')
73 channel_actions = sa.orm.relationship(
74 "AlertChannelAction",
75 cascade="all",
76 passive_deletes=True,
77 passive_updates=True,
78 secondary=channel_rules_m2m_table,
79 backref="channels",
80 )
81 resources = sa.orm.relationship(
82 "Resource",
83 cascade="all",
84 passive_deletes=True,
85 passive_updates=True,
86 secondary=channel_resources_m2m_table,
87 backref="resources",
88 )
87 89
88 90 @property
89 91 def channel_visible_value(self):
90 92 if self.integration:
91 return '{}: {}'.format(
92 self.channel_name,
93 self.integration.resource.resource_name
93 return "{}: {}".format(
94 self.channel_name, self.integration.resource.resource_name
94 95 )
95 96
96 return '{}: {}'.format(
97 self.channel_name,
98 self.channel_value
99 )
97 return "{}: {}".format(self.channel_name, self.channel_value)
100 98
101 def get_dict(self, exclude_keys=None, include_keys=None,
102 extended_info=True):
99 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True):
103 100 """
104 101 Returns a dictionary with the required information that will be consumed by
105 102 Angular
106 103 """
107 instance_dict = super(AlertChannel, self).get_dict(exclude_keys,
108 include_keys)
104 instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys)
109 105 exclude_keys_list = exclude_keys or []
110 106 include_keys_list = include_keys or []
111 107
112 instance_dict['supports_report_alerting'] = True
113 instance_dict['channel_visible_value'] = self.channel_visible_value
108 instance_dict["supports_report_alerting"] = True
109 instance_dict["channel_visible_value"] = self.channel_visible_value
114 110
115 111 if extended_info:
116 instance_dict['actions'] = [
117 rule.get_dict(extended_info=True) for
118 rule in self.channel_actions]
112 instance_dict["actions"] = [
113 rule.get_dict(extended_info=True) for rule in self.channel_actions
114 ]
119 115
120 del instance_dict['channel_json_conf']
116 del instance_dict["channel_json_conf"]
121 117
122 118 if self.integration:
123 119 instance_dict[
124 'supports_report_alerting'] = \
125 self.integration.supports_report_alerting
120 "supports_report_alerting"
121 ] = self.integration.supports_report_alerting
126 122 d = {}
127 123 for k in instance_dict.keys():
128 if (k not in exclude_keys_list and
129 (k in include_keys_list or not include_keys)):
124 if k not in exclude_keys_list and (
125 k in include_keys_list or not include_keys
126 ):
130 127 d[k] = instance_dict[k]
131 128 return d
132 129
133 130 def __repr__(self):
134 return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name,
135 self.channel_value,
136 self.user_name,)
131 return "<AlertChannel: (%s,%s), user:%s>" % (
132 self.channel_name,
133 self.channel_value,
134 self.user_name,
135 )
137 136
138 137 def send_digest(self, **kwargs):
139 138 """
140 139 This should implement daily top error report notifications
141 140 """
142 log.warning('send_digest NOT IMPLEMENTED')
141 log.warning("send_digest NOT IMPLEMENTED")
143 142
144 143 def notify_reports(self, **kwargs):
145 144 """
146 145 This should implement notification of reports that occurred in a 1 min
147 146 interval
148 147 """
149 log.warning('notify_reports NOT IMPLEMENTED')
148 log.warning("notify_reports NOT IMPLEMENTED")
150 149
151 150 def notify_alert(self, **kwargs):
152 151 """
@@ -160,87 +159,85 b' class AlertChannel(Base, BaseModel):'
160 159 request: request object
161 160
162 161 """
163 alert_name = kwargs['event'].unified_alert_name()
164 if alert_name in ['slow_report_alert', 'error_report_alert']:
162 alert_name = kwargs["event"].unified_alert_name()
163 if alert_name in ["slow_report_alert", "error_report_alert"]:
165 164 self.notify_report_alert(**kwargs)
166 elif alert_name == 'uptime_alert':
165 elif alert_name == "uptime_alert":
167 166 self.notify_uptime_alert(**kwargs)
168 elif alert_name == 'chart_alert':
167 elif alert_name == "chart_alert":
169 168 self.notify_chart_alert(**kwargs)
170 169
171 170 def notify_chart_alert(self, **kwargs):
172 171 """
173 172 This should implement chart open/close alert notifications
174 173 """
175 log.warning('notify_chart_alert NOT IMPLEMENTED')
174 log.warning("notify_chart_alert NOT IMPLEMENTED")
176 175
177 176 def notify_report_alert(self, **kwargs):
178 177 """
179 178 This should implement report open/close alert notifications
180 179 """
181 log.warning('notify_report_alert NOT IMPLEMENTED')
180 log.warning("notify_report_alert NOT IMPLEMENTED")
182 181
183 182 def notify_uptime_alert(self, **kwargs):
184 183 """
185 184 This should implement uptime open/close alert notifications
186 185 """
187 log.warning('notify_uptime_alert NOT IMPLEMENTED')
186 log.warning("notify_uptime_alert NOT IMPLEMENTED")
188 187
189 188 def get_notification_basic_vars(self, kwargs):
190 189 """
191 190 Sets the most common variables used later for rendering notifications for
192 191 a channel
193 192 """
194 if 'event' in kwargs:
195 kwargs['since_when'] = kwargs['event'].start_date
193 if "event" in kwargs:
194 kwargs["since_when"] = kwargs["event"].start_date
196 195
197 url_start_date = kwargs.get('since_when') - timedelta(minutes=1)
198 url_end_date = kwargs.get('since_when') + timedelta(minutes=4)
196 url_start_date = kwargs.get("since_when") - timedelta(minutes=1)
197 url_end_date = kwargs.get("since_when") + timedelta(minutes=4)
199 198 tmpl_vars = {
200 "timestamp": kwargs['since_when'],
201 "user": kwargs['user'],
202 "since_when": kwargs.get('since_when'),
199 "timestamp": kwargs["since_when"],
200 "user": kwargs["user"],
201 "since_when": kwargs.get("since_when"),
203 202 "url_start_date": url_start_date,
204 "url_end_date": url_end_date
203 "url_end_date": url_end_date,
205 204 }
206 tmpl_vars["resource_name"] = kwargs['resource'].resource_name
207 tmpl_vars["resource"] = kwargs['resource']
205 tmpl_vars["resource_name"] = kwargs["resource"].resource_name
206 tmpl_vars["resource"] = kwargs["resource"]
208 207
209 if 'event' in kwargs:
210 tmpl_vars['event_values'] = kwargs['event'].values
211 tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name()
212 tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action()
208 if "event" in kwargs:
209 tmpl_vars["event_values"] = kwargs["event"].values
210 tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name()
211 tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action()
213 212 return tmpl_vars
214 213
215 214 def report_alert_notification_vars(self, kwargs):
216 215 tmpl_vars = self.get_notification_basic_vars(kwargs)
217 reports = kwargs.get('reports', [])
216 reports = kwargs.get("reports", [])
218 217 tmpl_vars["reports"] = reports
219 218 tmpl_vars["confirmed_total"] = len(reports)
220 219
221 220 tmpl_vars["report_type"] = "error reports"
222 tmpl_vars["url_report_type"] = 'report/list'
221 tmpl_vars["url_report_type"] = "report/list"
223 222
224 alert_type = tmpl_vars.get('alert_type', '')
225 if 'slow_report' in alert_type:
223 alert_type = tmpl_vars.get("alert_type", "")
224 if "slow_report" in alert_type:
226 225 tmpl_vars["report_type"] = "slow reports"
227 tmpl_vars["url_report_type"] = 'report/list_slow'
226 tmpl_vars["url_report_type"] = "report/list_slow"
228 227
229 app_url = kwargs['request'].registry.settings['_mail_url']
228 app_url = kwargs["request"].registry.settings["_mail_url"]
230 229
231 destination_url = kwargs['request'].route_url('/',
232 _app_url=app_url)
230 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
233 231 if alert_type:
234 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
232 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(
235 233 tmpl_vars["url_report_type"],
236 tmpl_vars['resource'].resource_id,
237 tmpl_vars['url_start_date'].strftime(DATE_FRMT),
238 tmpl_vars['url_end_date'].strftime(DATE_FRMT)
234 tmpl_vars["resource"].resource_id,
235 tmpl_vars["url_start_date"].strftime(DATE_FRMT),
236 tmpl_vars["url_end_date"].strftime(DATE_FRMT),
239 237 )
240 238 else:
241 destination_url += 'ui/{}?resource={}'.format(
242 tmpl_vars["url_report_type"],
243 tmpl_vars['resource'].resource_id
239 destination_url += "ui/{}?resource={}".format(
240 tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id
244 241 )
245 242 tmpl_vars["destination_url"] = destination_url
246 243
@@ -248,58 +245,54 b' class AlertChannel(Base, BaseModel):'
248 245
249 246 def uptime_alert_notification_vars(self, kwargs):
250 247 tmpl_vars = self.get_notification_basic_vars(kwargs)
251 app_url = kwargs['request'].registry.settings['_mail_url']
252 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
253 destination_url += 'ui/{}?resource={}'.format(
254 'uptime',
255 tmpl_vars['resource'].resource_id)
256 tmpl_vars['destination_url'] = destination_url
257
258 reason = ''
259 e_values = tmpl_vars.get('event_values')
260
261 if e_values and e_values.get('response_time') == 0:
262 reason += ' Response time was slower than 20 seconds.'
248 app_url = kwargs["request"].registry.settings["_mail_url"]
249 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
250 destination_url += "ui/{}?resource={}".format(
251 "uptime", tmpl_vars["resource"].resource_id
252 )
253 tmpl_vars["destination_url"] = destination_url
254
255 reason = ""
256 e_values = tmpl_vars.get("event_values")
257
258 if e_values and e_values.get("response_time") == 0:
259 reason += " Response time was slower than 20 seconds."
263 260 elif e_values:
264 code = e_values.get('status_code')
265 reason += ' Response status code: %s.' % code
261 code = e_values.get("status_code")
262 reason += " Response status code: %s." % code
266 263
267 tmpl_vars['reason'] = reason
264 tmpl_vars["reason"] = reason
268 265 return tmpl_vars
269 266
270 267 def chart_alert_notification_vars(self, kwargs):
271 268 tmpl_vars = self.get_notification_basic_vars(kwargs)
272 tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name']
273 tmpl_vars['action_name'] = tmpl_vars['event_values'].get(
274 'action_name') or ''
275 matched_values = tmpl_vars['event_values']['matched_step_values']
276 tmpl_vars['readable_values'] = []
277 for key, value in list(matched_values['values'].items()):
278 matched_label = matched_values['labels'].get(key)
269 tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"]
270 tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or ""
271 matched_values = tmpl_vars["event_values"]["matched_step_values"]
272 tmpl_vars["readable_values"] = []
273 for key, value in list(matched_values["values"].items()):
274 matched_label = matched_values["labels"].get(key)
279 275 if matched_label:
280 tmpl_vars['readable_values'].append({
281 'label': matched_label['human_label'],
282 'value': value
283 })
284 tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'],
285 key=lambda x: x['label'])
286 start_date = convert_date(tmpl_vars['event_values']['start_interval'])
276 tmpl_vars["readable_values"].append(
277 {"label": matched_label["human_label"], "value": value}
278 )
279 tmpl_vars["readable_values"] = sorted(
280 tmpl_vars["readable_values"], key=lambda x: x["label"]
281 )
282 start_date = convert_date(tmpl_vars["event_values"]["start_interval"])
287 283 end_date = None
288 if tmpl_vars['event_values'].get('end_interval'):
289 end_date = convert_date(tmpl_vars['event_values']['end_interval'])
284 if tmpl_vars["event_values"].get("end_interval"):
285 end_date = convert_date(tmpl_vars["event_values"]["end_interval"])
290 286
291 app_url = kwargs['request'].registry.settings['_mail_url']
292 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
287 app_url = kwargs["request"].registry.settings["_mail_url"]
288 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
293 289 to_encode = {
294 'resource': tmpl_vars['event_values']['resource'],
295 'start_date': start_date.strftime(DATE_FRMT),
290 "resource": tmpl_vars["event_values"]["resource"],
291 "start_date": start_date.strftime(DATE_FRMT),
296 292 }
297 293 if end_date:
298 to_encode['end_date'] = end_date.strftime(DATE_FRMT)
294 to_encode["end_date"] = end_date.strftime(DATE_FRMT)
299 295
300 destination_url += 'ui/{}?{}'.format(
301 'logs',
302 urllib.parse.urlencode(to_encode)
303 )
304 tmpl_vars['destination_url'] = destination_url
296 destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode))
297 tmpl_vars["destination_url"] = destination_url
305 298 return tmpl_vars
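
AlertChannel and the channel subclasses that follow use SQLAlchemy single-table inheritance: the channel_name column named in polymorphic_on decides which subclass each row materializes as. A stripped-down sketch of the pattern (demo names only, not project code):

    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base  # sqlalchemy.ext.declarative on older versions

    Base = declarative_base()

    class Channel(Base):
        __tablename__ = "channels_demo"
        pkey = sa.Column(sa.Integer, primary_key=True)
        channel_name = sa.Column(sa.Unicode(25), nullable=False)
        __mapper_args__ = {
            "polymorphic_on": channel_name,
            "polymorphic_identity": "integration",
        }

    class EmailChannel(Channel):
        # Rows with channel_name == "email" load as EmailChannel, so a
        # query against Channel returns objects with the right notify_*
        # overrides without any manual dispatch.
        __mapper_args__ = {"polymorphic_identity": "email"}
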
@@ -27,39 +27,42 b' class AlertChannelAction(Base, BaseModel):'
27 27 Stores notification conditions for a user's alert channels
28 28 This is later used for rule parsing like "alert if http_status == 500"
29 29 """
30 __tablename__ = 'alert_channels_actions'
31 30
32 types = ['report', 'chart']
31 __tablename__ = "alert_channels_actions"
33 32
34 owner_id = sa.Column(sa.Integer,
35 sa.ForeignKey('users.id', onupdate='CASCADE',
36 ondelete='CASCADE'))
33 types = ["report", "chart"]
34
35 owner_id = sa.Column(
36 sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
37 )
37 38 resource_id = sa.Column(sa.Integer())
38 action = sa.Column(sa.Unicode(10), nullable=False, default='always')
39 action = sa.Column(sa.Unicode(10), nullable=False, default="always")
39 40 type = sa.Column(sa.Unicode(10), nullable=False)
40 41 other_id = sa.Column(sa.Unicode(40))
41 42 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
42 rule = sa.Column(sa.dialects.postgresql.JSON,
43 nullable=False, default={'field': 'http_status',
44 "op": "ge", "value": "500"})
43 rule = sa.Column(
44 sa.dialects.postgresql.JSON,
45 nullable=False,
46 default={"field": "http_status", "op": "ge", "value": "500"},
47 )
45 48 config = sa.Column(sa.dialects.postgresql.JSON)
46 49 name = sa.Column(sa.Unicode(255))
47 50
48 @validates('notify_type')
51 @validates("notify_type")
49 52 def validate_email(self, key, notify_type):
50 assert notify_type in ['always', 'only_first']
53 assert notify_type in ["always", "only_first"]
51 54 return notify_type
52 55
53 56 def resource_name(self, db_session=None):
54 57 db_session = get_db_session(db_session)
55 58 if self.resource_id:
56 59 return ResourceService.by_resource_id(
57 self.resource_id, db_session=db_session).resource_name
60 self.resource_id, db_session=db_session
61 ).resource_name
58 62 else:
59 return 'any resource'
63 return "any resource"
60 64
61 def get_dict(self, exclude_keys=None, include_keys=None,
62 extended_info=False):
65 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
63 66 """
64 67 Returns a dictionary with the required information that will be consumed by
65 68 Angular
@@ -68,12 +71,14 b' class AlertChannelAction(Base, BaseModel):'
68 71 exclude_keys_list = exclude_keys or []
69 72 include_keys_list = include_keys or []
70 73 if extended_info:
71 instance_dict['channels'] = [
72 c.get_dict(extended_info=False) for c in self.channels]
74 instance_dict["channels"] = [
75 c.get_dict(extended_info=False) for c in self.channels
76 ]
73 77
74 78 d = {}
75 79 for k in instance_dict.keys():
76 if (k not in exclude_keys_list and
77 (k in include_keys_list or not include_keys)):
80 if k not in exclude_keys_list and (
81 k in include_keys_list or not include_keys
82 ):
78 83 d[k] = instance_dict[k]
79 84 return d
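
The rule column above defaults to a JSON comparison such as {"field": "http_status", "op": "ge", "value": "500"}. The project parses these with its own rule engine elsewhere; a toy evaluator just to show the shape (hypothetical helper, not the real parser):

    import operator

    OPS = {"eq": operator.eq, "ge": operator.ge, "le": operator.le}

    def rule_matches(rule, report):
        # Compare the report field against the rule value with the named operator.
        return OPS[rule["op"]](int(report[rule["field"]]), int(rule["value"]))

    rule = {"field": "http_status", "op": "ge", "value": "500"}
    assert rule_matches(rule, {"http_status": 502})
    assert not rule_matches(rule, {"http_status": 200})
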
@@ -13,4 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
@@ -23,15 +23,13 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class CampfireAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'campfire'
28 }
26 __mapper_args__ = {"polymorphic_identity": "campfire"}
29 27
30 28 @property
31 29 def client(self):
32 30 client = CampfireIntegration.create_client(
33 self.integration.config['api_token'],
34 self.integration.config['account'])
31 self.integration.config["api_token"], self.integration.config["account"]
32 )
35 33 return client
36 34
37 35 def notify_reports(self, **kwargs):
@@ -48,37 +46,40 b' class CampfireAlertChannel(AlertChannel):'
48 46 """
49 47 template_vars = self.report_alert_notification_vars(kwargs)
50 48
51 app_url = kwargs['request'].registry.settings['_mail_url']
52 destination_url = kwargs['request'].route_url('/',
53 app_url=app_url)
54 f_args = ('report',
55 template_vars['resource'].resource_id,
56 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
57 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
58 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
59 *f_args)
60
61 if template_vars['confirmed_total'] > 1:
49 app_url = kwargs["request"].registry.settings["_mail_url"]
50 destination_url = kwargs["request"].route_url("/", app_url=app_url)
51 f_args = (
52 "report",
53 template_vars["resource"].resource_id,
54 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
55 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
56 )
57 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
58
59 if template_vars["confirmed_total"] > 1:
62 60 template_vars["title"] = "%s - %s reports" % (
63 template_vars['resource_name'],
64 template_vars['confirmed_total'],
61 template_vars["resource_name"],
62 template_vars["confirmed_total"],
65 63 )
66 64 else:
67 error_title = truncate(template_vars['reports'][0][1].error or
68 'slow report', 90)
65 error_title = truncate(
66 template_vars["reports"][0][1].error or "slow report", 90
67 )
69 68 template_vars["title"] = "%s - '%s' report" % (
70 template_vars['resource_name'],
71 error_title)
69 template_vars["resource_name"],
70 error_title,
71 )
72 72
73 template_vars["title"] += ' ' + destination_url
73 template_vars["title"] += " " + destination_url
74 74
75 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
76 kwargs['user'].user_name,
75 log_msg = "NOTIFY : %s via %s :: %s reports" % (
76 kwargs["user"].user_name,
77 77 self.channel_visible_value,
78 template_vars['confirmed_total'])
78 template_vars["confirmed_total"],
79 )
79 80 log.warning(log_msg)
80 81
81 for room in self.integration.config['rooms'].split(','):
82 for room in self.integration.config["rooms"].split(","):
82 83 self.client.speak_to_room(room.strip(), template_vars["title"])
83 84
84 85 def notify_report_alert(self, **kwargs):
@@ -94,23 +95,23 b' class CampfireAlertChannel(AlertChannel):'
94 95 """
95 96 template_vars = self.report_alert_notification_vars(kwargs)
96 97
97 if kwargs['event'].unified_alert_action() == 'OPEN':
98 title = 'ALERT %s: %s - %s %s %s' % (
99 template_vars['alert_action'],
100 template_vars['resource_name'],
101 kwargs['event'].values['reports'],
102 template_vars['report_type'],
103 template_vars['destination_url']
98 if kwargs["event"].unified_alert_action() == "OPEN":
99 title = "ALERT %s: %s - %s %s %s" % (
100 template_vars["alert_action"],
101 template_vars["resource_name"],
102 kwargs["event"].values["reports"],
103 template_vars["report_type"],
104 template_vars["destination_url"],
104 105 )
105 106
106 107 else:
107 title = 'ALERT %s: %s type: %s' % (
108 template_vars['alert_action'],
109 template_vars['resource_name'],
110 template_vars['alert_type'].replace('_', ' '),
108 title = "ALERT %s: %s type: %s" % (
109 template_vars["alert_action"],
110 template_vars["resource_name"],
111 template_vars["alert_type"].replace("_", " "),
111 112 )
112 for room in self.integration.config['rooms'].split(','):
113 self.client.speak_to_room(room.strip(), title, sound='VUVUZELA')
113 for room in self.integration.config["rooms"].split(","):
114 self.client.speak_to_room(room.strip(), title, sound="VUVUZELA")
114 115
115 116 def notify_uptime_alert(self, **kwargs):
116 117 """
@@ -125,15 +126,15 b' class CampfireAlertChannel(AlertChannel):'
125 126 """
126 127 template_vars = self.uptime_alert_notification_vars(kwargs)
127 128
128 message = 'ALERT %s: %s has uptime issues %s\n\n' % (
129 template_vars['alert_action'],
130 template_vars['resource_name'],
131 template_vars['destination_url']
129 message = "ALERT %s: %s has uptime issues %s\n\n" % (
130 template_vars["alert_action"],
131 template_vars["resource_name"],
132 template_vars["destination_url"],
132 133 )
133 message += template_vars['reason']
134 message += template_vars["reason"]
134 135
135 for room in self.integration.config['rooms'].split(','):
136 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
136 for room in self.integration.config["rooms"].split(","):
137 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
137 138
138 139 def send_digest(self, **kwargs):
139 140 """
@@ -148,17 +149,17 b' class CampfireAlertChannel(AlertChannel):'
148 149
149 150 """
150 151 template_vars = self.report_alert_notification_vars(kwargs)
151 f_args = (template_vars['resource_name'],
152 template_vars['confirmed_total'],)
152 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
153 153 message = "Daily report digest: %s - %s reports" % f_args
154 message += '{}\n'.format(template_vars['destination_url'])
155 for room in self.integration.config['rooms'].split(','):
154 message += "{}\n".format(template_vars["destination_url"])
155 for room in self.integration.config["rooms"].split(","):
156 156 self.client.speak_to_room(room.strip(), message)
157 157
158 log_msg = 'DIGEST : %s via %s :: %s reports' % (
159 kwargs['user'].user_name,
158 log_msg = "DIGEST : %s via %s :: %s reports" % (
159 kwargs["user"].user_name,
160 160 self.channel_visible_value,
161 template_vars['confirmed_total'])
161 template_vars["confirmed_total"],
162 )
162 163 log.warning(log_msg)
163 164
164 165 def notify_chart_alert(self, **kwargs):
@@ -173,16 +174,18 b' class CampfireAlertChannel(AlertChannel):'
173 174
174 175 """
175 176 template_vars = self.chart_alert_notification_vars(kwargs)
176 message = 'ALERT {}: value in "{}" chart: ' \
177 'met alert "{}" criteria {} \n'.format(
178 template_vars['alert_action'],
179 template_vars['chart_name'],
180 template_vars['action_name'],
181 template_vars['destination_url']
177 message = (
178 'ALERT {}: value in "{}" chart: '
179 'met alert "{}" criteria {} \n'.format(
180 template_vars["alert_action"],
181 template_vars["chart_name"],
182 template_vars["action_name"],
183 template_vars["destination_url"],
184 )
182 185 )
183 186
184 for item in template_vars['readable_values']:
185 message += '{}: {}\n'.format(item['label'], item['value'])
187 for item in template_vars["readable_values"]:
188 message += "{}: {}\n".format(item["label"], item["value"])
186 189
187 for room in self.integration.config['rooms'].split(','):
188 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
190 for room in self.integration.config["rooms"].split(","):
191 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
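
Several notifiers above shorten error titles with truncate(text, length), imported from the project's helpers. A stand-in with the commonly assumed semantics (behavior assumed, not verified against the real helper):

    def truncate(text, length, indicator="..."):
        # Return text unchanged when it fits; otherwise cut it and append
        # an ellipsis so the result is at most `length` characters long.
        if len(text) <= length:
            return text
        return text[: length - len(indicator)] + indicator

    assert truncate("slow report", 90) == "slow report"
    assert len(truncate("x" * 200, 90)) == 90
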
@@ -27,9 +27,7 b' class EmailAlertChannel(AlertChannel):'
27 27 Default email alerting channel
28 28 """
29 29
30 __mapper_args__ = {
31 'polymorphic_identity': 'email'
32 }
30 __mapper_args__ = {"polymorphic_identity": "email"}
33 31
34 32 def notify_reports(self, **kwargs):
35 33 """
@@ -45,25 +43,30 b' class EmailAlertChannel(AlertChannel):'
45 43 """
46 44 template_vars = self.report_alert_notification_vars(kwargs)
47 45
48 if template_vars['confirmed_total'] > 1:
46 if template_vars["confirmed_total"] > 1:
49 47 template_vars["title"] = "AppEnlight :: %s - %s reports" % (
50 template_vars['resource_name'],
51 template_vars['confirmed_total'],
48 template_vars["resource_name"],
49 template_vars["confirmed_total"],
52 50 )
53 51 else:
54 error_title = truncate(template_vars['reports'][0][1].error or
55 'slow report', 20)
52 error_title = truncate(
53 template_vars["reports"][0][1].error or "slow report", 20
54 )
56 55 template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
57 template_vars['resource_name'],
58 error_title)
59 UserService.send_email(kwargs['request'],
60 [self.channel_value],
61 template_vars,
62 '/email_templates/notify_reports.jinja2')
63 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
64 kwargs['user'].user_name,
56 template_vars["resource_name"],
57 error_title,
58 )
59 UserService.send_email(
60 kwargs["request"],
61 [self.channel_value],
62 template_vars,
63 "/email_templates/notify_reports.jinja2",
64 )
65 log_msg = "NOTIFY : %s via %s :: %s reports" % (
66 kwargs["user"].user_name,
65 67 self.channel_visible_value,
66 template_vars['confirmed_total'])
68 template_vars["confirmed_total"],
69 )
67 70 log.warning(log_msg)
68 71
69 72 def send_digest(self, **kwargs):
@@ -81,20 +84,23 b' class EmailAlertChannel(AlertChannel):'
81 84 template_vars = self.report_alert_notification_vars(kwargs)
82 85 title = "AppEnlight :: Daily report digest: %s - %s reports"
83 86 template_vars["email_title"] = title % (
84 template_vars['resource_name'],
85 template_vars['confirmed_total'],
87 template_vars["resource_name"],
88 template_vars["confirmed_total"],
86 89 )
87 90
88 UserService.send_email(kwargs['request'],
89 [self.channel_value],
90 template_vars,
91 '/email_templates/notify_reports.jinja2',
92 immediately=True,
93 silent=True)
94 log_msg = 'DIGEST : %s via %s :: %s reports' % (
95 kwargs['user'].user_name,
91 UserService.send_email(
92 kwargs["request"],
93 [self.channel_value],
94 template_vars,
95 "/email_templates/notify_reports.jinja2",
96 immediately=True,
97 silent=True,
98 )
99 log_msg = "DIGEST : %s via %s :: %s reports" % (
100 kwargs["user"].user_name,
96 101 self.channel_visible_value,
97 template_vars['confirmed_total'])
102 template_vars["confirmed_total"],
103 )
98 104 log.warning(log_msg)
99 105
100 106 def notify_report_alert(self, **kwargs):
@@ -110,23 +116,26 b' class EmailAlertChannel(AlertChannel):'
110 116 """
111 117 template_vars = self.report_alert_notification_vars(kwargs)
112 118
113 if kwargs['event'].unified_alert_action() == 'OPEN':
114 title = 'AppEnlight :: ALERT %s: %s - %s %s' % (
115 template_vars['alert_action'],
116 template_vars['resource_name'],
117 kwargs['event'].values['reports'],
118 template_vars['report_type'],
119 if kwargs["event"].unified_alert_action() == "OPEN":
120 title = "AppEnlight :: ALERT %s: %s - %s %s" % (
121 template_vars["alert_action"],
122 template_vars["resource_name"],
123 kwargs["event"].values["reports"],
124 template_vars["report_type"],
119 125 )
120 126 else:
121 title = 'AppEnlight :: ALERT %s: %s type: %s' % (
122 template_vars['alert_action'],
123 template_vars['resource_name'],
124 template_vars['alert_type'].replace('_', ' '),
127 title = "AppEnlight :: ALERT %s: %s type: %s" % (
128 template_vars["alert_action"],
129 template_vars["resource_name"],
130 template_vars["alert_type"].replace("_", " "),
125 131 )
126 template_vars['email_title'] = title
127 UserService.send_email(kwargs['request'], [self.channel_value],
128 template_vars,
129 '/email_templates/alert_reports.jinja2')
132 template_vars["email_title"] = title
133 UserService.send_email(
134 kwargs["request"],
135 [self.channel_value],
136 template_vars,
137 "/email_templates/alert_reports.jinja2",
138 )
130 139
131 140 def notify_uptime_alert(self, **kwargs):
132 141 """
@@ -140,15 +149,18 b' class EmailAlertChannel(AlertChannel):'
140 149
141 150 """
142 151 template_vars = self.uptime_alert_notification_vars(kwargs)
143 title = 'AppEnlight :: ALERT %s: %s has uptime issues' % (
144 template_vars['alert_action'],
145 template_vars['resource_name'],
152 title = "AppEnlight :: ALERT %s: %s has uptime issues" % (
153 template_vars["alert_action"],
154 template_vars["resource_name"],
146 155 )
147 template_vars['email_title'] = title
156 template_vars["email_title"] = title
148 157
149 UserService.send_email(kwargs['request'], [self.channel_value],
150 template_vars,
151 '/email_templates/alert_uptime.jinja2')
158 UserService.send_email(
159 kwargs["request"],
160 [self.channel_value],
161 template_vars,
162 "/email_templates/alert_uptime.jinja2",
163 )
152 164
153 165 def notify_chart_alert(self, **kwargs):
154 166 """
@@ -163,13 +175,18 b' class EmailAlertChannel(AlertChannel):'
163 175 """
164 176 template_vars = self.chart_alert_notification_vars(kwargs)
165 177
166 title = 'AppEnlight :: ALERT {} value in "{}" chart' \
167 ' met alert "{}" criteria'.format(
168 template_vars['alert_action'],
169 template_vars['chart_name'],
170 template_vars['action_name'],
178 title = (
179 'AppEnlight :: ALERT {} value in "{}" chart'
180 ' met alert "{}" criteria'.format(
181 template_vars["alert_action"],
182 template_vars["chart_name"],
183 template_vars["action_name"],
184 )
185 )
186 template_vars["email_title"] = title
187 UserService.send_email(
188 kwargs["request"],
189 [self.channel_value],
190 template_vars,
191 "/email_templates/alert_chart.jinja2",
171 192 )
172 template_vars['email_title'] = title
173 UserService.send_email(kwargs['request'], [self.channel_value],
174 template_vars,
175 '/email_templates/alert_chart.jinja2')
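
Each email notifier above hands template_vars plus a template path to UserService.send_email, which renders the corresponding Jinja2 template server-side. A self-contained illustration of that rendering step (the template body and variable values here are invented for the example):

    import jinja2

    env = jinja2.Environment(
        loader=jinja2.DictLoader(
            {"alert_uptime.jinja2": "{{ email_title }} - {{ reason }}"}
        )
    )
    body = env.get_template("alert_uptime.jinja2").render(
        email_title="AppEnlight :: ALERT OPEN: myapp has uptime issues",
        reason=" Response status code: 500.",
    )
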
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class FlowdockAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'flowdock'
28 }
26 __mapper_args__ = {"polymorphic_identity": "flowdock"}
29 27
30 28 def notify_reports(self, **kwargs):
31 29 """
@@ -41,44 +39,45 b' class FlowdockAlertChannel(AlertChannel):'
41 39 """
42 40 template_vars = self.report_alert_notification_vars(kwargs)
43 41
44 app_url = kwargs['request'].registry.settings['_mail_url']
45 destination_url = kwargs['request'].route_url('/',
46 _app_url=app_url)
47 f_args = ('report',
48 template_vars['resource'].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
52 *f_args)
53
54 if template_vars['confirmed_total'] > 1:
42 app_url = kwargs["request"].registry.settings["_mail_url"]
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
44 f_args = (
45 "report",
46 template_vars["resource"].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
49 )
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
51
52 if template_vars["confirmed_total"] > 1:
55 53 template_vars["title"] = "%s - %s reports" % (
56 template_vars['resource_name'],
57 template_vars['confirmed_total'],
54 template_vars["resource_name"],
55 template_vars["confirmed_total"],
58 56 )
59 57 else:
60 error_title = truncate(template_vars['reports'][0][1].error or
61 'slow report', 90)
58 error_title = truncate(
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
62 61 template_vars["title"] = "%s - '%s' report" % (
63 template_vars['resource_name'],
64 error_title)
62 template_vars["resource_name"],
63 error_title,
64 )
65 65
66 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
67 kwargs['user'].user_name,
66 log_msg = "NOTIFY : %s via %s :: %s reports" % (
67 kwargs["user"].user_name,
68 68 self.channel_visible_value,
69 template_vars['confirmed_total'])
69 template_vars["confirmed_total"],
70 )
70 71 log.warning(log_msg)
71 72
72 client = FlowdockIntegration.create_client(
73 self.integration.config['api_token'])
73 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
74 74 payload = {
75 75 "source": "AppEnlight",
76 "from_address": kwargs['request'].registry.settings[
77 'mailing.from_email'],
76 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
78 77 "subject": template_vars["title"],
79 78 "content": "New report present",
80 79 "tags": ["appenlight"],
81 "link": destination_url
80 "link": destination_url,
82 81 }
83 82 client.send_to_inbox(payload)
84 83
@@ -95,32 +94,30 b' class FlowdockAlertChannel(AlertChannel):'
95 94 """
96 95 template_vars = self.report_alert_notification_vars(kwargs)
97 96
98 if kwargs['event'].unified_alert_action() == 'OPEN':
97 if kwargs["event"].unified_alert_action() == "OPEN":
99 98
100 title = 'ALERT %s: %s - %s %s' % (
101 template_vars['alert_action'],
102 template_vars['resource_name'],
103 kwargs['event'].values['reports'],
104 template_vars['report_type'],
99 title = "ALERT %s: %s - %s %s" % (
100 template_vars["alert_action"],
101 template_vars["resource_name"],
102 kwargs["event"].values["reports"],
103 template_vars["report_type"],
105 104 )
106 105
107 106 else:
108 title = 'ALERT %s: %s type: %s' % (
109 template_vars['alert_action'],
110 template_vars['resource_name'],
111 template_vars['alert_type'].replace('_', ' '),
107 title = "ALERT %s: %s type: %s" % (
108 template_vars["alert_action"],
109 template_vars["resource_name"],
110 template_vars["alert_type"].replace("_", " "),
112 111 )
113 112
114 client = FlowdockIntegration.create_client(
115 self.integration.config['api_token'])
113 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
116 114 payload = {
117 115 "source": "AppEnlight",
118 "from_address": kwargs['request'].registry.settings[
119 'mailing.from_email'],
116 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
120 117 "subject": title,
121 "content": 'Investigation required',
122 "tags": ["appenlight", "alert", template_vars['alert_type']],
123 "link": template_vars['destination_url']
118 "content": "Investigation required",
119 "tags": ["appenlight", "alert", template_vars["alert_type"]],
120 "link": template_vars["destination_url"],
124 121 }
125 122 client.send_to_inbox(payload)
126 123
@@ -137,23 +134,21 b' class FlowdockAlertChannel(AlertChannel):'
137 134 """
138 135 template_vars = self.uptime_alert_notification_vars(kwargs)
139 136
140 message = 'ALERT %s: %s has uptime issues' % (
141 template_vars['alert_action'],
142 template_vars['resource_name'],
137 message = "ALERT %s: %s has uptime issues" % (
138 template_vars["alert_action"],
139 template_vars["resource_name"],
143 140 )
144 submessage = 'Info: '
145 submessage += template_vars['reason']
141 submessage = "Info: "
142 submessage += template_vars["reason"]
146 143
147 client = FlowdockIntegration.create_client(
148 self.integration.config['api_token'])
144 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
149 145 payload = {
150 146 "source": "AppEnlight",
151 "from_address": kwargs['request'].registry.settings[
152 'mailing.from_email'],
147 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
153 148 "subject": message,
154 149 "content": submessage,
155 "tags": ["appenlight", "alert", 'uptime'],
156 "link": template_vars['destination_url']
150 "tags": ["appenlight", "alert", "uptime"],
151 "link": template_vars["destination_url"],
157 152 }
158 153 client.send_to_inbox(payload)
159 154
@@ -171,29 +166,29 b' class FlowdockAlertChannel(AlertChannel):'
171 166 """
172 167 template_vars = self.report_alert_notification_vars(kwargs)
173 168 message = "Daily report digest: %s - %s reports" % (
174 template_vars['resource_name'], template_vars['confirmed_total'])
169 template_vars["resource_name"],
170 template_vars["confirmed_total"],
171 )
175 172
176 f_args = (template_vars['confirmed_total'],
177 template_vars['timestamp'])
173 f_args = (template_vars["confirmed_total"], template_vars["timestamp"])
178 174
179 175 payload = {
180 176 "source": "AppEnlight",
181 "from_address": kwargs['request'].registry.settings[
182 'mailing.from_email'],
177 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
183 178 "subject": message,
184 "content": '%s reports in total since %s' % f_args,
179 "content": "%s reports in total since %s" % f_args,
185 180 "tags": ["appenlight", "digest"],
186 "link": template_vars['destination_url']
181 "link": template_vars["destination_url"],
187 182 }
188 183
189 client = FlowdockIntegration.create_client(
190 self.integration.config['api_token'])
184 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
191 185 client.send_to_inbox(payload)
192 186
193 log_msg = 'DIGEST : %s via %s :: %s reports' % (
194 kwargs['user'].user_name,
187 log_msg = "DIGEST : %s via %s :: %s reports" % (
188 kwargs["user"].user_name,
195 189 self.channel_visible_value,
196 template_vars['confirmed_total'])
190 template_vars["confirmed_total"],
191 )
197 192 log.warning(log_msg)
198 193
199 194 def notify_chart_alert(self, **kwargs):
@@ -209,25 +204,22 b' class FlowdockAlertChannel(AlertChannel):'
209 204 """
210 205 template_vars = self.chart_alert_notification_vars(kwargs)
211 206
212 message = 'ALERT {}: value in "{}" chart ' \
213 'met alert "{}" criteria'.format(
214 template_vars['alert_action'],
215 template_vars['chart_name'],
216 template_vars['action_name'],
207 message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
208 template_vars["alert_action"],
209 template_vars["chart_name"],
210 template_vars["action_name"],
217 211 )
218 submessage = 'Info: '
219 for item in template_vars['readable_values']:
220 submessage += '{}: {}\n'.format(item['label'], item['value'])
212 submessage = "Info: "
213 for item in template_vars["readable_values"]:
214 submessage += "{}: {}\n".format(item["label"], item["value"])
221 215
222 client = FlowdockIntegration.create_client(
223 self.integration.config['api_token'])
216 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
224 217 payload = {
225 218 "source": "AppEnlight",
226 "from_address": kwargs['request'].registry.settings[
227 'mailing.from_email'],
219 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
228 220 "subject": message,
229 221 "content": submessage,
230 "tags": ["appenlight", "alert", 'chart'],
231 "link": template_vars['destination_url']
222 "tags": ["appenlight", "alert", "chart"],
223 "link": template_vars["destination_url"],
232 224 }
233 225 client.send_to_inbox(payload)
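
Every Flowdock notifier builds the same team-inbox payload and hands it to client.send_to_inbox. A sketch of what such a client plausibly does under the hood (the endpoint URL and transport are assumptions based on Flowdock's historical push API, not taken from this diff):

    import json
    import urllib.request

    def send_to_inbox(api_token, payload):
        # POST the JSON payload to the flow's team inbox.
        url = "https://api.flowdock.com/v1/messages/team_inbox/%s" % api_token
        req = urllib.request.Request(
            url,
            data=json.dumps(payload).encode("utf-8"),
            headers={"Content-Type": "application/json"},
        )
        return urllib.request.urlopen(req)
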
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class HipchatAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'hipchat'
28 }
26 __mapper_args__ = {"polymorphic_identity": "hipchat"}
29 27
30 28 def notify_reports(self, **kwargs):
31 29 """
@@ -41,46 +39,50 b' class HipchatAlertChannel(AlertChannel):'
41 39 """
42 40 template_vars = self.report_alert_notification_vars(kwargs)
43 41
44 app_url = kwargs['request'].registry.settings['_mail_url']
45 destination_url = kwargs['request'].route_url('/',
46 _app_url=app_url)
47 f_args = ('report',
48 template_vars['resource'].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
52 *f_args)
53
54 if template_vars['confirmed_total'] > 1:
42 app_url = kwargs["request"].registry.settings["_mail_url"]
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
44 f_args = (
45 "report",
46 template_vars["resource"].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
49 )
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
51
52 if template_vars["confirmed_total"] > 1:
55 53 template_vars["title"] = "%s - %s reports" % (
56 template_vars['resource_name'],
57 template_vars['confirmed_total'],
54 template_vars["resource_name"],
55 template_vars["confirmed_total"],
58 56 )
59 57 else:
60 error_title = truncate(template_vars['reports'][0][1].error or
61 'slow report', 90)
58 error_title = truncate(
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
62 61 template_vars["title"] = "%s - '%s' report" % (
63 template_vars['resource_name'],
64 error_title)
62 template_vars["resource_name"],
63 error_title,
64 )
65 65
66 template_vars["title"] += ' ' + destination_url
66 template_vars["title"] += " " + destination_url
67 67
68 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
69 kwargs['user'].user_name,
68 log_msg = "NOTIFY : %s via %s :: %s reports" % (
69 kwargs["user"].user_name,
70 70 self.channel_visible_value,
71 template_vars['confirmed_total'])
71 template_vars["confirmed_total"],
72 )
72 73 log.warning(log_msg)
73 74
74 client = HipchatIntegration.create_client(
75 self.integration.config['api_token'])
76 for room in self.integration.config['rooms'].split(','):
77 client.send({
78 "message_format": 'text',
79 "message": template_vars["title"],
80 "from": "AppEnlight",
81 "room_id": room.strip(),
82 "color": "yellow"
83 })
75 client = HipchatIntegration.create_client(self.integration.config["api_token"])
76 for room in self.integration.config["rooms"].split(","):
77 client.send(
78 {
79 "message_format": "text",
80 "message": template_vars["title"],
81 "from": "AppEnlight",
82 "room_id": room.strip(),
83 "color": "yellow",
84 }
85 )
84 86
85 87 def notify_report_alert(self, **kwargs):
86 88 """
@@ -95,35 +97,37 b' class HipchatAlertChannel(AlertChannel):'
95 97 """
96 98 template_vars = self.report_alert_notification_vars(kwargs)
97 99
98 if kwargs['event'].unified_alert_action() == 'OPEN':
100 if kwargs["event"].unified_alert_action() == "OPEN":
99 101
100 title = 'ALERT %s: %s - %s %s' % (
101 template_vars['alert_action'],
102 template_vars['resource_name'],
103 kwargs['event'].values['reports'],
104 template_vars['report_type'],
102 title = "ALERT %s: %s - %s %s" % (
103 template_vars["alert_action"],
104 template_vars["resource_name"],
105 kwargs["event"].values["reports"],
106 template_vars["report_type"],
105 107 )
106 108
107 109 else:
108 title = 'ALERT %s: %s type: %s' % (
109 template_vars['alert_action'],
110 template_vars['resource_name'],
111 template_vars['alert_type'].replace('_', ' '),
110 title = "ALERT %s: %s type: %s" % (
111 template_vars["alert_action"],
112 template_vars["resource_name"],
113 template_vars["alert_type"].replace("_", " "),
112 114 )
113 115
114 title += '\n ' + template_vars['destination_url']
116 title += "\n " + template_vars["destination_url"]
115 117
116 api_token = self.integration.config['api_token']
118 api_token = self.integration.config["api_token"]
117 119 client = HipchatIntegration.create_client(api_token)
118 for room in self.integration.config['rooms'].split(','):
119 client.send({
120 "message_format": 'text',
121 "message": title,
122 "from": "AppEnlight",
123 "room_id": room.strip(),
124 "color": "red",
125 "notify": '1'
126 })
120 for room in self.integration.config["rooms"].split(","):
121 client.send(
122 {
123 "message_format": "text",
124 "message": title,
125 "from": "AppEnlight",
126 "room_id": room.strip(),
127 "color": "red",
128 "notify": "1",
129 }
130 )
127 131
128 132 def notify_uptime_alert(self, **kwargs):
129 133 """
@@ -138,24 +142,26 b' class HipchatAlertChannel(AlertChannel):'
138 142 """
139 143 template_vars = self.uptime_alert_notification_vars(kwargs)
140 144
141 message = 'ALERT %s: %s has uptime issues\n' % (
142 template_vars['alert_action'],
143 template_vars['resource_name'],
145 message = "ALERT %s: %s has uptime issues\n" % (
146 template_vars["alert_action"],
147 template_vars["resource_name"],
144 148 )
145 message += template_vars['reason']
146 message += '\n{}'.format(template_vars['destination_url'])
149 message += template_vars["reason"]
150 message += "\n{}".format(template_vars["destination_url"])
147 151
148 api_token = self.integration.config['api_token']
152 api_token = self.integration.config["api_token"]
149 153 client = HipchatIntegration.create_client(api_token)
150 for room in self.integration.config['rooms'].split(','):
151 client.send({
152 "message_format": 'text',
153 "message": message,
154 "from": "AppEnlight",
155 "room_id": room.strip(),
156 "color": "red",
157 "notify": '1'
158 })
154 for room in self.integration.config["rooms"].split(","):
155 client.send(
156 {
157 "message_format": "text",
158 "message": message,
159 "from": "AppEnlight",
160 "room_id": room.strip(),
161 "color": "red",
162 "notify": "1",
163 }
164 )
159 165
160 166 def notify_chart_alert(self, **kwargs):
161 167 """
@@ -169,29 +175,30 b' class HipchatAlertChannel(AlertChannel):'
169 175
170 176 """
171 177 template_vars = self.chart_alert_notification_vars(kwargs)
172 message = 'ALERT {}: value in "{}" chart: ' \
173 'met alert "{}" criteria\n'.format(
174 template_vars['alert_action'],
175 template_vars['chart_name'],
176 template_vars['action_name'],
178 message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format(
179 template_vars["alert_action"],
180 template_vars["chart_name"],
181 template_vars["action_name"],
177 182 )
178 183
179 for item in template_vars['readable_values']:
180 message += '{}: {}\n'.format(item['label'], item['value'])
184 for item in template_vars["readable_values"]:
185 message += "{}: {}\n".format(item["label"], item["value"])
181 186
182 message += template_vars['destination_url']
187 message += template_vars["destination_url"]
183 188
184 api_token = self.integration.config['api_token']
189 api_token = self.integration.config["api_token"]
185 190 client = HipchatIntegration.create_client(api_token)
186 for room in self.integration.config['rooms'].split(','):
187 client.send({
188 "message_format": 'text',
189 "message": message,
190 "from": "AppEnlight",
191 "room_id": room.strip(),
192 "color": "red",
193 "notify": '1'
194 })
191 for room in self.integration.config["rooms"].split(","):
192 client.send(
193 {
194 "message_format": "text",
195 "message": message,
196 "from": "AppEnlight",
197 "room_id": room.strip(),
198 "color": "red",
199 "notify": "1",
200 }
201 )
195 202
196 203 def send_digest(self, **kwargs):
197 204 """
@@ -206,24 +213,26 b' class HipchatAlertChannel(AlertChannel):'
206 213
207 214 """
208 215 template_vars = self.report_alert_notification_vars(kwargs)
209 f_args = (template_vars['resource_name'],
210 template_vars['confirmed_total'],)
216 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
211 217 message = "Daily report digest: %s - %s reports" % f_args
212 message += '\n{}'.format(template_vars['destination_url'])
213 api_token = self.integration.config['api_token']
218 message += "\n{}".format(template_vars["destination_url"])
219 api_token = self.integration.config["api_token"]
214 220 client = HipchatIntegration.create_client(api_token)
215 for room in self.integration.config['rooms'].split(','):
216 client.send({
217 "message_format": 'text',
218 "message": message,
219 "from": "AppEnlight",
220 "room_id": room.strip(),
221 "color": "green",
222 "notify": '1'
223 })
224
225 log_msg = 'DIGEST : %s via %s :: %s reports' % (
226 kwargs['user'].user_name,
221 for room in self.integration.config["rooms"].split(","):
222 client.send(
223 {
224 "message_format": "text",
225 "message": message,
226 "from": "AppEnlight",
227 "room_id": room.strip(),
228 "color": "green",
229 "notify": "1",
230 }
231 )
232
233 log_msg = "DIGEST : %s via %s :: %s reports" % (
234 kwargs["user"].user_name,
227 235 self.channel_visible_value,
228 template_vars['confirmed_total'])
236 template_vars["confirmed_total"],
237 )
229 238 log.warning(log_msg)
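
All four HipChat notifiers above repeat the same per-room fan-out with only the message and color changing; a hypothetical extraction of that loop (illustrative refactor, not part of this diff):

    def broadcast(client, rooms_csv, message, color, notify=False):
        # Rooms are stored as one comma-separated string in the
        # integration config; send the message to each of them.
        for room in rooms_csv.split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": message,
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": color,
                    "notify": "1" if notify else "0",
                }
            )
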
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class SlackAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'slack'
28 }
26 __mapper_args__ = {"polymorphic_identity": "slack"}
29 27
30 28 def notify_reports(self, **kwargs):
31 29 """
@@ -40,45 +38,40 b' class SlackAlertChannel(AlertChannel):'
40 38
41 39 """
42 40 template_vars = self.report_alert_notification_vars(kwargs)
43 template_vars["title"] = template_vars['resource_name']
41 template_vars["title"] = template_vars["resource_name"]
44 42
45 if template_vars['confirmed_total'] > 1:
46 template_vars['subtext'] = '%s reports' % template_vars[
47 'confirmed_total']
43 if template_vars["confirmed_total"] > 1:
44 template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"]
48 45 else:
49 error_title = truncate(template_vars['reports'][0][1].error or
50 'slow report', 90)
51 template_vars['subtext'] = error_title
46 error_title = truncate(
47 template_vars["reports"][0][1].error or "slow report", 90
48 )
49 template_vars["subtext"] = error_title
52 50
53 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
54 kwargs['user'].user_name,
51 log_msg = "NOTIFY : %s via %s :: %s reports" % (
52 kwargs["user"].user_name,
55 53 self.channel_visible_value,
56 template_vars['confirmed_total'])
54 template_vars["confirmed_total"],
55 )
57 56 log.warning(log_msg)
58 57
59 client = SlackIntegration.create_client(
60 self.integration.config['webhook_url'])
58 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
61 59 report_data = {
62 60 "username": "AppEnlight",
63 61 "icon_emoji": ":fire:",
64 62 "attachments": [
65 63 {
66 64 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
67 "fallback": "*%s* - <%s| Browse>" % (
68 template_vars["title"],
69 template_vars['destination_url']),
70 "pretext": "*%s* - <%s| Browse>" % (
71 template_vars["title"],
72 template_vars['destination_url']),
65 "fallback": "*%s* - <%s| Browse>"
66 % (template_vars["title"], template_vars["destination_url"]),
67 "pretext": "*%s* - <%s| Browse>"
68 % (template_vars["title"], template_vars["destination_url"]),
73 69 "color": "warning",
74 70 "fields": [
75 {
76 "value": 'Info: %s' % template_vars['subtext'],
77 "short": False
78 }
79 ]
71 {"value": "Info: %s" % template_vars["subtext"], "short": False}
72 ],
80 73 }
81 ]
74 ],
82 75 }
83 76 client.make_request(data=report_data)
84 77
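
The Slack channel builds one incoming-webhook payload and posts it in a single request. A minimal sketch of the equivalent call with plain `requests`; the webhook URL and report details are placeholders:

    import json
    import requests

    WEBHOOK_URL = "https://hooks.slack.com/services/T000/B000/XXXX"  # placeholder

    payload = {
        "username": "AppEnlight",
        "icon_emoji": ":fire:",
        "attachments": [{
            "mrkdwn_in": ["text", "pretext", "title", "fallback"],
            "fallback": "*demo app* - <https://example.com| Browse>",
            "pretext": "*demo app* - <https://example.com| Browse>",
            "color": "warning",
            "fields": [{"value": "Info: 5 reports", "short": False}],
        }],
    }
    requests.post(WEBHOOK_URL, data=json.dumps(payload),
                  headers={"Content-Type": "application/json"}, timeout=3)
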
@@ -95,53 +88,51 b' class SlackAlertChannel(AlertChannel):'
95 88 """
96 89 template_vars = self.report_alert_notification_vars(kwargs)
97 90
98 if kwargs['event'].unified_alert_action() == 'OPEN':
99 title = '*ALERT %s*: %s' % (
100 template_vars['alert_action'],
101 template_vars['resource_name']
91 if kwargs["event"].unified_alert_action() == "OPEN":
92 title = "*ALERT %s*: %s" % (
93 template_vars["alert_action"],
94 template_vars["resource_name"],
102 95 )
103 96
104 template_vars['subtext'] = 'Got at least %s %s' % (
105 kwargs['event'].values['reports'],
106 template_vars['report_type']
97 template_vars["subtext"] = "Got at least %s %s" % (
98 kwargs["event"].values["reports"],
99 template_vars["report_type"],
107 100 )
108 101
109 102 else:
110 title = '*ALERT %s*: %s' % (
111 template_vars['alert_action'],
112 template_vars['resource_name'],
103 title = "*ALERT %s*: %s" % (
104 template_vars["alert_action"],
105 template_vars["resource_name"],
113 106 )
114 107
115 template_vars['subtext'] = ''
108 template_vars["subtext"] = ""
116 109
117 alert_type = template_vars['alert_type'].replace('_', ' ')
118 alert_type = alert_type.replace('alert', '').capitalize()
110 alert_type = template_vars["alert_type"].replace("_", " ")
111 alert_type = alert_type.replace("alert", "").capitalize()
119 112
120 template_vars['type'] = "Type: %s" % alert_type
113 template_vars["type"] = "Type: %s" % alert_type
121 114
122 client = SlackIntegration.create_client(
123 self.integration.config['webhook_url']
124 )
115 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
125 116 report_data = {
126 117 "username": "AppEnlight",
127 118 "icon_emoji": ":rage:",
128 119 "attachments": [
129 120 {
130 121 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
131 "fallback": "%s - <%s| Browse>" % (
132 title, template_vars['destination_url']),
133 "pretext": "%s - <%s| Browse>" % (
134 title, template_vars['destination_url']),
122 "fallback": "%s - <%s| Browse>"
123 % (title, template_vars["destination_url"]),
124 "pretext": "%s - <%s| Browse>"
125 % (title, template_vars["destination_url"]),
135 126 "color": "danger",
136 127 "fields": [
137 128 {
138 "title": template_vars['type'],
139 "value": template_vars['subtext'],
140 "short": False
129 "title": template_vars["type"],
130 "value": template_vars["subtext"],
131 "short": False,
141 132 }
142 ]
133 ],
143 134 }
144 ]
135 ],
145 136 }
146 137 client.make_request(data=report_data)
147 138
@@ -158,13 +149,11 b' class SlackAlertChannel(AlertChannel):'
158 149 """
159 150 template_vars = self.uptime_alert_notification_vars(kwargs)
160 151
161 title = '*ALERT %s*: %s' % (
162 template_vars['alert_action'],
163 template_vars['resource_name'],
164 )
165 client = SlackIntegration.create_client(
166 self.integration.config['webhook_url']
152 title = "*ALERT %s*: %s" % (
153 template_vars["alert_action"],
154 template_vars["resource_name"],
167 155 )
156 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
168 157 report_data = {
169 158 "username": "AppEnlight",
170 159 "icon_emoji": ":rage:",
@@ -172,19 +161,21 b' class SlackAlertChannel(AlertChannel):'
172 161 {
173 162 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
174 163 "fallback": "{} - <{}| Browse>".format(
175 title, template_vars['destination_url']),
164 title, template_vars["destination_url"]
165 ),
176 166 "pretext": "{} - <{}| Browse>".format(
177 title, template_vars['destination_url']),
167 title, template_vars["destination_url"]
168 ),
178 169 "color": "danger",
179 170 "fields": [
180 171 {
181 172 "title": "Application has uptime issues",
182 "value": template_vars['reason'],
183 "short": False
173 "value": template_vars["reason"],
174 "short": False,
184 175 }
185 ]
176 ],
186 177 }
187 ]
178 ],
188 179 }
189 180 client.make_request(data=report_data)
190 181
@@ -201,39 +192,39 b' class SlackAlertChannel(AlertChannel):'
201 192 """
202 193 template_vars = self.chart_alert_notification_vars(kwargs)
203 194
204 title = '*ALERT {}*: value in *"{}"* chart ' \
205 'met alert *"{}"* criteria'.format(
206 template_vars['alert_action'],
207 template_vars['chart_name'],
208 template_vars['action_name'],
195 title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format(
196 template_vars["alert_action"],
197 template_vars["chart_name"],
198 template_vars["action_name"],
209 199 )
210 200
211 subtext = ''
212 for item in template_vars['readable_values']:
213 subtext += '{} - {}\n'.format(item['label'], item['value'])
201 subtext = ""
202 for item in template_vars["readable_values"]:
203 subtext += "{} - {}\n".format(item["label"], item["value"])
214 204
215 client = SlackIntegration.create_client(
216 self.integration.config['webhook_url']
217 )
205 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
218 206 report_data = {
219 207 "username": "AppEnlight",
220 208 "icon_emoji": ":rage:",
221 209 "attachments": [
222 {"mrkdwn_in": ["text", "pretext", "title", "fallback"],
223 "fallback": "{} - <{}| Browse>".format(
224 title, template_vars['destination_url']),
225 "pretext": "{} - <{}| Browse>".format(
226 title, template_vars['destination_url']),
227 "color": "danger",
228 "fields": [
229 {
230 "title": "Following criteria were met:",
231 "value": subtext,
232 "short": False
233 }
234 ]
235 }
236 ]
210 {
211 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
212 "fallback": "{} - <{}| Browse>".format(
213 title, template_vars["destination_url"]
214 ),
215 "pretext": "{} - <{}| Browse>".format(
216 title, template_vars["destination_url"]
217 ),
218 "color": "danger",
219 "fields": [
220 {
221 "title": "Following criteria were met:",
222 "value": subtext,
223 "short": False,
224 }
225 ],
226 }
227 ],
237 228 }
238 229 client.make_request(data=report_data)
239 230
@@ -250,36 +241,30 b' class SlackAlertChannel(AlertChannel):'
250 241
251 242 """
252 243 template_vars = self.report_alert_notification_vars(kwargs)
253 title = "*Daily report digest*: %s" % template_vars['resource_name']
244 title = "*Daily report digest*: %s" % template_vars["resource_name"]
254 245
255 subtext = '%s reports' % template_vars['confirmed_total']
246 subtext = "%s reports" % template_vars["confirmed_total"]
256 247
257 client = SlackIntegration.create_client(
258 self.integration.config['webhook_url']
259 )
248 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
260 249 report_data = {
261 250 "username": "AppEnlight",
262 251 "attachments": [
263 252 {
264 253 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
265 "fallback": "%s : <%s| Browse>" % (
266 title, template_vars['destination_url']),
267 "pretext": "%s: <%s| Browse>" % (
268 title, template_vars['destination_url']),
254 "fallback": "%s : <%s| Browse>"
255 % (title, template_vars["destination_url"]),
256 "pretext": "%s: <%s| Browse>"
257 % (title, template_vars["destination_url"]),
269 258 "color": "good",
270 "fields": [
271 {
272 "title": "Got at least: %s" % subtext,
273 "short": False
274 }
275 ]
259 "fields": [{"title": "Got at least: %s" % subtext, "short": False}],
276 260 }
277 ]
261 ],
278 262 }
279 263 client.make_request(data=report_data)
280 264
281 log_msg = 'DIGEST : %s via %s :: %s reports' % (
282 kwargs['user'].user_name,
265 log_msg = "DIGEST : %s via %s :: %s reports" % (
266 kwargs["user"].user_name,
283 267 self.channel_visible_value,
284 template_vars['confirmed_total'])
268 template_vars["confirmed_total"],
269 )
285 270 log.warning(log_msg)
@@ -24,7 +24,7 b' log = logging.getLogger(__name__)'
24 24
25 25
26 26 def generate_api_key():
27 uid = str(uuid.uuid4()).replace('-', '')
27 uid = str(uuid.uuid4()).replace("-", "")
28 28 return uid[0:32]
29 29
30 30
@@ -33,61 +33,69 b' class Application(Resource):'
33 33 Resource of application type
34 34 """
35 35
36 __tablename__ = 'applications'
37 __mapper_args__ = {'polymorphic_identity': 'application'}
36 __tablename__ = "applications"
37 __mapper_args__ = {"polymorphic_identity": "application"}
38 38
39 39 # lists configurable possible permissions for this resource type
40 __possible_permissions__ = ('view', 'update_reports')
41
42 resource_id = sa.Column(sa.Integer(),
43 sa.ForeignKey('resources.resource_id',
44 onupdate='CASCADE',
45 ondelete='CASCADE', ),
46 primary_key=True, )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default='')
48 api_key = sa.Column(sa.String(32), nullable=False, unique=True, index=True,
49 default=generate_api_key)
50 public_key = sa.Column(sa.String(32), nullable=False, unique=True,
51 index=True,
52 default=generate_api_key)
53 default_grouping = sa.Column(sa.Unicode(20), nullable=False,
54 default='url_traceback')
40 __possible_permissions__ = ("view", "update_reports")
41
42 resource_id = sa.Column(
43 sa.Integer(),
44 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
45 primary_key=True,
46 )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default="")
48 api_key = sa.Column(
49 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
50 )
51 public_key = sa.Column(
52 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
53 )
54 default_grouping = sa.Column(
55 sa.Unicode(20), nullable=False, default="url_traceback"
56 )
55 57 error_report_threshold = sa.Column(sa.Integer(), default=10)
56 58 slow_report_threshold = sa.Column(sa.Integer(), default=10)
57 allow_permanent_storage = sa.Column(sa.Boolean(), default=False,
58 nullable=False)
59 allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False)
59 60
60 @sa.orm.validates('default_grouping')
61 @sa.orm.validates("default_grouping")
61 62 def validate_default_grouping(self, key, grouping):
62 63 """ validate if resouce can have specific permission """
63 assert grouping in ['url_type', 'url_traceback', 'traceback_server']
64 assert grouping in ["url_type", "url_traceback", "traceback_server"]
64 65 return grouping
65 66
66 report_groups = sa.orm.relationship('ReportGroup',
67 cascade="all, delete-orphan",
68 passive_deletes=True,
69 passive_updates=True,
70 lazy='dynamic',
71 backref=sa.orm.backref('application',
72 lazy="joined"))
73
74 postprocess_conf = sa.orm.relationship('ApplicationPostprocessConf',
75 cascade="all, delete-orphan",
76 passive_deletes=True,
77 passive_updates=True,
78 backref='resource')
79
80 logs = sa.orm.relationship('Log',
81 lazy='dynamic',
82 backref='application',
83 passive_deletes=True,
84 passive_updates=True, )
85
86 integrations = sa.orm.relationship('IntegrationBase',
87 backref='resource',
88 cascade="all, delete-orphan",
89 passive_deletes=True,
90 passive_updates=True, )
67 report_groups = sa.orm.relationship(
68 "ReportGroup",
69 cascade="all, delete-orphan",
70 passive_deletes=True,
71 passive_updates=True,
72 lazy="dynamic",
73 backref=sa.orm.backref("application", lazy="joined"),
74 )
75
76 postprocess_conf = sa.orm.relationship(
77 "ApplicationPostprocessConf",
78 cascade="all, delete-orphan",
79 passive_deletes=True,
80 passive_updates=True,
81 backref="resource",
82 )
83
84 logs = sa.orm.relationship(
85 "Log",
86 lazy="dynamic",
87 backref="application",
88 passive_deletes=True,
89 passive_updates=True,
90 )
91
92 integrations = sa.orm.relationship(
93 "IntegrationBase",
94 backref="resource",
95 cascade="all, delete-orphan",
96 passive_deletes=True,
97 passive_updates=True,
98 )
91 99
92 100 def generate_api_key(self):
93 101 return generate_api_key()
@@ -95,10 +103,11 b' class Application(Resource):'
95 103
96 104 def after_update(mapper, connection, target):
97 105 from appenlight.models.services.application import ApplicationService
98 log.info('clearing out ApplicationService cache')
106
107 log.info("clearing out ApplicationService cache")
99 108 ApplicationService.by_id_cached().invalidate(target.resource_id)
100 109 ApplicationService.by_api_key_cached().invalidate(target.api_key)
101 110
102 111
103 sa.event.listen(Application, 'after_update', after_update)
104 sa.event.listen(Application, 'after_delete', after_update)
112 sa.event.listen(Application, "after_update", after_update)
113 sa.event.listen(Application, "after_delete", after_update)
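
The @sa.orm.validates hook above rejects any grouping outside the allowed set at assignment time. A self-contained sketch of the same guard on a hypothetical model:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import validates

    Base = declarative_base()

    class DemoApp(Base):  # hypothetical stand-in for Application
        __tablename__ = "demo_apps"
        id = sa.Column(sa.Integer, primary_key=True)
        default_grouping = sa.Column(sa.Unicode(20), default="url_traceback")

        @validates("default_grouping")
        def validate_default_grouping(self, key, grouping):
            # raises AssertionError for anything outside the allowed set
            assert grouping in ["url_type", "url_traceback", "traceback_server"]
            return grouping

    app = DemoApp(default_grouping="url_type")   # ok
    # DemoApp(default_grouping="bogus")          # AssertionError
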
@@ -27,18 +27,20 b' class ApplicationPostprocessConf(Base, BaseModel):'
27 27 This is later used for rule parsing like "if 10 occurrences bump priority +1"
28 28 """
29 29
30 __tablename__ = 'application_postprocess_conf'
30 __tablename__ = "application_postprocess_conf"
31 31
32 32 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
33 resource_id = sa.Column(sa.Integer(),
34 sa.ForeignKey('resources.resource_id',
35 onupdate='CASCADE',
36 ondelete='CASCADE'))
33 resource_id = sa.Column(
34 sa.Integer(),
35 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
36 )
37 37 do = sa.Column(sa.Unicode(25), nullable=False)
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default='')
39 rule = sa.Column(sa.dialects.postgresql.JSON,
40 nullable=False, default={'field': 'http_status',
41 "op": "ge", "value": "500"})
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default="")
39 rule = sa.Column(
40 sa.dialects.postgresql.JSON,
41 nullable=False,
42 default={"field": "http_status", "op": "ge", "value": "500"},
43 )
42 44
43 45 def postprocess(self, item):
44 46 new_value = int(self.new_value)
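
The default rule above means "match reports whose http_status is >= 500". A hypothetical evaluator for that rule shape (the real project has its own rule-parsing module; this is only an illustration):

    rule = {"field": "http_status", "op": "ge", "value": "500"}

    def matches(report, rule):
        # minimal operator table; the real parser supports more
        ops = {"ge": lambda a, b: a >= b, "eq": lambda a, b: a == b}
        return ops[rule["op"]](int(report[rule["field"]]), int(rule["value"]))

    assert matches({"http_status": 502}, rule)
    assert not matches({"http_status": 200}, rule)
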
@@ -29,17 +29,22 b' class AuthToken(Base, BaseModel):'
29 29 """
30 30 Stores user API auth tokens
31 31 """
32 __tablename__ = 'auth_tokens'
32
33 __tablename__ = "auth_tokens"
33 34
34 35 id = sa.Column(sa.Integer, primary_key=True, nullable=False)
35 token = sa.Column(sa.Unicode(40), nullable=False,
36 default=lambda x: UserService.generate_random_string(40))
37 owner_id = sa.Column(sa.Unicode(30),
38 sa.ForeignKey('users.id', onupdate='CASCADE',
39 ondelete='CASCADE'))
36 token = sa.Column(
37 sa.Unicode(40),
38 nullable=False,
39 default=lambda x: UserService.generate_random_string(40),
40 )
41 owner_id = sa.Column(
42 sa.Unicode(30),
43 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
44 )
40 45 creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
41 46 expires = sa.Column(sa.DateTime)
42 description = sa.Column(sa.Unicode, default='')
47 description = sa.Column(sa.Unicode, default="")
43 48
44 49 @property
45 50 def is_expired(self):
@@ -49,4 +54,4 b' class AuthToken(Base, BaseModel):'
49 54 return False
50 55
51 56 def __str__(self):
52 return '<AuthToken u:%s t:%s...>' % (self.owner_id, self.token[0:10])
57 return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10])
@@ -22,7 +22,7 b' from . import Base'
22 22
23 23
24 24 class Config(Base, BaseModel):
25 __tablename__ = 'config'
25 __tablename__ = "config"
26 26
27 27 key = sa.Column(sa.Unicode, primary_key=True)
28 28 section = sa.Column(sa.Unicode, primary_key=True)
@@ -30,39 +30,40 b' log = logging.getLogger(__name__)'
30 30
31 31
32 32 class Event(Base, BaseModel):
33 __tablename__ = 'events'
33 __tablename__ = "events"
34 34
35 types = {'error_report_alert': 1,
36 'slow_report_alert': 3,
37 'comment': 5,
38 'assignment': 6,
39 'uptime_alert': 7,
40 'chart_alert': 9}
35 types = {
36 "error_report_alert": 1,
37 "slow_report_alert": 3,
38 "comment": 5,
39 "assignment": 6,
40 "uptime_alert": 7,
41 "chart_alert": 9,
42 }
41 43
42 statuses = {'active': 1,
43 'closed': 0}
44 statuses = {"active": 1, "closed": 0}
44 45
45 46 id = sa.Column(sa.Integer, primary_key=True)
46 47 start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
47 48 end_date = sa.Column(sa.DateTime)
48 49 status = sa.Column(sa.Integer, default=1)
49 50 event_type = sa.Column(sa.Integer, default=1)
50 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
51 nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
53 nullable=True)
54 resource_id = sa.Column(sa.Integer(),
55 sa.ForeignKey('resources.resource_id'),
56 nullable=True)
51 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
53 resource_id = sa.Column(
54 sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True
55 )
57 56 target_id = sa.Column(sa.Integer)
58 57 target_uuid = sa.Column(sa.Unicode(40))
59 58 text = sa.Column(sa.UnicodeText())
60 59 values = sa.Column(JSON(), nullable=False, default=None)
61 60
62 61 def __repr__(self):
63 return '<Event %s, app:%s, %s>' % (self.unified_alert_name(),
64 self.resource_id,
65 self.unified_alert_action())
62 return "<Event %s, app:%s, %s>" % (
63 self.unified_alert_name(),
64 self.resource_id,
65 self.unified_alert_action(),
66 )
66 67
67 68 @property
68 69 def reverse_types(self):
@@ -73,9 +74,9 b' class Event(Base, BaseModel):'
73 74
74 75 def unified_alert_action(self):
75 76 event_name = self.reverse_types[self.event_type]
76 if self.status == Event.statuses['closed']:
77 if self.status == Event.statuses["closed"]:
77 78 return "CLOSE"
78 if self.status != Event.statuses['closed']:
79 if self.status != Event.statuses["closed"]:
79 80 return "OPEN"
80 81 return event_name
81 82
@@ -89,30 +90,33 b' class Event(Base, BaseModel):'
89 90 request = get_current_request()
90 91 if not resource:
91 92 return
92 users = set([p.user for p in ResourceService.users_for_perm(resource, 'view')])
93 users = set([p.user for p in ResourceService.users_for_perm(resource, "view")])
93 94 for user in users:
94 95 for channel in user.alert_channels:
95 matches_resource = not channel.resources or resource in [r.resource_id for r in channel.resources]
96 matches_resource = not channel.resources or resource in [
97 r.resource_id for r in channel.resources
98 ]
96 99 if (
97 not channel.channel_validated or
98 not channel.send_alerts or
99 not matches_resource
100 not channel.channel_validated
101 or not channel.send_alerts
102 or not matches_resource
100 103 ):
101 104 continue
102 105 else:
103 106 try:
104 channel.notify_alert(resource=resource,
105 event=self,
106 user=user,
107 request=request)
107 channel.notify_alert(
108 resource=resource, event=self, user=user, request=request
109 )
108 110 except IntegrationException as e:
109 log.warning('%s' % e)
111 log.warning("%s" % e)
110 112
111 113 def validate_or_close(self, since_when, db_session=None):
112 114 """ Checks if alerts should stay open or it's time to close them.
113 115 Generates a close-alert event when alerts get closed """
114 event_types = [Event.types['error_report_alert'],
115 Event.types['slow_report_alert']]
116 event_types = [
117 Event.types["error_report_alert"],
118 Event.types["slow_report_alert"],
119 ]
116 120 app = ResourceService.by_resource_id(self.resource_id)
117 121 # if app was deleted close instantly
118 122 if not app:
@@ -121,10 +125,11 b' class Event(Base, BaseModel):'
121 125
122 126 if self.event_type in event_types:
123 127 total = ReportStatService.count_by_type(
124 self.event_type, self.resource_id, since_when)
125 if Event.types['error_report_alert'] == self.event_type:
128 self.event_type, self.resource_id, since_when
129 )
130 if Event.types["error_report_alert"] == self.event_type:
126 131 threshold = app.error_report_threshold
127 if Event.types['slow_report_alert'] == self.event_type:
132 if Event.types["slow_report_alert"] == self.event_type:
128 133 threshold = app.slow_report_threshold
129 134
130 135 if total < threshold:
@@ -135,31 +140,31 b' class Event(Base, BaseModel):'
135 140 Closes an event and sends notification to affected users
136 141 """
137 142 self.end_date = datetime.utcnow()
138 self.status = Event.statuses['closed']
139 log.warning('ALERT: CLOSE: %s' % self)
143 self.status = Event.statuses["closed"]
144 log.warning("ALERT: CLOSE: %s" % self)
140 145 self.send_alerts()
141 146
142 147 def text_representation(self):
143 148 alert_type = self.unified_alert_name()
144 text = ''
145 if 'slow_report' in alert_type:
146 text += 'Slow report alert'
147 if 'error_report' in alert_type:
148 text += 'Exception report alert'
149 if 'uptime_alert' in alert_type:
150 text += 'Uptime alert'
151 if 'chart_alert' in alert_type:
152 text += 'Metrics value alert'
149 text = ""
150 if "slow_report" in alert_type:
151 text += "Slow report alert"
152 if "error_report" in alert_type:
153 text += "Exception report alert"
154 if "uptime_alert" in alert_type:
155 text += "Uptime alert"
156 if "chart_alert" in alert_type:
157 text += "Metrics value alert"
153 158
154 159 alert_action = self.unified_alert_action()
155 if alert_action == 'OPEN':
156 text += ' got opened.'
157 if alert_action == 'CLOSE':
158 text += ' got closed.'
160 if alert_action == "OPEN":
161 text += " got opened."
162 if alert_action == "CLOSE":
163 text += " got closed."
159 164 return text
160 165
161 166 def get_dict(self, request=None):
162 167 dict_data = super(Event, self).get_dict()
163 dict_data['text'] = self.text_representation()
164 dict_data['resource_name'] = self.resource.resource_name
168 dict_data["text"] = self.text_representation()
169 dict_data["resource_name"] = self.resource.resource_name
165 170 return dict_data
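
reverse_types, used by unified_alert_action above, resolves the stored integer back to its event name. A plain-dict sketch of that lookup:

    types = {
        "error_report_alert": 1, "slow_report_alert": 3, "comment": 5,
        "assignment": 6, "uptime_alert": 7, "chart_alert": 9,
    }
    reverse_types = {v: k for k, v in types.items()}
    assert reverse_types[7] == "uptime_alert"
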
@@ -25,12 +25,12 b' from appenlight.lib.sqlalchemy_fields import EncryptedUnicode'
25 25 class ExternalIdentity(ExternalIdentityMixin, Base):
26 26 @declared_attr
27 27 def access_token(self):
28 return sa.Column(EncryptedUnicode(255), default='')
28 return sa.Column(EncryptedUnicode(255), default="")
29 29
30 30 @declared_attr
31 31 def alt_token(self):
32 return sa.Column(EncryptedUnicode(255), default='')
32 return sa.Column(EncryptedUnicode(255), default="")
33 33
34 34 @declared_attr
35 35 def token_secret(self):
36 return sa.Column(EncryptedUnicode(255), default='')
36 return sa.Column(EncryptedUnicode(255), default="")
@@ -19,27 +19,28 b' from appenlight.models import Base'
19 19
20 20
21 21 class Group(GroupMixin, Base):
22 __possible_permissions__ = ('root_administration',
23 'test_features',
24 'admin_panel',
25 'admin_users',
26 'manage_partitions',)
22 __possible_permissions__ = (
23 "root_administration",
24 "test_features",
25 "admin_panel",
26 "admin_users",
27 "manage_partitions",
28 )
27 29
28 def get_dict(self, exclude_keys=None, include_keys=None,
29 include_perms=False):
30 def get_dict(self, exclude_keys=None, include_keys=None, include_perms=False):
30 31 result = super(Group, self).get_dict(exclude_keys, include_keys)
31 32 if include_perms:
32 result['possible_permissions'] = self.__possible_permissions__
33 result['current_permissions'] = [p.perm_name for p in
34 self.permissions]
33 result["possible_permissions"] = self.__possible_permissions__
34 result["current_permissions"] = [p.perm_name for p in self.permissions]
35 35 else:
36 result['possible_permissions'] = []
37 result['current_permissions'] = []
36 result["possible_permissions"] = []
37 result["current_permissions"] = []
38 38 exclude_keys_list = exclude_keys or []
39 39 include_keys_list = include_keys or []
40 40 d = {}
41 41 for k in result.keys():
42 if (k not in exclude_keys_list and
43 (k in include_keys_list or not include_keys)):
42 if k not in exclude_keys_list and (
43 k in include_keys_list or not include_keys
44 ):
44 45 d[k] = result[k]
45 46 return d
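
The key filter at the end of get_dict keeps a key only when it is not excluded and is either explicitly included or no include list was given. A plain-function sketch of the same logic:

    def filter_keys(result, exclude_keys=None, include_keys=None):
        exclude = exclude_keys or []
        include = include_keys or []
        return {
            k: v for k, v in result.items()
            if k not in exclude and (k in include or not include_keys)
        }

    assert filter_keys({"a": 1, "b": 2}, exclude_keys=["b"]) == {"a": 1}
    assert filter_keys({"a": 1, "b": 2}, include_keys=["a"]) == {"a": 1}
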
@@ -14,8 +14,9 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 from ziggurat_foundations.models.group_resource_permission import \
18 GroupResourcePermissionMixin
17 from ziggurat_foundations.models.group_resource_permission import (
18 GroupResourcePermissionMixin,
19 )
19 20 from appenlight.models import Base
20 21
21 22
@@ -32,34 +32,37 b' class IntegrationBase(Base, BaseModel):'
32 32 """
33 33 Model from which all integrations inherit using polymorphic approach
34 34 """
35 __tablename__ = 'integrations'
35
36 __tablename__ = "integrations"
36 37
37 38 front_visible = False
38 39 as_alert_channel = False
39 40 supports_report_alerting = False
40 41
41 42 id = sa.Column(sa.Integer, primary_key=True)
42 resource_id = sa.Column(sa.Integer,
43 sa.ForeignKey('applications.resource_id'))
43 resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id"))
44 44 integration_name = sa.Column(sa.Unicode(64))
45 _config = sa.Column('config', JSON(), nullable=False, default='')
45 _config = sa.Column("config", JSON(), nullable=False, default="")
46 46 modified_date = sa.Column(sa.DateTime)
47 47
48 channel = sa.orm.relationship('AlertChannel',
49 cascade="all,delete-orphan",
50 passive_deletes=True,
51 passive_updates=True,
52 uselist=False,
53 backref='integration')
48 channel = sa.orm.relationship(
49 "AlertChannel",
50 cascade="all,delete-orphan",
51 passive_deletes=True,
52 passive_updates=True,
53 uselist=False,
54 backref="integration",
55 )
54 56
55 57 __mapper_args__ = {
56 'polymorphic_on': 'integration_name',
57 'polymorphic_identity': 'integration'
58 "polymorphic_on": "integration_name",
59 "polymorphic_identity": "integration",
58 60 }
59 61
60 62 @classmethod
61 def by_app_id_and_integration_name(cls, resource_id, integration_name,
62 db_session=None):
63 def by_app_id_and_integration_name(
64 cls, resource_id, integration_name, db_session=None
65 ):
63 66 db_session = get_db_session(db_session)
64 67 query = db_session.query(cls)
65 68 query = query.filter(cls.integration_name == integration_name)
@@ -72,7 +75,6 b' class IntegrationBase(Base, BaseModel):'
72 75
73 76 @config.setter
74 77 def config(self, value):
75 if not hasattr(value, 'items'):
76 raise Exception('IntegrationBase.config only accepts '
77 'flat dictionaries')
78 if not hasattr(value, "items"):
79 raise Exception("IntegrationBase.config only accepts " "flat dictionaries")
78 80 self._config = encrypt_dictionary_keys(value)
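
The config setter accepts anything dict-like (duck-typed on .items()) and encrypts it before storage. A sketch of that guard, with the project's encrypt_dictionary_keys helper stubbed out:

    def set_config(value):
        if not hasattr(value, "items"):
            raise Exception("IntegrationBase.config only accepts flat dictionaries")
        # stand-in for encrypt_dictionary_keys(value)
        return {k: str(v) for k, v in value.items()}

    set_config({"api_token": "secret"})   # ok
    # set_config("api_token=secret")      # raises Exception
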
@@ -16,8 +16,7 b''
16 16
17 17 import requests
18 18 from requests_oauthlib import OAuth1
19 from appenlight.models.integrations import (IntegrationBase,
20 IntegrationException)
19 from appenlight.models.integrations import IntegrationBase, IntegrationException
21 20
22 21 _ = str
23 22
@@ -27,14 +26,12 b' class NotFoundException(Exception):'
27 26
28 27
29 28 class BitbucketIntegration(IntegrationBase):
30 __mapper_args__ = {
31 'polymorphic_identity': 'bitbucket'
32 }
29 __mapper_args__ = {"polymorphic_identity": "bitbucket"}
33 30 front_visible = True
34 31 as_alert_channel = False
35 32 supports_report_alerting = False
36 33 action_notification = True
37 integration_action = 'Add issue to Bitbucket'
34 integration_action = "Add issue to Bitbucket"
38 35
39 36 @classmethod
40 37 def create_client(cls, request, user_name=None, repo_name=None):
@@ -46,27 +43,36 b' class BitbucketIntegration(IntegrationBase):'
46 43 token = None
47 44 secret = None
48 45 for identity in request.user.external_identities:
49 if identity.provider_name == 'bitbucket':
46 if identity.provider_name == "bitbucket":
50 47 token = identity.access_token
51 48 secret = identity.token_secret
52 49 break
53 50 if not token:
54 raise IntegrationException(
55 'No valid auth token present for this service')
56 client = BitbucketClient(token, secret,
57 user_name,
58 repo_name,
59 config['authomatic.pr.bitbucket.key'],
60 config['authomatic.pr.bitbucket.secret'])
51 raise IntegrationException("No valid auth token present for this service")
52 client = BitbucketClient(
53 token,
54 secret,
55 user_name,
56 repo_name,
57 config["authomatic.pr.bitbucket.key"],
58 config["authomatic.pr.bitbucket.secret"],
59 )
61 60 return client
62 61
63 62
64 63 class BitbucketClient(object):
65 api_url = 'https://bitbucket.org/api/1.0'
66 repo_type = 'bitbucket'
67
68 def __init__(self, token, secret, owner, repo_name, bitbucket_consumer_key,
69 bitbucket_consumer_secret):
64 api_url = "https://bitbucket.org/api/1.0"
65 repo_type = "bitbucket"
66
67 def __init__(
68 self,
69 token,
70 secret,
71 owner,
72 repo_name,
73 bitbucket_consumer_key,
74 bitbucket_consumer_secret,
75 ):
70 76 self.access_token = token
71 77 self.token_secret = secret
72 78 self.owner = owner
@@ -75,89 +81,108 b' class BitbucketClient(object):'
75 81 self.bitbucket_consumer_secret = bitbucket_consumer_secret
76 82
77 83 possible_keys = {
78 'status': ['new', 'open', 'resolved', 'on hold', 'invalid',
79 'duplicate', 'wontfix'],
80 'priority': ['trivial', 'minor', 'major', 'critical', 'blocker'],
81 'kind': ['bug', 'enhancement', 'proposal', 'task']
84 "status": [
85 "new",
86 "open",
87 "resolved",
88 "on hold",
89 "invalid",
90 "duplicate",
91 "wontfix",
92 ],
93 "priority": ["trivial", "minor", "major", "critical", "blocker"],
94 "kind": ["bug", "enhancement", "proposal", "task"],
82 95 }
83 96
84 97 def get_statuses(self):
85 98 """Gets list of possible item statuses"""
86 return self.possible_keys['status']
99 return self.possible_keys["status"]
87 100
88 101 def get_priorities(self):
89 102 """Gets list of possible item statuses"""
90 return self.possible_keys['priority']
103 return self.possible_keys["priority"]
91 104
92 def make_request(self, url, method='get', data=None, headers=None):
105 def make_request(self, url, method="get", data=None, headers=None):
93 106 """
94 107 Performs an HTTP request to Bitbucket
95 108 """
96 auth = OAuth1(self.bitbucket_consumer_key,
97 self.bitbucket_consumer_secret,
98 self.access_token, self.token_secret)
109 auth = OAuth1(
110 self.bitbucket_consumer_key,
111 self.bitbucket_consumer_secret,
112 self.access_token,
113 self.token_secret,
114 )
99 115 try:
100 resp = getattr(requests, method)(url, data=data, auth=auth,
101 timeout=10)
116 resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10)
102 117 except Exception as e:
103 118 raise IntegrationException(
104 _('Error communicating with Bitbucket: %s') % (e,))
119 _("Error communicating with Bitbucket: %s") % (e,)
120 )
105 121 if resp.status_code == 401:
106 raise IntegrationException(
107 _('You are not authorized to access this repo'))
122 raise IntegrationException(_("You are not authorized to access this repo"))
108 123 elif resp.status_code == 404:
109 raise IntegrationException(_('User or repo name are incorrect'))
124 raise IntegrationException(_("User or repo name are incorrect"))
110 125 elif resp.status_code not in [200, 201]:
111 126 raise IntegrationException(
112 _('Bitbucket response_code: %s') % resp.status_code)
127 _("Bitbucket response_code: %s") % resp.status_code
128 )
113 129 try:
114 130 return resp.json()
115 131 except Exception as e:
116 132 raise IntegrationException(
117 _('Error decoding response from Bitbucket: %s') % (e,))
133 _("Error decoding response from Bitbucket: %s") % (e,)
134 )
118 135
119 136 def get_assignees(self):
120 137 """Gets list of possible assignees"""
121 url = '%(api_url)s/privileges/%(owner)s/%(repo_name)s' % {
122 'api_url': self.api_url,
123 'owner': self.owner,
124 'repo_name': self.repo_name}
138 url = "%(api_url)s/privileges/%(owner)s/%(repo_name)s" % {
139 "api_url": self.api_url,
140 "owner": self.owner,
141 "repo_name": self.repo_name,
142 }
125 143
126 144 data = self.make_request(url)
127 results = [{'user': self.owner, 'name': '(Repo owner)'}]
145 results = [{"user": self.owner, "name": "(Repo owner)"}]
128 146 if data:
129 147 for entry in data:
130 results.append({"user": entry['user']['username'],
131 "name": entry['user'].get('display_name')})
148 results.append(
149 {
150 "user": entry["user"]["username"],
151 "name": entry["user"].get("display_name"),
152 }
153 )
132 154 return results
133 155
134 156 def create_issue(self, form_data):
135 157 """
136 158 Creates a new issue in the tracker using a REST call
137 159 """
138 url = '%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/' % {
139 'api_url': self.api_url,
140 'owner': self.owner,
141 'repo_name': self.repo_name}
160 url = "%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/" % {
161 "api_url": self.api_url,
162 "owner": self.owner,
163 "repo_name": self.repo_name,
164 }
142 165
143 166 payload = {
144 "title": form_data['title'],
145 "content": form_data['content'],
146 "kind": form_data['kind'],
147 "priority": form_data['priority'],
148 "responsible": form_data['responsible']
167 "title": form_data["title"],
168 "content": form_data["content"],
169 "kind": form_data["kind"],
170 "priority": form_data["priority"],
171 "responsible": form_data["responsible"],
149 172 }
150 data = self.make_request(url, 'post', payload)
173 data = self.make_request(url, "post", payload)
151 174 f_args = {
152 175 "owner": self.owner,
153 176 "repo_name": self.repo_name,
154 "issue_id": data['local_id']
177 "issue_id": data["local_id"],
155 178 }
156 web_url = 'https://bitbucket.org/%(owner)s/%(repo_name)s' \
157 '/issue/%(issue_id)s/issue-title' % f_args
179 web_url = (
180 "https://bitbucket.org/%(owner)s/%(repo_name)s"
181 "/issue/%(issue_id)s/issue-title" % f_args
182 )
158 183 to_return = {
159 'id': data['local_id'],
160 'resource_url': data['resource_uri'],
161 'web_url': web_url
184 "id": data["local_id"],
185 "resource_url": data["resource_uri"],
186 "web_url": web_url,
162 187 }
163 188 return to_return
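
A hypothetical usage of the BitbucketClient above; every credential and name here is a placeholder, and the call performs a real HTTP request when run:

    client = BitbucketClient(
        token="TOKEN", secret="SECRET",
        owner="demo-user", repo_name="demo-repo",
        bitbucket_consumer_key="KEY",
        bitbucket_consumer_secret="CONSUMER_SECRET",
    )
    issue = client.create_issue({
        "title": "Example crash",
        "content": "Traceback (most recent call last): ...",
        "kind": "bug",
        "priority": "major",
        "responsible": "demo-user",
    })
    print(issue["web_url"])
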
@@ -20,8 +20,7 b' from requests.exceptions import HTTPError, ConnectionError'
20 20 from camplight import Request, Campfire
21 21 from camplight.exceptions import CamplightException
22 22
23 from appenlight.models.integrations import (IntegrationBase,
24 IntegrationException)
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
25 24
26 25 _ = str
27 26
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33 32
34 33
35 34 class CampfireIntegration(IntegrationBase):
36 __mapper_args__ = {
37 'polymorphic_identity': 'campfire'
38 }
35 __mapper_args__ = {"polymorphic_identity": "campfire"}
39 36 front_visible = False
40 37 as_alert_channel = True
41 38 supports_report_alerting = True
42 39 action_notification = True
43 integration_action = 'Message via Campfire'
40 integration_action = "Message via Campfire"
44 41
45 42 @classmethod
46 43 def create_client(cls, api_token, account):
@@ -50,7 +47,7 b' class CampfireIntegration(IntegrationBase):'
50 47
51 48 class CampfireClient(object):
52 49 def __init__(self, api_token, account):
53 request = Request('https://%s.campfirenow.com' % account, api_token)
50 request = Request("https://%s.campfirenow.com" % account, api_token)
54 51 self.campfire = Campfire(request)
55 52
56 53 def get_account(self):
@@ -65,10 +62,10 b' class CampfireClient(object):'
65 62 except (HTTPError, CamplightException) as e:
66 63 raise IntegrationException(str(e))
67 64
68 def speak_to_room(self, room, message, sound='RIMSHOT'):
65 def speak_to_room(self, room, message, sound="RIMSHOT"):
69 66 try:
70 67 room = self.campfire.room(room)
71 68 room.join()
72 room.speak(message, type_='TextMessage')
69 room.speak(message, type_="TextMessage")
73 70 except (HTTPError, CamplightException, ConnectionError) as e:
74 71 raise IntegrationException(str(e))
@@ -20,8 +20,7 b' import requests'
20 20 from requests.auth import HTTPBasicAuth
21 21 import simplejson as json
22 22
23 from appenlight.models.integrations import (IntegrationBase,
24 IntegrationException)
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
25 24
26 25 _ = str
27 26
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33 32
34 33
35 34 class FlowdockIntegration(IntegrationBase):
36 __mapper_args__ = {
37 'polymorphic_identity': 'flowdock'
38 }
35 __mapper_args__ = {"polymorphic_identity": "flowdock"}
39 36 front_visible = False
40 37 as_alert_channel = True
41 38 supports_report_alerting = True
42 39 action_notification = True
43 integration_action = 'Message via Flowdock'
40 integration_action = "Message via Flowdock"
44 41
45 42 @classmethod
46 43 def create_client(cls, api_token):
@@ -50,33 +47,37 b' class FlowdockIntegration(IntegrationBase):'
50 47
51 48 class FlowdockClient(object):
52 49 def __init__(self, api_token):
53 self.auth = HTTPBasicAuth(api_token, '')
50 self.auth = HTTPBasicAuth(api_token, "")
54 51 self.api_token = api_token
55 self.api_url = 'https://api.flowdock.com/v1/messages'
52 self.api_url = "https://api.flowdock.com/v1/messages"
56 53
57 def make_request(self, url, method='get', data=None):
54 def make_request(self, url, method="get", data=None):
58 55 headers = {
59 'Content-Type': 'application/json',
60 'User-Agent': 'appenlight-flowdock',
56 "Content-Type": "application/json",
57 "User-Agent": "appenlight-flowdock",
61 58 }
62 59 try:
63 60 if data:
64 61 data = json.dumps(data)
65 resp = getattr(requests, method)(url, data=data, headers=headers,
66 timeout=10)
62 resp = getattr(requests, method)(
63 url, data=data, headers=headers, timeout=10
64 )
67 65 except Exception as e:
68 66 raise IntegrationException(
69 _('Error communicating with Flowdock: %s') % (e,))
67 _("Error communicating with Flowdock: %s") % (e,)
68 )
70 69 if resp.status_code > 299:
71 70 raise IntegrationException(resp.text)
72 71 return resp
73 72
74 73 def send_to_chat(self, payload):
75 url = '%(api_url)s/chat/%(api_token)s' % {'api_url': self.api_url,
76 'api_token': self.api_token}
77 return self.make_request(url, method='post', data=payload).json()
74 url = "%(api_url)s/chat/%(api_token)s" % {
75 "api_url": self.api_url,
76 "api_token": self.api_token,
77 }
78 return self.make_request(url, method="post", data=payload).json()
78 79
79 80 def send_to_inbox(self, payload):
80 f_args = {'api_url': self.api_url, 'api_token': self.api_token}
81 url = '%(api_url)s/team_inbox/%(api_token)s' % f_args
82 return self.make_request(url, method='post', data=payload).json()
81 f_args = {"api_url": self.api_url, "api_token": self.api_token}
82 url = "%(api_url)s/team_inbox/%(api_token)s" % f_args
83 return self.make_request(url, method="post", data=payload).json()
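
The two helpers target Flowdock's v1 push endpoints; chat messages and team-inbox items take different fields. Hypothetical payloads (field names assumed from the v1 push API, token is a placeholder):

    chat_payload = {
        "content": "ALERT OPEN: demo-app",
        "external_user_name": "AppEnlight",
    }
    inbox_payload = {
        "source": "AppEnlight",
        "from_address": "alerts@example.com",
        "subject": "Daily report digest",
        "content": "5 reports",
    }
    # client = FlowdockIntegration.create_client("API_TOKEN")
    # client.send_to_chat(chat_payload)
    # client.send_to_inbox(inbox_payload)
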
@@ -27,14 +27,12 b' class GithubAuthException(Exception):'
27 27
28 28
29 29 class GithubIntegration(IntegrationBase):
30 __mapper_args__ = {
31 'polymorphic_identity': 'github'
32 }
30 __mapper_args__ = {"polymorphic_identity": "github"}
33 31 front_visible = True
34 32 as_alert_channel = False
35 33 supports_report_alerting = False
36 34 action_notification = True
37 integration_action = 'Add issue to Github'
35 integration_action = "Add issue to Github"
38 36
39 37 @classmethod
40 38 def create_client(cls, request, user_name=None, repo_name=None):
@@ -45,112 +43,116 b' class GithubIntegration(IntegrationBase):'
45 43 token = None
46 44 secret = None
47 45 for identity in request.user.external_identities:
48 if identity.provider_name == 'github':
46 if identity.provider_name == "github":
49 47 token = identity.access_token
50 48 secret = identity.token_secret
51 49 break
52 50 if not token:
53 raise IntegrationException(
54 'No valid auth token present for this service')
51 raise IntegrationException("No valid auth token present for this service")
55 52 client = GithubClient(token=token, owner=user_name, name=repo_name)
56 53 return client
57 54
58 55
59 56 class GithubClient(object):
60 api_url = 'https://api.github.com'
61 repo_type = 'github'
57 api_url = "https://api.github.com"
58 repo_type = "github"
62 59
63 60 def __init__(self, token, owner, name):
64 61 self.access_token = token
65 62 self.owner = owner
66 63 self.name = name
67 64
68 def make_request(self, url, method='get', data=None, headers=None):
69 req_headers = {'User-Agent': 'appenlight',
70 'Content-Type': 'application/json',
71 'Authorization': 'token %s' % self.access_token}
65 def make_request(self, url, method="get", data=None, headers=None):
66 req_headers = {
67 "User-Agent": "appenlight",
68 "Content-Type": "application/json",
69 "Authorization": "token %s" % self.access_token,
70 }
72 71 try:
73 72 if data:
74 73 data = json.dumps(data)
75 resp = getattr(requests, method)(url, data=data,
76 headers=req_headers,
77 timeout=10)
74 resp = getattr(requests, method)(
75 url, data=data, headers=req_headers, timeout=10
76 )
78 77 except Exception as e:
79 msg = 'Error communicating with Github: %s'
78 msg = "Error communicating with Github: %s"
80 79 raise IntegrationException(_(msg) % (e,))
81 80
82 81 if resp.status_code == 404:
83 msg = 'User or repo name are incorrect'
82 msg = "User or repo name are incorrect"
84 83 raise IntegrationException(_(msg))
85 84 if resp.status_code == 401:
86 msg = 'You are not authorized to access this repo'
85 msg = "You are not authorized to access this repo"
87 86 raise IntegrationException(_(msg))
88 87 elif resp.status_code not in [200, 201]:
89 msg = 'Github response_code: %s'
88 msg = "Github response_code: %s"
90 89 raise IntegrationException(_(msg) % resp.status_code)
91 90 try:
92 91 return resp.json()
93 92 except Exception as e:
94 msg = 'Error decoding response from Github: %s'
93 msg = "Error decoding response from Github: %s"
95 94 raise IntegrationException(_(msg) % (e,))
96 95
97 96 def get_statuses(self):
98 97 """Gets list of possible item statuses"""
99 url = '%(api_url)s/repos/%(owner)s/%(name)s/labels' % {
100 'api_url': self.api_url,
101 'owner': self.owner,
102 'name': self.name}
98 url = "%(api_url)s/repos/%(owner)s/%(name)s/labels" % {
99 "api_url": self.api_url,
100 "owner": self.owner,
101 "name": self.name,
102 }
103 103
104 104 data = self.make_request(url)
105 105
106 106 statuses = []
107 107 for status in data:
108 statuses.append(status['name'])
108 statuses.append(status["name"])
109 109 return statuses
110 110
111 111 def get_repo(self):
112 112 """Gets list of possible item statuses"""
113 url = '%(api_url)s/repos/%(owner)s/%(name)s' % {
114 'api_url': self.api_url,
115 'owner': self.owner,
116 'name': self.name}
113 url = "%(api_url)s/repos/%(owner)s/%(name)s" % {
114 "api_url": self.api_url,
115 "owner": self.owner,
116 "name": self.name,
117 }
117 118
118 119 data = self.make_request(url)
119 120 return data
120 121
121 122 def get_assignees(self):
122 123 """Gets list of possible assignees"""
123 url = '%(api_url)s/repos/%(owner)s/%(name)s/collaborators' % {
124 'api_url': self.api_url,
125 'owner': self.owner,
126 'name': self.name}
124 url = "%(api_url)s/repos/%(owner)s/%(name)s/collaborators" % {
125 "api_url": self.api_url,
126 "owner": self.owner,
127 "name": self.name,
128 }
127 129 data = self.make_request(url)
128 130 results = []
129 131 for entry in data:
130 results.append({"user": entry['login'],
131 "name": entry.get('name')})
132 results.append({"user": entry["login"], "name": entry.get("name")})
132 133 return results
133 134
134 135 def create_issue(self, form_data):
135 136 """
136 137 Make a REST call to create issue in Github's issue tracker
137 138 """
138 url = '%(api_url)s/repos/%(owner)s/%(name)s/issues' % {
139 'api_url': self.api_url,
140 'owner': self.owner,
141 'name': self.name}
139 url = "%(api_url)s/repos/%(owner)s/%(name)s/issues" % {
140 "api_url": self.api_url,
141 "owner": self.owner,
142 "name": self.name,
143 }
142 144
143 145 payload = {
144 "title": form_data['title'],
145 "body": form_data['content'],
146 "title": form_data["title"],
147 "body": form_data["content"],
146 148 "labels": [],
147 "assignee": form_data['responsible']
149 "assignee": form_data["responsible"],
148 150 }
149 payload['labels'].extend(form_data['kind'])
150 data = self.make_request(url, 'post', data=payload)
151 payload["labels"].extend(form_data["kind"])
152 data = self.make_request(url, "post", data=payload)
151 153 to_return = {
152 'id': data['number'],
153 'resource_url': data['url'],
154 'web_url': data['html_url']
154 "id": data["number"],
155 "resource_url": data["url"],
156 "web_url": data["html_url"],
155 157 }
156 158 return to_return
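
A hypothetical usage of the GithubClient above. Note that create_issue extends the issue labels with form_data["kind"], so `kind` must be a list; all names and the token are placeholders:

    client = GithubClient(token="ghp_XXXX", owner="demo-user", name="demo-repo")
    issue = client.create_issue({
        "title": "Example crash",
        "content": "Traceback (most recent call last): ...",
        "kind": ["bug"],             # merged into the issue's labels
        "responsible": "demo-user",  # becomes the assignee
    })
    print(issue["web_url"])
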
@@ -30,14 +30,12 b' class NotFoundException(Exception):'
30 30
31 31
32 32 class HipchatIntegration(IntegrationBase):
33 __mapper_args__ = {
34 'polymorphic_identity': 'hipchat'
35 }
33 __mapper_args__ = {"polymorphic_identity": "hipchat"}
36 34 front_visible = False
37 35 as_alert_channel = True
38 36 supports_report_alerting = True
39 37 action_notification = True
40 integration_action = 'Message via Hipchat'
38 integration_action = "Message via Hipchat"
41 39
42 40 @classmethod
43 41 def create_client(cls, api_token):
@@ -48,36 +46,30 b' class HipchatIntegration(IntegrationBase):'
48 46 class HipchatClient(object):
49 47 def __init__(self, api_token):
50 48 self.api_token = api_token
51 self.api_url = 'https://api.hipchat.com/v1'
52
53 def make_request(self, endpoint, method='get', data=None):
54 headers = {
55 'User-Agent': 'appenlight-hipchat',
56 }
57 url = '%s%s' % (self.api_url, endpoint)
58 params = {
59 'format': 'json',
60 'auth_token': self.api_token
61 }
49 self.api_url = "https://api.hipchat.com/v1"
50
51 def make_request(self, endpoint, method="get", data=None):
52 headers = {"User-Agent": "appenlight-hipchat"}
53 url = "%s%s" % (self.api_url, endpoint)
54 params = {"format": "json", "auth_token": self.api_token}
62 55 try:
63 resp = getattr(requests, method)(url, data=data, headers=headers,
64 params=params,
65 timeout=3)
56 resp = getattr(requests, method)(
57 url, data=data, headers=headers, params=params, timeout=3
58 )
66 59 except Exception as e:
67 msg = 'Error communicating with Hipchat: %s'
60 msg = "Error communicating with Hipchat: %s"
68 61 raise IntegrationException(_(msg) % (e,))
69 62 if resp.status_code == 404:
70 msg = 'Error communicating with Hipchat - Room not found'
63 msg = "Error communicating with Hipchat - Room not found"
71 64 raise IntegrationException(msg)
72 65 elif resp.status_code != requests.codes.ok:
73 msg = 'Error communicating with Hipchat - status code: %s'
66 msg = "Error communicating with Hipchat - status code: %s"
74 67 raise IntegrationException(msg % resp.status_code)
75 68 return resp
76 69
77 70 def get_rooms(self):
78 71 # not used with notification api token
79 return self.make_request('/rooms/list')
72 return self.make_request("/rooms/list")
80 73
81 74 def send(self, payload):
82 return self.make_request('/rooms/message', method='post',
83 data=payload).json()
75 return self.make_request("/rooms/message", method="post", data=payload).json()
@@ -15,8 +15,7 b''
15 15 # limitations under the License.
16 16
17 17 import jira
18 from appenlight.models.integrations import (IntegrationBase,
19 IntegrationException)
18 from appenlight.models.integrations import IntegrationBase, IntegrationException
20 19
21 20 _ = str
22 21
@@ -26,14 +25,12 b' class NotFoundException(Exception):'
26 25
27 26
28 27 class JiraIntegration(IntegrationBase):
29 __mapper_args__ = {
30 'polymorphic_identity': 'jira'
31 }
28 __mapper_args__ = {"polymorphic_identity": "jira"}
32 29 front_visible = True
33 30 as_alert_channel = False
34 31 supports_report_alerting = False
35 32 action_notification = True
36 integration_action = 'Add issue to Jira'
33 integration_action = "Add issue to Jira"
37 34
38 35
39 36 class JiraClient(object):
@@ -44,12 +41,14 b' class JiraClient(object):'
44 41 self.project = project
45 42 self.request = request
46 43 try:
47 self.client = jira.client.JIRA(options={'server': host_name},
48 basic_auth=(user_name, password))
44 self.client = jira.client.JIRA(
45 options={"server": host_name}, basic_auth=(user_name, password)
46 )
49 47 except jira.JIRAError as e:
50 48 raise IntegrationException(
51 'Communication problem: HTTP_STATUS:%s, URL:%s ' % (
52 e.status_code, e.url))
49 "Communication problem: HTTP_STATUS:%s, URL:%s "
50 % (e.status_code, e.url)
51 )
53 52
54 53 def get_projects(self):
55 54 projects = self.client.projects()
@@ -58,42 +57,42 b' class JiraClient(object):'
58 57 def get_assignees(self, request):
59 58 """Gets list of possible assignees"""
60 59 cache_region = request.registry.cache_regions.redis_sec_30
61 @cache_region.cache_on_arguments('JiraClient.get_assignees')
60
61 @cache_region.cache_on_arguments("JiraClient.get_assignees")
62 62 def cached(project_name):
63 63 users = self.client.search_assignable_users_for_issues(
64 None, project=project_name)
64 None, project=project_name
65 )
65 66 results = []
66 67 for user in users:
67 68 results.append({"id": user.name, "name": user.displayName})
68 69 return results
70
69 71 return cached(self.project)
70 72
71 73 def get_issue_types(self, request):
72 74 metadata = self.get_metadata(request)
73 75 assignees = self.get_assignees(request)
74 76 parsed_metadata = []
75 for entry in metadata['projects'][0]['issuetypes']:
76 issue = {"name": entry['name'],
77 "id": entry['id'],
78 "fields": []}
79 for i_id, field_i in entry['fields'].items():
77 for entry in metadata["projects"][0]["issuetypes"]:
78 issue = {"name": entry["name"], "id": entry["id"], "fields": []}
79 for i_id, field_i in entry["fields"].items():
80 80 field = {
81 "name": field_i['name'],
81 "name": field_i["name"],
82 82 "id": i_id,
83 "required": field_i['required'],
83 "required": field_i["required"],
84 84 "values": [],
85 "type": field_i['schema'].get('type')
85 "type": field_i["schema"].get("type"),
86 86 }
87 if field_i.get('allowedValues'):
88 field['values'] = []
89 for i in field_i['allowedValues']:
90 field['values'].append(
91 {'id': i['id'],
92 'name': i.get('name', i.get('value', ''))
93 })
94 if field['id'] == 'assignee':
95 field['values'] = assignees
96 issue['fields'].append(field)
87 if field_i.get("allowedValues"):
88 field["values"] = []
89 for i in field_i["allowedValues"]:
90 field["values"].append(
91 {"id": i["id"], "name": i.get("name", i.get("value", ""))}
92 )
93 if field["id"] == "assignee":
94 field["values"] = assignees
95 issue["fields"].append(field)
97 96 parsed_metadata.append(issue)
98 97 return parsed_metadata
99 98
@@ -102,35 +101,37 b' class JiraClient(object):'
102 101 # @cache_region.cache_on_arguments('JiraClient.get_metadata')
103 102 def cached(project_name):
104 103 return self.client.createmeta(
105 projectKeys=project_name, expand='projects.issuetypes.fields')
104 projectKeys=project_name, expand="projects.issuetypes.fields"
105 )
106
106 107 return cached(self.project)
107 108
108 109 def create_issue(self, form_data, request):
109 110 issue_types = self.get_issue_types(request)
110 111 payload = {
111 'project': {'key': form_data['project']},
112 'summary': form_data['title'],
113 'description': form_data['content'],
114 'issuetype': {'id': form_data['issue_type']},
115 "priority": {'id': form_data['priority']},
116 "assignee": {'name': form_data['responsible']},
112 "project": {"key": form_data["project"]},
113 "summary": form_data["title"],
114 "description": form_data["content"],
115 "issuetype": {"id": form_data["issue_type"]},
116 "priority": {"id": form_data["priority"]},
117 "assignee": {"name": form_data["responsible"]},
117 118 }
118 119 for issue_type in issue_types:
119 if issue_type['id'] == form_data['issue_type']:
120 for field in issue_type['fields']:
120 if issue_type["id"] == form_data["issue_type"]:
121 for field in issue_type["fields"]:
121 122 # set some defaults for other required fields
122 if field == 'reporter':
123 payload["reporter"] = {'id': self.user_name}
124 if field['required'] and field['id'] not in payload:
125 if field['type'] == 'array':
126 payload[field['id']] = [field['values'][0], ]
127 elif field['type'] == 'string':
128 payload[field['id']] = ''
123 if field == "reporter":
124 payload["reporter"] = {"id": self.user_name}
125 if field["required"] and field["id"] not in payload:
126 if field["type"] == "array":
127 payload[field["id"]] = [field["values"][0]]
128 elif field["type"] == "string":
129 payload[field["id"]] = ""
129 130 new_issue = self.client.create_issue(fields=payload)
130 web_url = self.host_name + '/browse/' + new_issue.key
131 web_url = self.host_name + "/browse/" + new_issue.key
131 132 to_return = {
132 'id': new_issue.id,
133 'resource_url': new_issue.self,
134 'web_url': web_url
133 "id": new_issue.id,
134 "resource_url": new_issue.self,
135 "web_url": web_url,
135 136 }
136 137 return to_return
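
The Jira payload built above maps form fields onto the shapes the `jira` library expects: nested dicts keyed by "key", "id", or "name". A hypothetical equivalent using the same library calls shown in this diff (server, credentials, and ids are placeholders):

    import jira

    payload = {
        "project": {"key": "DEMO"},
        "summary": "Example crash",
        "description": "Traceback (most recent call last): ...",
        "issuetype": {"id": "1"},
        "priority": {"id": "3"},
        "assignee": {"name": "demo-user"},
    }
    client = jira.client.JIRA(
        options={"server": "https://jira.example.com"},
        basic_auth=("demo-user", "password"),
    )
    new_issue = client.create_issue(fields=payload)
    print(new_issue.key)
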
@@ -18,8 +18,7 b' import logging'
18 18
19 19 import requests
20 20
21 from appenlight.models.integrations import (IntegrationBase,
22 IntegrationException)
21 from appenlight.models.integrations import IntegrationBase, IntegrationException
23 22 from appenlight.lib.ext_json import json
24 23
25 24 _ = str
@@ -32,14 +31,12 b' class NotFoundException(Exception):'
32 31
33 32
34 33 class SlackIntegration(IntegrationBase):
35 __mapper_args__ = {
36 'polymorphic_identity': 'slack'
37 }
34 __mapper_args__ = {"polymorphic_identity": "slack"}
38 35 front_visible = False
39 36 as_alert_channel = True
40 37 supports_report_alerting = True
41 38 action_notification = True
42 integration_action = 'Message via Slack'
39 integration_action = "Message via Slack"
43 40
44 41 @classmethod
45 42 def create_client(cls, api_token):
@@ -52,23 +49,17 b' class SlackClient(object):'
52 49 self.api_url = api_url
53 50
54 51 def make_request(self, data=None):
55 headers = {
56 'User-Agent': 'appenlight-slack',
57 'Content-Type': 'application/json'
58 }
52 headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"}
59 53 try:
60 resp = getattr(requests, 'post')(self.api_url,
61 data=json.dumps(data),
62 headers=headers,
63 timeout=3)
54 resp = getattr(requests, "post")(
55 self.api_url, data=json.dumps(data), headers=headers, timeout=3
56 )
64 57 except Exception as e:
65 raise IntegrationException(
66 _('Error communicating with Slack: %s') % (e,))
58 raise IntegrationException(_("Error communicating with Slack: %s") % (e,))
67 59 if resp.status_code != requests.codes.ok:
68 msg = 'Error communicating with Slack - status code: %s'
60 msg = "Error communicating with Slack - status code: %s"
69 61 raise IntegrationException(msg % resp.status_code)
70 62 return resp
71 63
72 64 def send(self, payload):
73 return self.make_request('/rooms/message', method='post',
74 data=payload).json()
65 return self.make_request("/rooms/message", method="post", data=payload).json()
@@ -18,8 +18,7 b' import logging'
18 18
19 19 import requests
20 20
21 from appenlight.models.integrations import (IntegrationBase,
22 IntegrationException)
21 from appenlight.models.integrations import IntegrationBase, IntegrationException
23 22 from appenlight.models.alert_channel import AlertChannel
24 23 from appenlight.lib.ext_json import json
25 24
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33 32
34 33
35 34 class WebhooksIntegration(IntegrationBase):
36 __mapper_args__ = {
37 'polymorphic_identity': 'webhooks'
38 }
35 __mapper_args__ = {"polymorphic_identity": "webhooks"}
39 36 front_visible = False
40 37 as_alert_channel = True
41 38 supports_report_alerting = True
42 39 action_notification = True
43 integration_action = 'Message via Webhooks'
40 integration_action = "Message via Webhooks"
44 41
45 42 @classmethod
46 43 def create_client(cls, url):
@@ -52,34 +49,33 b' class WebhooksClient(object):'
52 49 def __init__(self, url):
53 50 self.api_url = url
54 51
55 def make_request(self, url, method='get', data=None):
52 def make_request(self, url, method="get", data=None):
56 53 headers = {
57 'Content-Type': 'application/json',
58 'User-Agent': 'appenlight-webhooks',
54 "Content-Type": "application/json",
55 "User-Agent": "appenlight-webhooks",
59 56 }
60 57 try:
61 58 if data:
62 59 data = json.dumps(data)
63 resp = getattr(requests, method)(url, data=data, headers=headers,
64 timeout=3)
60 resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3)
65 61 except Exception as e:
66 62 raise IntegrationException(
67 _('Error communicating with Webhooks: {}').format(e))
63 _("Error communicating with Webhooks: {}").format(e)
64 )
68 65 if resp.status_code > 299:
69 66 raise IntegrationException(
70 'Error communicating with Webhooks - status code: {}'.format(
71 resp.status_code))
67 "Error communicating with Webhooks - status code: {}".format(
68 resp.status_code
69 )
70 )
72 71 return resp
73 72
74 73 def send_to_hook(self, payload):
75 return self.make_request(self.api_url, method='post',
76 data=payload).json()
74 return self.make_request(self.api_url, method="post", data=payload).json()
77 75
78 76
79 77 class WebhooksAlertChannel(AlertChannel):
80 __mapper_args__ = {
81 'polymorphic_identity': 'webhooks'
82 }
78 __mapper_args__ = {"polymorphic_identity": "webhooks"}
83 79
84 80 def notify_reports(self, **kwargs):
85 81 """
@@ -95,17 +91,28 @@ class WebhooksAlertChannel(AlertChannel):
95 91 """
96 92 template_vars = self.get_notification_basic_vars(kwargs)
97 93 payload = []
98 include_keys = ('id', 'http_status', 'report_type', 'resource_name',
99 'front_url', 'resource_id', 'error', 'url_path',
100 'tags', 'duration')
101
102 for occurences, report in kwargs['reports']:
103 r_dict = report.last_report_ref.get_dict(kwargs['request'],
104 include_keys=include_keys)
105 r_dict['group']['occurences'] = occurences
94 include_keys = (
95 "id",
96 "http_status",
97 "report_type",
98 "resource_name",
99 "front_url",
100 "resource_id",
101 "error",
102 "url_path",
103 "tags",
104 "duration",
105 )
106
107 for occurences, report in kwargs["reports"]:
108 r_dict = report.last_report_ref.get_dict(
109 kwargs["request"], include_keys=include_keys
110 )
111 r_dict["group"]["occurences"] = occurences
106 112 payload.append(r_dict)
107 113 client = WebhooksIntegration.create_client(
108 self.integration.config['reports_webhook'])
114 self.integration.config["reports_webhook"]
115 )
109 116 client.send_to_hook(payload)
110 117
111 118 def notify_alert(self, **kwargs):
@@ -120,19 +127,19 @@ class WebhooksAlertChannel(AlertChannel):
120 127
121 128 """
122 129 payload = {
123 'alert_action': kwargs['event'].unified_alert_action(),
124 'alert_name': kwargs['event'].unified_alert_name(),
125 'event_time': kwargs['event'].end_date or kwargs[
126 'event'].start_date,
127 'resource_name': None,
128 'resource_id': None
130 "alert_action": kwargs["event"].unified_alert_action(),
131 "alert_name": kwargs["event"].unified_alert_name(),
132 "event_time": kwargs["event"].end_date or kwargs["event"].start_date,
133 "resource_name": None,
134 "resource_id": None,
129 135 }
130 if kwargs['event'].values and kwargs['event'].values.get('reports'):
131 payload['reports'] = kwargs['event'].values.get('reports', [])
132 if 'application' in kwargs:
133 payload['resource_name'] = kwargs['application'].resource_name
134 payload['resource_id'] = kwargs['application'].resource_id
136 if kwargs["event"].values and kwargs["event"].values.get("reports"):
137 payload["reports"] = kwargs["event"].values.get("reports", [])
138 if "application" in kwargs:
139 payload["resource_name"] = kwargs["application"].resource_name
140 payload["resource_id"] = kwargs["application"].resource_id
135 141
136 142 client = WebhooksIntegration.create_client(
137 self.integration.config['alerts_webhook'])
143 self.integration.config["alerts_webhook"]
144 )
138 145 client.send_to_hook(payload)
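`WebhooksClient.make_request` picks the HTTP verb at runtime via `getattr(requests, method)` and treats any status above 299 as a failure. A self-contained sketch of that dispatch, with a plain `RuntimeError` standing in for `IntegrationException` and an illustrative endpoint:

    import json

    import requests

    def make_request(url, method="get", data=None):
        headers = {"Content-Type": "application/json", "User-Agent": "appenlight-webhooks"}
        if data:
            data = json.dumps(data)
        # getattr(requests, "post") is requests.post, so the verb is chosen by name
        resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3)
        if resp.status_code > 299:
            raise RuntimeError(
                "Error communicating with Webhooks - status code: {}".format(resp.status_code)
            )
        return resp

    # hypothetical usage:
    # make_request("https://example.com/hooks/appenlight", method="post", data=[{"id": 1}])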
@@ -29,21 +29,23 @@ log = logging.getLogger(__name__)
29 29
30 30
31 31 class Log(Base, BaseModel):
32 __tablename__ = 'logs'
33 __table_args__ = {'implicit_returning': False}
32 __tablename__ = "logs"
33 __table_args__ = {"implicit_returning": False}
34 34
35 35 log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
36 resource_id = sa.Column(sa.Integer(),
37 sa.ForeignKey('applications.resource_id',
38 onupdate='CASCADE',
39 ondelete='CASCADE'),
40 nullable=False,
41 index=True)
42 log_level = sa.Column(sa.Unicode, nullable=False, index=True,
43 default='INFO')
44 message = sa.Column(sa.UnicodeText(), default='')
45 timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
46 server_default=sa.func.now())
36 resource_id = sa.Column(
37 sa.Integer(),
38 sa.ForeignKey(
39 "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
40 ),
41 nullable=False,
42 index=True,
43 )
44 log_level = sa.Column(sa.Unicode, nullable=False, index=True, default="INFO")
45 message = sa.Column(sa.UnicodeText(), default="")
46 timestamp = sa.Column(
47 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
48 )
47 49 request_id = sa.Column(sa.Unicode())
48 50 namespace = sa.Column(sa.Unicode())
49 51 primary_key = sa.Column(sa.Unicode())
@@ -52,39 +54,40 @@ class Log(Base, BaseModel):
52 54 permanent = sa.Column(sa.Boolean(), nullable=False, default=False)
53 55
54 56 def __str__(self):
55 return self.__unicode__().encode('utf8')
57 return self.__unicode__().encode("utf8")
56 58
57 59 def __unicode__(self):
58 return '<Log id:%s, lv:%s, ns:%s >' % (
59 self.log_id, self.log_level, self.namespace)
60 return "<Log id:%s, lv:%s, ns:%s >" % (
61 self.log_id,
62 self.log_level,
63 self.namespace,
64 )
60 65
61 66 def set_data(self, data, resource):
62 level = data.get('log_level').upper()
67 level = data.get("log_level").upper()
63 68 self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN)
64 self.message = data.get('message', '')
65 server_name = data.get('server', '').lower() or 'unknown'
66 self.tags = {
67 'server_name': server_name
68 }
69 if data.get('tags'):
70 for tag_tuple in data['tags']:
69 self.message = data.get("message", "")
70 server_name = data.get("server", "").lower() or "unknown"
71 self.tags = {"server_name": server_name}
72 if data.get("tags"):
73 for tag_tuple in data["tags"]:
71 74 self.tags[tag_tuple[0]] = tag_tuple[1]
72 self.timestamp = data['date']
73 r_id = data.get('request_id', '')
75 self.timestamp = data["date"]
76 r_id = data.get("request_id", "")
74 77 if not r_id:
75 r_id = ''
76 self.request_id = r_id.replace('-', '')
78 r_id = ""
79 self.request_id = r_id.replace("-", "")
77 80 self.resource_id = resource.resource_id
78 self.namespace = data.get('namespace') or ''
79 self.permanent = data.get('permanent')
80 self.primary_key = data.get('primary_key')
81 self.namespace = data.get("namespace") or ""
82 self.permanent = data.get("permanent")
83 self.primary_key = data.get("primary_key")
81 84 if self.primary_key is not None:
82 self.tags['appenlight_primary_key'] = self.primary_key
85 self.tags["appenlight_primary_key"] = self.primary_key
83 86
84 87 def get_dict(self):
85 88 instance_dict = super(Log, self).get_dict()
86 instance_dict['log_level'] = LogLevel.key_from_value(self.log_level)
87 instance_dict['resource_name'] = self.application.resource_name
89 instance_dict["log_level"] = LogLevel.key_from_value(self.log_level)
90 instance_dict["resource_name"] = self.application.resource_name
88 91 return instance_dict
89 92
90 93 @property
@@ -92,39 +95,38 @@ class Log(Base, BaseModel):
92 95 if not self.primary_key:
93 96 return None
94 97
95 to_hash = '{}_{}_{}'.format(self.resource_id, self.primary_key,
96 self.namespace)
97 return hashlib.sha1(to_hash.encode('utf8')).hexdigest()
98 to_hash = "{}_{}_{}".format(self.resource_id, self.primary_key, self.namespace)
99 return hashlib.sha1(to_hash.encode("utf8")).hexdigest()
98 100
99 101 def es_doc(self):
100 102 tags = {}
101 103 tag_list = []
102 104 for name, value in self.tags.items():
103 105 # replace dot in indexed tag name
104 name = name.replace('.', '_')
106 name = name.replace(".", "_")
105 107 tag_list.append(name)
106 108 tags[name] = {
107 109 "values": convert_es_type(value),
108 "numeric_values": value if (
109 isinstance(value, (int, float)) and
110 not isinstance(value, bool)) else None
110 "numeric_values": value
111 if (isinstance(value, (int, float)) and not isinstance(value, bool))
112 else None,
111 113 }
112 114 return {
113 'pg_id': str(self.log_id),
114 'delete_hash': self.delete_hash,
115 'resource_id': self.resource_id,
116 'request_id': self.request_id,
117 'log_level': LogLevel.key_from_value(self.log_level),
118 'timestamp': self.timestamp,
119 'message': self.message if self.message else '',
120 'namespace': self.namespace if self.namespace else '',
121 'tags': tags,
122 'tag_list': tag_list
115 "pg_id": str(self.log_id),
116 "delete_hash": self.delete_hash,
117 "resource_id": self.resource_id,
118 "request_id": self.request_id,
119 "log_level": LogLevel.key_from_value(self.log_level),
120 "timestamp": self.timestamp,
121 "message": self.message if self.message else "",
122 "namespace": self.namespace if self.namespace else "",
123 "tags": tags,
124 "tag_list": tag_list,
123 125 }
124 126
125 127 @property
126 128 def partition_id(self):
127 129 if self.permanent:
128 return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m')
130 return "rcae_l_%s" % self.timestamp.strftime("%Y_%m")
129 131 else:
130 return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m_%d')
132 return "rcae_l_%s" % self.timestamp.strftime("%Y_%m_%d")
@@ -25,40 +25,44 @@ from appenlight.models import Base
25 25
26 26
27 27 class Metric(Base, BaseModel):
28 __tablename__ = 'metrics'
29 __table_args__ = {'implicit_returning': False}
28 __tablename__ = "metrics"
29 __table_args__ = {"implicit_returning": False}
30 30
31 31 pkey = sa.Column(sa.BigInteger(), primary_key=True)
32 resource_id = sa.Column(sa.Integer(),
33 sa.ForeignKey('applications.resource_id'),
34 nullable=False, primary_key=True)
35 timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
36 server_default=sa.func.now())
32 resource_id = sa.Column(
33 sa.Integer(),
34 sa.ForeignKey("applications.resource_id"),
35 nullable=False,
36 primary_key=True,
37 )
38 timestamp = sa.Column(
39 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
40 )
37 41 tags = sa.Column(JSON(), default={})
38 42 namespace = sa.Column(sa.Unicode(255))
39 43
40 44 @property
41 45 def partition_id(self):
42 return 'rcae_m_%s' % self.timestamp.strftime('%Y_%m_%d')
46 return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d")
43 47
44 48 def es_doc(self):
45 49 tags = {}
46 50 tag_list = []
47 51 for name, value in self.tags.items():
48 52 # replace dot in indexed tag name
49 name = name.replace('.', '_')
53 name = name.replace(".", "_")
50 54 tag_list.append(name)
51 55 tags[name] = {
52 56 "values": convert_es_type(value),
53 "numeric_values": value if (
54 isinstance(value, (int, float)) and
55 not isinstance(value, bool)) else None
57 "numeric_values": value
58 if (isinstance(value, (int, float)) and not isinstance(value, bool))
59 else None,
56 60 }
57 61
58 62 return {
59 'resource_id': self.resource_id,
60 'timestamp': self.timestamp,
61 'namespace': self.namespace,
62 'tags': tags,
63 'tag_list': tag_list
63 "resource_id": self.resource_id,
64 "timestamp": self.timestamp,
65 "namespace": self.namespace,
66 "tags": tags,
67 "tag_list": tag_list,
64 68 }
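Both `Log.es_doc` and `Metric.es_doc` build the same per-tag structure: dots in tag names are replaced (dots in field names were problematic in the Elasticsearch versions this code targets), and `numeric_values` is only populated for genuine numbers. The explicit `isinstance(value, bool)` exclusion matters because `bool` is a subclass of `int` in Python. A sketch with `convert_es_type` elided:

    def tag_entry(value):
        # True/False would otherwise pass the int check, since bool subclasses int
        is_number = isinstance(value, (int, float)) and not isinstance(value, bool)
        return {"values": value, "numeric_values": value if is_number else None}

    assert tag_entry(3.5)["numeric_values"] == 3.5
    assert tag_entry(True)["numeric_values"] is None
    assert tag_entry("web01")["numeric_values"] is None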
@@ -22,19 +22,19 @@ from . import Base
22 22
23 23
24 24 class PluginConfig(Base, BaseModel):
25 __tablename__ = 'plugin_configs'
25 __tablename__ = "plugin_configs"
26 26
27 27 id = sa.Column(sa.Integer, primary_key=True)
28 28 plugin_name = sa.Column(sa.Unicode)
29 29 section = sa.Column(sa.Unicode)
30 30 config = sa.Column(JSON, nullable=False)
31 resource_id = sa.Column(sa.Integer(),
32 sa.ForeignKey('resources.resource_id',
33 onupdate='cascade',
34 ondelete='cascade'))
35 owner_id = sa.Column(sa.Integer(),
36 sa.ForeignKey('users.id', onupdate='cascade',
37 ondelete='cascade'))
31 resource_id = sa.Column(
32 sa.Integer(),
33 sa.ForeignKey("resources.resource_id", onupdate="cascade", ondelete="cascade"),
34 )
35 owner_id = sa.Column(
36 sa.Integer(), sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade")
37 )
38 38
39 39 def __json__(self, request):
40 40 return self.get_dict()
This diff has been collapsed as it changes many lines (531 lines changed).
@@ -36,51 +36,55 @@ from ziggurat_foundations.models.base import BaseModel
36 36 log = logging.getLogger(__name__)
37 37
38 38 REPORT_TYPE_MATRIX = {
39 'http_status': {"type": 'int',
40 "ops": ('eq', 'ne', 'ge', 'le',)},
41 'group:priority': {"type": 'int',
42 "ops": ('eq', 'ne', 'ge', 'le',)},
43 'duration': {"type": 'float',
44 "ops": ('ge', 'le',)},
45 'url_domain': {"type": 'unicode',
46 "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
47 'url_path': {"type": 'unicode',
48 "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
49 'error': {"type": 'unicode',
50 "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
51 'tags:server_name': {"type": 'unicode',
52 "ops": ('eq', 'ne', 'startswith', 'endswith',
53 'contains',)},
54 'traceback': {"type": 'unicode',
55 "ops": ('contains',)},
56 'group:occurences': {"type": 'int',
57 "ops": ('eq', 'ne', 'ge', 'le',)}
39 "http_status": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
40 "group:priority": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
41 "duration": {"type": "float", "ops": ("ge", "le")},
42 "url_domain": {
43 "type": "unicode",
44 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
45 },
46 "url_path": {
47 "type": "unicode",
48 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
49 },
50 "error": {
51 "type": "unicode",
52 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
53 },
54 "tags:server_name": {
55 "type": "unicode",
56 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
57 },
58 "traceback": {"type": "unicode", "ops": ("contains",)},
59 "group:occurences": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
58 60 }
59 61
60 62
61 63 class Report(Base, BaseModel):
62 __tablename__ = 'reports'
63 __table_args__ = {'implicit_returning': False}
64 __tablename__ = "reports"
65 __table_args__ = {"implicit_returning": False}
64 66
65 67 id = sa.Column(sa.Integer, nullable=False, primary_key=True)
66 group_id = sa.Column(sa.BigInteger,
67 sa.ForeignKey('reports_groups.id', ondelete='cascade',
68 onupdate='cascade'))
68 group_id = sa.Column(
69 sa.BigInteger,
70 sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
71 )
69 72 resource_id = sa.Column(sa.Integer(), nullable=False, index=True)
70 73 report_type = sa.Column(sa.Integer(), nullable=False, index=True)
71 74 error = sa.Column(sa.UnicodeText(), index=True)
72 75 extra = sa.Column(JSON(), default={})
73 76 request = sa.Column(JSON(), nullable=False, default={})
74 ip = sa.Column(sa.String(39), index=True, default='')
75 username = sa.Column(sa.Unicode(255), default='')
76 user_agent = sa.Column(sa.Unicode(255), default='')
77 ip = sa.Column(sa.String(39), index=True, default="")
78 username = sa.Column(sa.Unicode(255), default="")
79 user_agent = sa.Column(sa.Unicode(255), default="")
77 80 url = sa.Column(sa.UnicodeText(), index=True)
78 81 request_id = sa.Column(sa.Text())
79 82 request_stats = sa.Column(JSON(), nullable=False, default={})
80 83 traceback = sa.Column(JSON(), nullable=False, default=None)
81 84 traceback_hash = sa.Column(sa.Text())
82 start_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
83 server_default=sa.func.now())
85 start_time = sa.Column(
86 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
87 )
84 88 end_time = sa.Column(sa.DateTime())
85 89 duration = sa.Column(sa.Float, default=0)
86 90 http_status = sa.Column(sa.Integer, index=True)
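`REPORT_TYPE_MATRIX` declares, for each filterable report field, its value type and the comparison operators it supports; postprocessing rules are validated against it (see the `Rule(action.rule, REPORT_TYPE_MATRIX)` call further down). A small sketch of the kind of check such a matrix enables — `allowed_op` is a hypothetical helper, not part of this diff:

    REPORT_TYPE_MATRIX = {
        "http_status": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
        "duration": {"type": "float", "ops": ("ge", "le")},
        "error": {"type": "unicode", "ops": ("eq", "ne", "startswith", "endswith", "contains")},
    }

    def allowed_op(field, op):
        # a field/operator pair is legal only if the matrix lists it
        spec = REPORT_TYPE_MATRIX.get(field)
        return spec is not None and op in spec["ops"]

    assert allowed_op("duration", "ge")
    assert not allowed_op("duration", "contains")
    assert not allowed_op("no_such_field", "eq")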
@@ -89,99 +93,104 @@ class Report(Base, BaseModel):
89 93 tags = sa.Column(JSON(), nullable=False, default={})
90 94 language = sa.Column(sa.Integer(), default=0)
91 95 # this is used to determine partition for the report
92 report_group_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
93 server_default=sa.func.now())
96 report_group_time = sa.Column(
97 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
98 )
94 99
95 100 logs = sa.orm.relationship(
96 'Log',
97 lazy='dynamic',
101 "Log",
102 lazy="dynamic",
98 103 passive_deletes=True,
99 104 passive_updates=True,
100 105 primaryjoin="and_(Report.request_id==Log.request_id, "
101 "Log.request_id != None, Log.request_id != '')",
102 foreign_keys='[Log.request_id]')
103
104 slow_calls = sa.orm.relationship('SlowCall',
105 backref='detail',
106 cascade="all, delete-orphan",
107 passive_deletes=True,
108 passive_updates=True,
109 order_by='SlowCall.timestamp')
106 "Log.request_id != None, Log.request_id != '')",
107 foreign_keys="[Log.request_id]",
108 )
109
110 slow_calls = sa.orm.relationship(
111 "SlowCall",
112 backref="detail",
113 cascade="all, delete-orphan",
114 passive_deletes=True,
115 passive_updates=True,
116 order_by="SlowCall.timestamp",
117 )
110 118
111 119 def set_data(self, data, resource, protocol_version=None):
112 self.http_status = data['http_status']
113 self.priority = data['priority']
114 self.error = data['error']
115 report_language = data.get('language', '').lower()
120 self.http_status = data["http_status"]
121 self.priority = data["priority"]
122 self.error = data["error"]
123 report_language = data.get("language", "").lower()
116 124 self.language = getattr(Language, report_language, Language.unknown)
117 125 # we need temp holder here to decide later
118 126 # if we want to to commit the tags if report is marked for creation
119 self.tags = {
120 'server_name': data['server'],
121 'view_name': data['view_name']
122 }
123 if data.get('tags'):
124 for tag_tuple in data['tags']:
127 self.tags = {"server_name": data["server"], "view_name": data["view_name"]}
128 if data.get("tags"):
129 for tag_tuple in data["tags"]:
125 130 self.tags[tag_tuple[0]] = tag_tuple[1]
126 self.traceback = data['traceback']
131 self.traceback = data["traceback"]
127 132 stripped_traceback = self.stripped_traceback()
128 tb_repr = repr(stripped_traceback).encode('utf8')
133 tb_repr = repr(stripped_traceback).encode("utf8")
129 134 self.traceback_hash = hashlib.sha1(tb_repr).hexdigest()
130 url_info = urllib.parse.urlsplit(
131 data.get('url', ''), allow_fragments=False)
135 url_info = urllib.parse.urlsplit(data.get("url", ""), allow_fragments=False)
132 136 self.url_domain = url_info.netloc[:128]
133 137 self.url_path = url_info.path[:2048]
134 self.occurences = data['occurences']
138 self.occurences = data["occurences"]
135 139 if self.error:
136 140 self.report_type = ReportType.error
137 141 else:
138 142 self.report_type = ReportType.slow
139 143
140 144 # but if its status 404 its 404 type
141 if self.http_status in [404, '404'] or self.error == '404 Not Found':
145 if self.http_status in [404, "404"] or self.error == "404 Not Found":
142 146 self.report_type = ReportType.not_found
143 self.error = ''
147 self.error = ""
144 148
145 self.generate_grouping_hash(data.get('appenlight.group_string',
146 data.get('group_string')),
147 resource.default_grouping,
148 protocol_version)
149 self.generate_grouping_hash(
150 data.get("appenlight.group_string", data.get("group_string")),
151 resource.default_grouping,
152 protocol_version,
153 )
149 154
150 155 # details
151 if data['http_status'] in [404, '404']:
152 data = {"username": data["username"],
153 "ip": data["ip"],
154 "url": data["url"],
155 "user_agent": data["user_agent"]}
156 if data.get('HTTP_REFERER') or data.get('http_referer'):
157 data['HTTP_REFERER'] = data.get(
158 'HTTP_REFERER', '') or data.get('http_referer', '')
156 if data["http_status"] in [404, "404"]:
157 data = {
158 "username": data["username"],
159 "ip": data["ip"],
160 "url": data["url"],
161 "user_agent": data["user_agent"],
162 }
163 if data.get("HTTP_REFERER") or data.get("http_referer"):
164 data["HTTP_REFERER"] = data.get("HTTP_REFERER", "") or data.get(
165 "http_referer", ""
166 )
159 167
160 168 self.resource_id = resource.resource_id
161 self.username = data['username']
162 self.user_agent = data['user_agent']
163 self.ip = data['ip']
169 self.username = data["username"]
170 self.user_agent = data["user_agent"]
171 self.ip = data["ip"]
164 172 self.extra = {}
165 if data.get('extra'):
166 for extra_tuple in data['extra']:
173 if data.get("extra"):
174 for extra_tuple in data["extra"]:
167 175 self.extra[extra_tuple[0]] = extra_tuple[1]
168 176
169 self.url = data['url']
170 self.request_id = data.get('request_id', '').replace('-', '') or str(
171 uuid.uuid4())
172 request_data = data.get('request', {})
177 self.url = data["url"]
178 self.request_id = data.get("request_id", "").replace("-", "") or str(
179 uuid.uuid4()
180 )
181 request_data = data.get("request", {})
173 182
174 183 self.request = request_data
175 self.request_stats = data.get('request_stats', {})
176 traceback = data.get('traceback')
184 self.request_stats = data.get("request_stats", {})
185 traceback = data.get("traceback")
177 186 if not traceback:
178 traceback = data.get('frameinfo')
187 traceback = data.get("frameinfo")
179 188 self.traceback = traceback
180 start_date = convert_date(data.get('start_time'))
189 start_date = convert_date(data.get("start_time"))
181 190 if not self.start_time or self.start_time < start_date:
182 191 self.start_time = start_date
183 192
184 self.end_time = convert_date(data.get('end_time'), False)
193 self.end_time = convert_date(data.get("end_time"), False)
185 194 self.duration = 0
186 195
187 196 if self.start_time and self.end_time:
@@ -190,81 +199,85 @@ class Report(Base, BaseModel):
190 199
191 200 # update tags with other vars
192 201 if self.username:
193 self.tags['user_name'] = self.username
194 self.tags['report_language'] = Language.key_from_value(self.language)
202 self.tags["user_name"] = self.username
203 self.tags["report_language"] = Language.key_from_value(self.language)
195 204
196 205 def add_slow_calls(self, data, report_group):
197 206 slow_calls = []
198 for call in data.get('slow_calls', []):
207 for call in data.get("slow_calls", []):
199 208 sc_inst = SlowCall()
200 sc_inst.set_data(call, resource_id=self.resource_id,
201 report_group=report_group)
209 sc_inst.set_data(
210 call, resource_id=self.resource_id, report_group=report_group
211 )
202 212 slow_calls.append(sc_inst)
203 213 self.slow_calls.extend(slow_calls)
204 214 return slow_calls
205 215
206 def get_dict(self, request, details=False, exclude_keys=None,
207 include_keys=None):
216 def get_dict(self, request, details=False, exclude_keys=None, include_keys=None):
208 217 from appenlight.models.services.report_group import ReportGroupService
218
209 219 instance_dict = super(Report, self).get_dict()
210 instance_dict['req_stats'] = self.req_stats()
211 instance_dict['group'] = {}
212 instance_dict['group']['id'] = self.report_group.id
213 instance_dict['group'][
214 'total_reports'] = self.report_group.total_reports
215 instance_dict['group']['last_report'] = self.report_group.last_report
216 instance_dict['group']['priority'] = self.report_group.priority
217 instance_dict['group']['occurences'] = self.report_group.occurences
218 instance_dict['group'][
219 'last_timestamp'] = self.report_group.last_timestamp
220 instance_dict['group'][
221 'first_timestamp'] = self.report_group.first_timestamp
222 instance_dict['group']['public'] = self.report_group.public
223 instance_dict['group']['fixed'] = self.report_group.fixed
224 instance_dict['group']['read'] = self.report_group.read
225 instance_dict['group'][
226 'average_duration'] = self.report_group.average_duration
227
228 instance_dict[
229 'resource_name'] = self.report_group.application.resource_name
230 instance_dict['report_type'] = self.report_type
231
232 if instance_dict['http_status'] == 404 and not instance_dict['error']:
233 instance_dict['error'] = '404 Not Found'
220 instance_dict["req_stats"] = self.req_stats()
221 instance_dict["group"] = {}
222 instance_dict["group"]["id"] = self.report_group.id
223 instance_dict["group"]["total_reports"] = self.report_group.total_reports
224 instance_dict["group"]["last_report"] = self.report_group.last_report
225 instance_dict["group"]["priority"] = self.report_group.priority
226 instance_dict["group"]["occurences"] = self.report_group.occurences
227 instance_dict["group"]["last_timestamp"] = self.report_group.last_timestamp
228 instance_dict["group"]["first_timestamp"] = self.report_group.first_timestamp
229 instance_dict["group"]["public"] = self.report_group.public
230 instance_dict["group"]["fixed"] = self.report_group.fixed
231 instance_dict["group"]["read"] = self.report_group.read
232 instance_dict["group"]["average_duration"] = self.report_group.average_duration
233
234 instance_dict["resource_name"] = self.report_group.application.resource_name
235 instance_dict["report_type"] = self.report_type
236
237 if instance_dict["http_status"] == 404 and not instance_dict["error"]:
238 instance_dict["error"] = "404 Not Found"
234 239
235 240 if details:
236 instance_dict['affected_users_count'] = \
237 ReportGroupService.affected_users_count(self.report_group)
238 instance_dict['top_affected_users'] = [
239 {'username': u.username, 'count': u.count} for u in
240 ReportGroupService.top_affected_users(self.report_group)]
241 instance_dict['application'] = {'integrations': []}
241 instance_dict[
242 "affected_users_count"
243 ] = ReportGroupService.affected_users_count(self.report_group)
244 instance_dict["top_affected_users"] = [
245 {"username": u.username, "count": u.count}
246 for u in ReportGroupService.top_affected_users(self.report_group)
247 ]
248 instance_dict["application"] = {"integrations": []}
242 249 for integration in self.report_group.application.integrations:
243 250 if integration.front_visible:
244 instance_dict['application']['integrations'].append(
245 {'name': integration.integration_name,
246 'action': integration.integration_action})
247 instance_dict['comments'] = [c.get_dict() for c in
248 self.report_group.comments]
249
250 instance_dict['group']['next_report'] = None
251 instance_dict['group']['previous_report'] = None
251 instance_dict["application"]["integrations"].append(
252 {
253 "name": integration.integration_name,
254 "action": integration.integration_action,
255 }
256 )
257 instance_dict["comments"] = [
258 c.get_dict() for c in self.report_group.comments
259 ]
260
261 instance_dict["group"]["next_report"] = None
262 instance_dict["group"]["previous_report"] = None
252 263 next_in_group = self.get_next_in_group(request)
253 264 previous_in_group = self.get_previous_in_group(request)
254 265 if next_in_group:
255 instance_dict['group']['next_report'] = next_in_group
266 instance_dict["group"]["next_report"] = next_in_group
256 267 if previous_in_group:
257 instance_dict['group']['previous_report'] = previous_in_group
268 instance_dict["group"]["previous_report"] = previous_in_group
258 269
259 270 # slow call ordering
260 271 def find_parent(row, data):
261 272 for r in reversed(data):
262 273 try:
263 if (row['timestamp'] > r['timestamp'] and
264 row['end_time'] < r['end_time']):
274 if (
275 row["timestamp"] > r["timestamp"]
276 and row["end_time"] < r["end_time"]
277 ):
265 278 return r
266 279 except TypeError as e:
267 log.warning('reports_view.find_parent: %s' % e)
280 log.warning("reports_view.find_parent: %s" % e)
268 281 return None
269 282
270 283 new_calls = []
@@ -274,24 +287,23 @@ class Report(Base, BaseModel):
274 287 for x in range(len(calls) - 1, -1, -1):
275 288 parent = find_parent(calls[x], calls)
276 289 if parent:
277 parent['children'].append(calls[x])
290 parent["children"].append(calls[x])
278 291 else:
279 292 # no parent at all? append to new calls anyways
280 293 new_calls.append(calls[x])
281 294 # print 'append', calls[x]
282 295 del calls[x]
283 296 break
284 instance_dict['slow_calls'] = new_calls
297 instance_dict["slow_calls"] = new_calls
285 298
286 instance_dict['front_url'] = self.get_public_url(request)
299 instance_dict["front_url"] = self.get_public_url(request)
287 300
288 301 exclude_keys_list = exclude_keys or []
289 302 include_keys_list = include_keys or []
290 303 for k in list(instance_dict.keys()):
291 if k == 'group':
304 if k == "group":
292 305 continue
293 if (k in exclude_keys_list or
294 (k not in include_keys_list and include_keys)):
306 if k in exclude_keys_list or (k not in include_keys_list and include_keys):
295 307 del instance_dict[k]
296 308 return instance_dict
297 309
@@ -301,19 +313,20 @@ class Report(Base, BaseModel):
301 313 "query": {
302 314 "filtered": {
303 315 "filter": {
304 "and": [{"term": {"group_id": self.group_id}},
305 {"range": {"pg_id": {"lt": self.id}}}]
316 "and": [
317 {"term": {"group_id": self.group_id}},
318 {"range": {"pg_id": {"lt": self.id}}},
319 ]
306 320 }
307 321 }
308 322 },
309 "sort": [
310 {"_doc": {"order": "desc"}},
311 ],
323 "sort": [{"_doc": {"order": "desc"}}],
312 324 }
313 result = request.es_conn.search(body=query, index=self.partition_id,
314 doc_type='report')
315 if result['hits']['total']:
316 return result['hits']['hits'][0]['_source']['pg_id']
325 result = request.es_conn.search(
326 body=query, index=self.partition_id, doc_type="report"
327 )
328 if result["hits"]["total"]:
329 return result["hits"]["hits"][0]["_source"]["pg_id"]
317 330
318 331 def get_next_in_group(self, request):
319 332 query = {
@@ -321,19 +334,20 @@ class Report(Base, BaseModel):
321 334 "query": {
322 335 "filtered": {
323 336 "filter": {
324 "and": [{"term": {"group_id": self.group_id}},
325 {"range": {"pg_id": {"gt": self.id}}}]
337 "and": [
338 {"term": {"group_id": self.group_id}},
339 {"range": {"pg_id": {"gt": self.id}}},
340 ]
326 341 }
327 342 }
328 343 },
329 "sort": [
330 {"_doc": {"order": "asc"}},
331 ],
344 "sort": [{"_doc": {"order": "asc"}}],
332 345 }
333 result = request.es_conn.search(body=query, index=self.partition_id,
334 doc_type='report')
335 if result['hits']['total']:
336 return result['hits']['hits'][0]['_source']['pg_id']
346 result = request.es_conn.search(
347 body=query, index=self.partition_id, doc_type="report"
348 )
349 if result["hits"]["total"]:
350 return result["hits"]["hits"][0]["_source"]["pg_id"]
337 351
338 352 def get_public_url(self, request=None, report_group=None, _app_url=None):
339 353 """
@@ -341,53 +355,51 @@ class Report(Base, BaseModel):
341 355 """
342 356 if not request:
343 357 request = get_current_request()
344 url = request.route_url('/', _app_url=_app_url)
358 url = request.route_url("/", _app_url=_app_url)
345 359 if report_group:
346 return (url + 'ui/report/%s/%s') % (report_group.id, self.id)
347 return (url + 'ui/report/%s/%s') % (self.group_id, self.id)
360 return (url + "ui/report/%s/%s") % (report_group.id, self.id)
361 return (url + "ui/report/%s/%s") % (self.group_id, self.id)
348 362
349 363 def req_stats(self):
350 364 stats = self.request_stats.copy()
351 stats['percentages'] = {}
352 stats['percentages']['main'] = 100.0
353 main = stats.get('main', 0.0)
365 stats["percentages"] = {}
366 stats["percentages"]["main"] = 100.0
367 main = stats.get("main", 0.0)
354 368 if not main:
355 369 return None
356 370 for name, call_time in stats.items():
357 if ('calls' not in name and 'main' not in name and
358 'percentages' not in name):
359 stats['main'] -= call_time
360 stats['percentages'][name] = math.floor(
361 (call_time / main * 100.0))
362 stats['percentages']['main'] -= stats['percentages'][name]
363 if stats['percentages']['main'] < 0.0:
364 stats['percentages']['main'] = 0.0
365 stats['main'] = 0.0
371 if "calls" not in name and "main" not in name and "percentages" not in name:
372 stats["main"] -= call_time
373 stats["percentages"][name] = math.floor((call_time / main * 100.0))
374 stats["percentages"]["main"] -= stats["percentages"][name]
375 if stats["percentages"]["main"] < 0.0:
376 stats["percentages"]["main"] = 0.0
377 stats["main"] = 0.0
366 378 return stats
367 379
368 def generate_grouping_hash(self, hash_string=None, default_grouping=None,
369 protocol_version=None):
380 def generate_grouping_hash(
381 self, hash_string=None, default_grouping=None, protocol_version=None
382 ):
370 383 """
371 384 Generates SHA1 hash that will be used to group reports together
372 385 """
373 386 if not hash_string:
374 location = self.tags.get('view_name') or self.url_path;
375 server_name = self.tags.get('server_name') or ''
376 if default_grouping == 'url_traceback':
377 hash_string = '%s_%s_%s' % (self.traceback_hash, location,
378 self.error)
387 location = self.tags.get("view_name") or self.url_path
388 server_name = self.tags.get("server_name") or ""
389 if default_grouping == "url_traceback":
390 hash_string = "%s_%s_%s" % (self.traceback_hash, location, self.error)
379 391 if self.language == Language.javascript:
380 hash_string = '%s_%s' % (self.traceback_hash, self.error)
392 hash_string = "%s_%s" % (self.traceback_hash, self.error)
381 393
382 elif default_grouping == 'traceback_server':
383 hash_string = '%s_%s' % (self.traceback_hash, server_name)
394 elif default_grouping == "traceback_server":
395 hash_string = "%s_%s" % (self.traceback_hash, server_name)
384 396 if self.language == Language.javascript:
385 hash_string = '%s_%s' % (self.traceback_hash, server_name)
397 hash_string = "%s_%s" % (self.traceback_hash, server_name)
386 398 else:
387 hash_string = '%s_%s' % (self.error, location)
399 hash_string = "%s_%s" % (self.error, location)
388 400 month = datetime.utcnow().date().replace(day=1)
389 hash_string = '{}_{}'.format(month, hash_string)
390 binary_string = hash_string.encode('utf8')
401 hash_string = "{}_{}".format(month, hash_string)
402 binary_string = hash_string.encode("utf8")
391 403 self.grouping_hash = hashlib.sha1(binary_string).hexdigest()
392 404 return self.grouping_hash
393 405
@@ -399,7 +411,7 @@ class Report(Base, BaseModel):
399 411
400 412 if isinstance(stripped_traceback, list):
401 413 for row in stripped_traceback:
402 row.pop('vars', None)
414 row.pop("vars", None)
403 415 return stripped_traceback
404 416
405 417 def notify_channel(self, report_group):
@@ -407,78 +419,81 @@ class Report(Base, BaseModel):
407 419 Sends notification to websocket channel
408 420 """
409 421 settings = get_current_registry().settings
410 log.info('notify channelstream')
422 log.info("notify channelstream")
411 423 if self.report_type != ReportType.error:
412 424 return
413 425 payload = {
414 'type': 'message',
415 "user": '__system__',
416 "channel": 'app_%s' % self.resource_id,
417 'message': {
418 'topic': 'front_dashboard.new_topic',
419 'report': {
420 'group': {
421 'priority': report_group.priority,
422 'first_timestamp': report_group.first_timestamp,
423 'last_timestamp': report_group.last_timestamp,
424 'average_duration': report_group.average_duration,
425 'occurences': report_group.occurences
426 "type": "message",
427 "user": "__system__",
428 "channel": "app_%s" % self.resource_id,
429 "message": {
430 "topic": "front_dashboard.new_topic",
431 "report": {
432 "group": {
433 "priority": report_group.priority,
434 "first_timestamp": report_group.first_timestamp,
435 "last_timestamp": report_group.last_timestamp,
436 "average_duration": report_group.average_duration,
437 "occurences": report_group.occurences,
426 438 },
427 'report_id': self.id,
428 'group_id': self.group_id,
429 'resource_id': self.resource_id,
430 'http_status': self.http_status,
431 'url_domain': self.url_domain,
432 'url_path': self.url_path,
433 'error': self.error or '',
434 'server': self.tags.get('server_name'),
435 'view_name': self.tags.get('view_name'),
436 'front_url': self.get_public_url(),
437 }
438 }
439
439 "report_id": self.id,
440 "group_id": self.group_id,
441 "resource_id": self.resource_id,
442 "http_status": self.http_status,
443 "url_domain": self.url_domain,
444 "url_path": self.url_path,
445 "error": self.error or "",
446 "server": self.tags.get("server_name"),
447 "view_name": self.tags.get("view_name"),
448 "front_url": self.get_public_url(),
449 },
450 },
440 451 }
441 channelstream_request(settings['cometd.secret'], '/message', [payload],
442 servers=[settings['cometd_servers']])
452 channelstream_request(
453 settings["cometd.secret"],
454 "/message",
455 [payload],
456 servers=[settings["cometd_servers"]],
457 )
443 458
444 459 def es_doc(self):
445 460 tags = {}
446 461 tag_list = []
447 462 for name, value in self.tags.items():
448 name = name.replace('.', '_')
463 name = name.replace(".", "_")
449 464 tag_list.append(name)
450 465 tags[name] = {
451 466 "values": convert_es_type(value),
452 "numeric_values": value if (
453 isinstance(value, (int, float)) and
454 not isinstance(value, bool)) else None}
467 "numeric_values": value
468 if (isinstance(value, (int, float)) and not isinstance(value, bool))
469 else None,
470 }
455 471
456 if 'user_name' not in self.tags and self.username:
457 tags["user_name"] = {"value": [self.username],
458 "numeric_value": None}
472 if "user_name" not in self.tags and self.username:
473 tags["user_name"] = {"value": [self.username], "numeric_value": None}
459 474 return {
460 '_id': str(self.id),
461 'pg_id': str(self.id),
462 'resource_id': self.resource_id,
463 'http_status': self.http_status or '',
464 'start_time': self.start_time,
465 'end_time': self.end_time,
466 'url_domain': self.url_domain if self.url_domain else '',
467 'url_path': self.url_path if self.url_path else '',
468 'duration': self.duration,
469 'error': self.error if self.error else '',
470 'report_type': self.report_type,
471 'request_id': self.request_id,
472 'ip': self.ip,
473 'group_id': str(self.group_id),
474 '_parent': str(self.group_id),
475 'tags': tags,
476 'tag_list': tag_list
475 "_id": str(self.id),
476 "pg_id": str(self.id),
477 "resource_id": self.resource_id,
478 "http_status": self.http_status or "",
479 "start_time": self.start_time,
480 "end_time": self.end_time,
481 "url_domain": self.url_domain if self.url_domain else "",
482 "url_path": self.url_path if self.url_path else "",
483 "duration": self.duration,
484 "error": self.error if self.error else "",
485 "report_type": self.report_type,
486 "request_id": self.request_id,
487 "ip": self.ip,
488 "group_id": str(self.group_id),
489 "_parent": str(self.group_id),
490 "tags": tags,
491 "tag_list": tag_list,
477 492 }
478 493
479 494 @property
480 495 def partition_id(self):
481 return 'rcae_r_%s' % self.report_group_time.strftime('%Y_%m')
496 return "rcae_r_%s" % self.report_group_time.strftime("%Y_%m")
482 497
483 498 def partition_range(self):
484 499 start_date = self.report_group_time.date().replace(day=1)
@@ -488,27 +503,31 @@ class Report(Base, BaseModel):
488 503
489 504
490 505 def after_insert(mapper, connection, target):
491 if not hasattr(target, '_skip_ft_index'):
506 if not hasattr(target, "_skip_ft_index"):
492 507 data = target.es_doc()
493 data.pop('_id', None)
494 Datastores.es.index(target.partition_id, 'report', data,
495 parent=target.group_id, id=target.id)
508 data.pop("_id", None)
509 Datastores.es.index(
510 target.partition_id, "report", data, parent=target.group_id, id=target.id
511 )
496 512
497 513
498 514 def after_update(mapper, connection, target):
499 if not hasattr(target, '_skip_ft_index'):
515 if not hasattr(target, "_skip_ft_index"):
500 516 data = target.es_doc()
501 data.pop('_id', None)
502 Datastores.es.index(target.partition_id, 'report', data,
503 parent=target.group_id, id=target.id)
517 data.pop("_id", None)
518 Datastores.es.index(
519 target.partition_id, "report", data, parent=target.group_id, id=target.id
520 )
504 521
505 522
506 523 def after_delete(mapper, connection, target):
507 if not hasattr(target, '_skip_ft_index'):
508 query = {"query":{'term': {'pg_id': target.id}}}
509 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report'), body=query)
524 if not hasattr(target, "_skip_ft_index"):
525 query = {"query": {"term": {"pg_id": target.id}}}
526 Datastores.es.transport.perform_request(
527 "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
528 )
510 529
511 530
512 sa.event.listen(Report, 'after_insert', after_insert)
513 sa.event.listen(Report, 'after_update', after_update)
514 sa.event.listen(Report, 'after_delete', after_delete)
531 sa.event.listen(Report, "after_insert", after_insert)
532 sa.event.listen(Report, "after_update", after_update)
533 sa.event.listen(Report, "after_delete", after_delete)
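The three listeners above keep the Elasticsearch copy of a report in lockstep with the SQL row: inserts and updates re-index the `es_doc()` payload (parented to the report group), deletes issue a delete-by-query, and setting a `_skip_ft_index` attribute on an instance opts it out, presumably for bulk paths that index separately. A stripped-down sketch of the pattern with the ES client stubbed out:

    def es_index(index, doc_type, doc, **kw):
        # stand-in for Datastores.es.index(...); a real deployment calls Elasticsearch
        print("index %s/%s id=%s" % (index, doc_type, kw.get("id")))

    def after_insert(mapper, connection, target):
        if not hasattr(target, "_skip_ft_index"):
            doc = target.es_doc()
            doc.pop("_id", None)  # the document id is passed via id= instead
            es_index(target.partition_id, "report", doc, parent=target.group_id, id=target.id)

    # wired up with: sa.event.listen(Report, "after_insert", after_insert)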
@@ -20,13 +20,16 @@ import sqlalchemy as sa
20 20
21 21
22 22 class ReportAssignment(Base, BaseModel):
23 __tablename__ = 'reports_assignments'
23 __tablename__ = "reports_assignments"
24 24
25 group_id = sa.Column(sa.BigInteger,
26 sa.ForeignKey('reports_groups.id', ondelete='cascade',
27 onupdate='cascade'),
28 primary_key=True)
29 owner_id = sa.Column(sa.Integer,
30 sa.ForeignKey('users.id', onupdate='CASCADE',
31 ondelete='CASCADE'), primary_key=True)
25 group_id = sa.Column(
26 sa.BigInteger,
27 sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
28 primary_key=True,
29 )
30 owner_id = sa.Column(
31 sa.Integer,
32 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
33 primary_key=True,
34 )
32 35 report_time = sa.Column(sa.DateTime(), nullable=False)
@@ -22,23 +22,23 @@ from ziggurat_foundations.models.base import BaseModel
22 22
23 23
24 24 class ReportComment(Base, BaseModel):
25 __tablename__ = 'reports_comments'
25 __tablename__ = "reports_comments"
26 26
27 27 comment_id = sa.Column(sa.Integer, nullable=False, primary_key=True)
28 group_id = sa.Column(sa.BigInteger,
29 sa.ForeignKey('reports_groups.id', ondelete='cascade',
30 onupdate='cascade'))
31 body = sa.Column(sa.UnicodeText(), default='')
32 owner_id = sa.Column(sa.Integer,
33 sa.ForeignKey('users.id', onupdate='CASCADE',
34 ondelete='CASCADE'))
35 created_timestamp = sa.Column(sa.DateTime(),
36 default=datetime.utcnow,
37 server_default=sa.func.now())
28 group_id = sa.Column(
29 sa.BigInteger,
30 sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
31 )
32 body = sa.Column(sa.UnicodeText(), default="")
33 owner_id = sa.Column(
34 sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
35 )
36 created_timestamp = sa.Column(
37 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
38 )
38 39 report_time = sa.Column(sa.DateTime(), nullable=False)
39 40
40 owner = sa.orm.relationship('User',
41 lazy='joined')
41 owner = sa.orm.relationship("User", lazy="joined")
42 42
43 43 @property
44 44 def processed_body(self):
@@ -46,5 +46,5 @@ class ReportComment(Base, BaseModel):
46 46
47 47 def get_dict(self):
48 48 instance_dict = super(ReportComment, self).get_dict()
49 instance_dict['user_name'] = self.owner.user_name
49 instance_dict["user_name"] = self.owner.user_name
50 50 return instance_dict
@@ -33,26 +33,30 @@ log = logging.getLogger(__name__)
33 33
34 34
35 35 class ReportGroup(Base, BaseModel):
36 __tablename__ = 'reports_groups'
37 __table_args__ = {'implicit_returning': False}
36 __tablename__ = "reports_groups"
37 __table_args__ = {"implicit_returning": False}
38 38
39 39 id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
40 resource_id = sa.Column(sa.Integer(),
41 sa.ForeignKey('applications.resource_id',
42 onupdate='CASCADE',
43 ondelete='CASCADE'),
44 nullable=False,
45 index=True)
46 priority = sa.Column(sa.Integer, nullable=False, index=True, default=5,
47 server_default='5')
48 first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
49 server_default=sa.func.now())
50 last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
51 server_default=sa.func.now())
40 resource_id = sa.Column(
41 sa.Integer(),
42 sa.ForeignKey(
43 "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
44 ),
45 nullable=False,
46 index=True,
47 )
48 priority = sa.Column(
49 sa.Integer, nullable=False, index=True, default=5, server_default="5"
50 )
51 first_timestamp = sa.Column(
52 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
53 )
54 last_timestamp = sa.Column(
55 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
56 )
52 57 error = sa.Column(sa.UnicodeText(), index=True)
53 grouping_hash = sa.Column(sa.String(40), default='')
54 triggered_postprocesses_ids = sa.Column(JSON(), nullable=False,
55 default=list)
58 grouping_hash = sa.Column(sa.String(40), default="")
59 triggered_postprocesses_ids = sa.Column(JSON(), nullable=False, default=list)
56 60 report_type = sa.Column(sa.Integer, default=1)
57 61 total_reports = sa.Column(sa.Integer, default=1)
58 62 last_report = sa.Column(sa.Integer)
@@ -64,50 +68,58 @@ class ReportGroup(Base, BaseModel):
64 68 notified = sa.Column(sa.Boolean(), index=True, default=False)
65 69 public = sa.Column(sa.Boolean(), index=True, default=False)
66 70
67 reports = sa.orm.relationship('Report',
68 lazy='dynamic',
69 backref='report_group',
70 cascade="all, delete-orphan",
71 passive_deletes=True,
72 passive_updates=True, )
73
74 comments = sa.orm.relationship('ReportComment',
75 lazy='dynamic',
76 backref='report',
77 cascade="all, delete-orphan",
78 passive_deletes=True,
79 passive_updates=True,
80 order_by="ReportComment.comment_id")
81
82 assigned_users = sa.orm.relationship('User',
83 backref=sa.orm.backref(
84 'assigned_reports_relation',
85 lazy='dynamic',
86 order_by=sa.desc(
87 sa.text("reports_groups.id"))
88 ),
89 passive_deletes=True,
90 passive_updates=True,
91 secondary='reports_assignments',
92 order_by="User.user_name")
93
94 stats = sa.orm.relationship('ReportStat',
95 lazy='dynamic',
96 backref='report',
97 passive_deletes=True,
98 passive_updates=True, )
99
100 last_report_ref = sa.orm.relationship('Report',
101 uselist=False,
102 primaryjoin="ReportGroup.last_report "
103 "== Report.id",
104 foreign_keys="Report.id",
105 cascade="all, delete-orphan",
106 passive_deletes=True,
107 passive_updates=True, )
71 reports = sa.orm.relationship(
72 "Report",
73 lazy="dynamic",
74 backref="report_group",
75 cascade="all, delete-orphan",
76 passive_deletes=True,
77 passive_updates=True,
78 )
79
80 comments = sa.orm.relationship(
81 "ReportComment",
82 lazy="dynamic",
83 backref="report",
84 cascade="all, delete-orphan",
85 passive_deletes=True,
86 passive_updates=True,
87 order_by="ReportComment.comment_id",
88 )
89
90 assigned_users = sa.orm.relationship(
91 "User",
92 backref=sa.orm.backref(
93 "assigned_reports_relation",
94 lazy="dynamic",
95 order_by=sa.desc(sa.text("reports_groups.id")),
96 ),
97 passive_deletes=True,
98 passive_updates=True,
99 secondary="reports_assignments",
100 order_by="User.user_name",
101 )
102
103 stats = sa.orm.relationship(
104 "ReportStat",
105 lazy="dynamic",
106 backref="report",
107 passive_deletes=True,
108 passive_updates=True,
109 )
110
111 last_report_ref = sa.orm.relationship(
112 "Report",
113 uselist=False,
114 primaryjoin="ReportGroup.last_report " "== Report.id",
115 foreign_keys="Report.id",
116 cascade="all, delete-orphan",
117 passive_deletes=True,
118 passive_updates=True,
119 )
108 120
109 121 def __repr__(self):
110 return '<ReportGroup id:{}>'.format(self.id)
122 return "<ReportGroup id:{}>".format(self.id)
111 123
112 124 def get_report(self, report_id=None, public=False):
113 125 """
@@ -121,8 +133,8 @@ class ReportGroup(Base, BaseModel):
121 133 return self.reports.filter(Report.id == report_id).first()
122 134
123 135 def get_public_url(self, request, _app_url=None):
124 url = request.route_url('/', _app_url=_app_url)
125 return (url + 'ui/report/%s') % self.id
136 url = request.route_url("/", _app_url=_app_url)
137 return (url + "ui/report/%s") % self.id
126 138
127 139 def run_postprocessing(self, report):
128 140 """
@@ -135,12 +147,15 @@ class ReportGroup(Base, BaseModel):
135 147 rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
136 148 report_dict = report.get_dict(request)
137 149 # if was not processed yet
138 if (rule_obj.match(report_dict) and
139 action.pkey not in self.triggered_postprocesses_ids):
150 if (
151 rule_obj.match(report_dict)
152 and action.pkey not in self.triggered_postprocesses_ids
153 ):
140 154 action.postprocess(self)
141 155 # this way sqla can track mutation of list
142 self.triggered_postprocesses_ids = \
143 self.triggered_postprocesses_ids + [action.pkey]
156 self.triggered_postprocesses_ids = self.triggered_postprocesses_ids + [
157 action.pkey
158 ]
144 159
145 160 get_db_session(None, self).flush()
146 161 # do not go out of bounds
@@ -151,31 +166,30 @@ class ReportGroup(Base, BaseModel):
151 166
152 167 def get_dict(self, request):
153 168 instance_dict = super(ReportGroup, self).get_dict()
154 instance_dict['server_name'] = self.get_report().tags.get(
155 'server_name')
156 instance_dict['view_name'] = self.get_report().tags.get('view_name')
157 instance_dict['resource_name'] = self.application.resource_name
158 instance_dict['report_type'] = self.get_report().report_type
159 instance_dict['url_path'] = self.get_report().url_path
160 instance_dict['front_url'] = self.get_report().get_public_url(request)
161 del instance_dict['triggered_postprocesses_ids']
169 instance_dict["server_name"] = self.get_report().tags.get("server_name")
170 instance_dict["view_name"] = self.get_report().tags.get("view_name")
171 instance_dict["resource_name"] = self.application.resource_name
172 instance_dict["report_type"] = self.get_report().report_type
173 instance_dict["url_path"] = self.get_report().url_path
174 instance_dict["front_url"] = self.get_report().get_public_url(request)
175 del instance_dict["triggered_postprocesses_ids"]
162 176 return instance_dict
163 177
164 178 def es_doc(self):
165 179 return {
166 '_id': str(self.id),
167 'pg_id': str(self.id),
168 'resource_id': self.resource_id,
169 'error': self.error,
170 'fixed': self.fixed,
171 'public': self.public,
172 'read': self.read,
173 'priority': self.priority,
174 'occurences': self.occurences,
175 'average_duration': self.average_duration,
176 'summed_duration': self.summed_duration,
177 'first_timestamp': self.first_timestamp,
178 'last_timestamp': self.last_timestamp
180 "_id": str(self.id),
181 "pg_id": str(self.id),
182 "resource_id": self.resource_id,
183 "error": self.error,
184 "fixed": self.fixed,
185 "public": self.public,
186 "read": self.read,
187 "priority": self.priority,
188 "occurences": self.occurences,
189 "average_duration": self.average_duration,
190 "summed_duration": self.summed_duration,
191 "first_timestamp": self.first_timestamp,
192 "last_timestamp": self.last_timestamp,
179 193 }
180 194
181 195 def set_notification_info(self, notify_10=False, notify_100=False):
@@ -184,53 +198,54 @@ class ReportGroup(Base, BaseModel):
184 198 """
185 199 current_time = datetime.utcnow().replace(second=0, microsecond=0)
186 200 # global app counter
187 key = REDIS_KEYS['counters']['reports_per_type'].format(
188 self.report_type, current_time)
201 key = REDIS_KEYS["counters"]["reports_per_type"].format(
202 self.report_type, current_time
203 )
189 204 redis_pipeline = Datastores.redis.pipeline()
190 205 redis_pipeline.incr(key)
191 206 redis_pipeline.expire(key, 3600 * 24)
192 207 # detailed app notification for alerts and notifications
208 redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
193 209 redis_pipeline.sadd(
194 REDIS_KEYS['apps_that_had_reports'], self.resource_id)
195 redis_pipeline.sadd(
196 REDIS_KEYS['apps_that_had_reports_alerting'], self.resource_id)
210 REDIS_KEYS["apps_that_had_reports_alerting"], self.resource_id
211 )
197 212 # only notify for exceptions here
198 213 if self.report_type == ReportType.error:
214 redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
199 215 redis_pipeline.sadd(
200 REDIS_KEYS['apps_that_had_reports'], self.resource_id)
201 redis_pipeline.sadd(
202 REDIS_KEYS['apps_that_had_error_reports_alerting'],
203 self.resource_id)
204 key = REDIS_KEYS['counters']['report_group_occurences'].format(self.id)
216 REDIS_KEYS["apps_that_had_error_reports_alerting"], self.resource_id
217 )
218 key = REDIS_KEYS["counters"]["report_group_occurences"].format(self.id)
205 219 redis_pipeline.incr(key)
206 220 redis_pipeline.expire(key, 3600 * 24)
207 key = REDIS_KEYS['counters']['report_group_occurences_alerting'].format(
208 self.id)
221 key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(self.id)
209 222 redis_pipeline.incr(key)
210 223 redis_pipeline.expire(key, 3600 * 24)
211 224
212 225 if notify_10:
213 key = REDIS_KEYS['counters'][
214 'report_group_occurences_10th'].format(self.id)
226 key = REDIS_KEYS["counters"]["report_group_occurences_10th"].format(self.id)
215 227 redis_pipeline.setex(key, 3600 * 24, 1)
216 228 if notify_100:
217 key = REDIS_KEYS['counters'][
218 'report_group_occurences_100th'].format(self.id)
229 key = REDIS_KEYS["counters"]["report_group_occurences_100th"].format(
230 self.id
231 )
219 232 redis_pipeline.setex(key, 3600 * 24, 1)
220 233
221 key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
222 self.report_type, self.resource_id)
234 key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
235 self.report_type, self.resource_id
236 )
223 237 redis_pipeline.sadd(key, self.id)
224 238 redis_pipeline.expire(key, 3600 * 24)
225 key = REDIS_KEYS['reports_to_notify_per_type_per_app_alerting'].format(
226 self.report_type, self.resource_id)
239 key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
240 self.report_type, self.resource_id
241 )
227 242 redis_pipeline.sadd(key, self.id)
228 243 redis_pipeline.expire(key, 3600 * 24)
229 244 redis_pipeline.execute()
230 245
231 246 @property
232 247 def partition_id(self):
233 return 'rcae_r_%s' % self.first_timestamp.strftime('%Y_%m')
248 return "rcae_r_%s" % self.first_timestamp.strftime("%Y_%m")
234 249
235 250 def partition_range(self):
236 251 start_date = self.first_timestamp.date().replace(day=1)
@@ -240,29 +255,33 @@ class ReportGroup(Base, BaseModel):
240 255
241 256
242 257 def after_insert(mapper, connection, target):
243 if not hasattr(target, '_skip_ft_index'):
258 if not hasattr(target, "_skip_ft_index"):
244 259 data = target.es_doc()
245 data.pop('_id', None)
246 Datastores.es.index(target.partition_id, 'report_group',
247 data, id=target.id)
260 data.pop("_id", None)
261 Datastores.es.index(target.partition_id, "report_group", data, id=target.id)
248 262
249 263
250 264 def after_update(mapper, connection, target):
251 if not hasattr(target, '_skip_ft_index'):
265 if not hasattr(target, "_skip_ft_index"):
252 266 data = target.es_doc()
253 data.pop('_id', None)
254 Datastores.es.index(target.partition_id, 'report_group',
255 data, id=target.id)
267 data.pop("_id", None)
268 Datastores.es.index(target.partition_id, "report_group", data, id=target.id)
256 269
257 270
258 271 def after_delete(mapper, connection, target):
259 query = {"query": {'term': {'group_id': target.id}}}
272 query = {"query": {"term": {"group_id": target.id}}}
260 273 # delete by query
261 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report'), body=query)
262 query = {"query": {'term': {'pg_id': target.id}}}
263 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report_group'), body=query)
264
265
266 sa.event.listen(ReportGroup, 'after_insert', after_insert)
267 sa.event.listen(ReportGroup, 'after_update', after_update)
268 sa.event.listen(ReportGroup, 'after_delete', after_delete)
274 Datastores.es.transport.perform_request(
275 "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
276 )
277 query = {"query": {"term": {"pg_id": target.id}}}
278 Datastores.es.transport.perform_request(
279 "DELETE",
280 "/{}/{}/_query".format(target.partition_id, "report_group"),
281 body=query,
282 )
283
284
285 sa.event.listen(ReportGroup, "after_insert", after_insert)
286 sa.event.listen(ReportGroup, "after_update", after_update)
287 sa.event.listen(ReportGroup, "after_delete", after_delete)
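Both `Report` and `ReportGroup` deletions clean up Elasticsearch through `perform_request("DELETE", "/{index}/{type}/_query", ...)`, the older delete-by-query endpoint (core in ES 1.x, a plugin in 2.x); newer clusters expose `POST /{index}/_delete_by_query` instead. A sketch of how the path and body are assembled, with the transport wiring elided:

    def delete_by_query(partition_id, doc_type, field, value):
        # returns the request tuple AppEnlight performs against the cluster
        path = "/{}/{}/_query".format(partition_id, doc_type)
        body = {"query": {"term": {field: value}}}
        return "DELETE", path, body

    assert delete_by_query("rcae_r_2018_05", "report", "group_id", 123) == (
        "DELETE",
        "/rcae_r_2018_05/report/_query",
        {"query": {"term": {"group_id": 123}}},
    )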
@@ -22,53 +22,58 @@ from ziggurat_foundations.models.base import BaseModel
22 22
23 23
24 24 class ReportStat(Base, BaseModel):
25 __tablename__ = 'reports_stats'
26 __table_args__ = {'implicit_returning': False}
25 __tablename__ = "reports_stats"
26 __table_args__ = {"implicit_returning": False}
27 27
28 group_id = sa.Column(sa.BigInteger(),
29 sa.ForeignKey('reports_groups.id'),
30 nullable=False)
31 resource_id = sa.Column(sa.Integer(),
32 sa.ForeignKey('applications.resource_id'),
33 nullable=False)
28 group_id = sa.Column(
29 sa.BigInteger(), sa.ForeignKey("reports_groups.id"), nullable=False
30 )
31 resource_id = sa.Column(
32 sa.Integer(), sa.ForeignKey("applications.resource_id"), nullable=False
33 )
34 34 start_interval = sa.Column(sa.DateTime(), nullable=False)
35 35 occurences = sa.Column(sa.Integer, nullable=True, default=0)
36 owner_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
37 nullable=True)
36 owner_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
38 37 type = sa.Column(sa.Integer, nullable=True, default=0)
39 38 duration = sa.Column(sa.Float, nullable=True, default=0)
40 39 id = sa.Column(sa.BigInteger, nullable=False, primary_key=True)
41 server_name = sa.Column(sa.Unicode(128), nullable=False, default='')
42 view_name = sa.Column(sa.Unicode(128), nullable=False, default='')
40 server_name = sa.Column(sa.Unicode(128), nullable=False, default="")
41 view_name = sa.Column(sa.Unicode(128), nullable=False, default="")
43 42
44 43 @property
45 44 def partition_id(self):
46 return 'rcae_r_%s' % self.start_interval.strftime('%Y_%m')
45 return "rcae_r_%s" % self.start_interval.strftime("%Y_%m")
47 46
48 47 def es_doc(self):
49 48 return {
50 'resource_id': self.resource_id,
51 'timestamp': self.start_interval,
52 'pg_id': str(self.id),
53 'permanent': True,
54 'request_id': None,
55 'log_level': 'ERROR',
56 'message': None,
57 'namespace': 'appenlight.error',
58 'tags': {
59 'duration': {'values': self.duration,
60 'numeric_values': self.duration},
61 'occurences': {'values': self.occurences,
62 'numeric_values': self.occurences},
63 'group_id': {'values': self.group_id,
64 'numeric_values': self.group_id},
65 'type': {'values': ReportType.key_from_value(self.type),
66 'numeric_values': self.type},
67 'server_name': {'values': self.server_name,
68 'numeric_values': None},
69 'view_name': {'values': self.view_name,
70 'numeric_values': None},
49 "resource_id": self.resource_id,
50 "timestamp": self.start_interval,
51 "pg_id": str(self.id),
52 "permanent": True,
53 "request_id": None,
54 "log_level": "ERROR",
55 "message": None,
56 "namespace": "appenlight.error",
57 "tags": {
58 "duration": {"values": self.duration, "numeric_values": self.duration},
59 "occurences": {
60 "values": self.occurences,
61 "numeric_values": self.occurences,
62 },
63 "group_id": {"values": self.group_id, "numeric_values": self.group_id},
64 "type": {
65 "values": ReportType.key_from_value(self.type),
66 "numeric_values": self.type,
67 },
68 "server_name": {"values": self.server_name, "numeric_values": None},
69 "view_name": {"values": self.view_name, "numeric_values": None},
71 70 },
72 'tag_list': ['duration', 'occurences', 'group_id', 'type',
73 'server_name', 'view_name']
71 "tag_list": [
72 "duration",
73 "occurences",
74 "group_id",
75 "type",
76 "server_name",
77 "view_name",
78 ],
74 79 }
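One detail worth calling out from this file: partition_id routes every stat row to a per-month Elasticsearch index. The naming scheme, shown standalone with the format string taken verbatim from the code:

from datetime import datetime

# monthly partitioning, as in ReportStat.partition_id
start_interval = datetime(2018, 5, 17, 12, 0)
print("rcae_r_%s" % start_interval.strftime("%Y_%m"))  # -> rcae_r_2018_05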
@@ -23,11 +23,13 b' from ziggurat_foundations.models.services.resource import ResourceService'
23 23
24 24
25 25 class Resource(ResourceMixin, Base):
26 events = sa.orm.relationship('Event',
27 lazy='dynamic',
28 backref='resource',
29 passive_deletes=True,
30 passive_updates=True)
26 events = sa.orm.relationship(
27 "Event",
28 lazy="dynamic",
29 backref="resource",
30 passive_deletes=True,
31 passive_updates=True,
32 )
31 33
32 34 @property
33 35 def owner_user_name(self):
@@ -39,46 +41,56 b' class Resource(ResourceMixin, Base):'
39 41 if self.owner_group:
40 42 return self.owner_group.group_name
41 43
42 def get_dict(self, exclude_keys=None, include_keys=None,
43 include_perms=False, include_processing_rules=False):
44 def get_dict(
45 self,
46 exclude_keys=None,
47 include_keys=None,
48 include_perms=False,
49 include_processing_rules=False,
50 ):
44 51 result = super(Resource, self).get_dict(exclude_keys, include_keys)
45 result['possible_permissions'] = self.__possible_permissions__
52 result["possible_permissions"] = self.__possible_permissions__
46 53 if include_perms:
47 result['current_permissions'] = self.user_permissions_list
54 result["current_permissions"] = self.user_permissions_list
48 55 else:
49 result['current_permissions'] = []
56 result["current_permissions"] = []
50 57 if include_processing_rules:
51 result["postprocessing_rules"] = [rule.get_dict() for rule
52 in self.postprocess_conf]
58 result["postprocessing_rules"] = [
59 rule.get_dict() for rule in self.postprocess_conf
60 ]
53 61 else:
54 62 result["postprocessing_rules"] = []
55 63 exclude_keys_list = exclude_keys or []
56 64 include_keys_list = include_keys or []
57 65 d = {}
58 66 for k in result.keys():
59 if (k not in exclude_keys_list and
60 (k in include_keys_list or not include_keys)):
67 if k not in exclude_keys_list and (
68 k in include_keys_list or not include_keys
69 ):
61 70 d[k] = result[k]
62 for k in ['owner_user_name', 'owner_group_name']:
63 if (k not in exclude_keys_list and
64 (k in include_keys_list or not include_keys)):
71 for k in ["owner_user_name", "owner_group_name"]:
72 if k not in exclude_keys_list and (
73 k in include_keys_list or not include_keys
74 ):
65 75 d[k] = getattr(self, k)
66 76 return d
67 77
68 78 @property
69 79 def user_permissions_list(self):
70 return [permission_tuple_to_dict(perm) for perm in
71 ResourceService.users_for_perm(
72 self, '__any_permission__', limit_group_permissions=True)]
80 return [
81 permission_tuple_to_dict(perm)
82 for perm in ResourceService.users_for_perm(
83 self, "__any_permission__", limit_group_permissions=True
84 )
85 ]
73 86
74 87 @property
75 88 def __acl__(self):
76 89 acls = []
77 90
78 91 if self.owner_user_id:
79 acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS,), ])
92 acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS)])
80 93
81 94 if self.owner_group_id:
82 acls.extend([(Allow, "group:%s" % self.owner_group_id,
83 ALL_PERMISSIONS,), ])
95 acls.extend([(Allow, "group:%s" % self.owner_group_id, ALL_PERMISSIONS)])
84 96 return acls
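The __acl__ property above is Pyramid's standard access-control hook: a list of (action, principal, permission) triples that the authorization policy walks top to bottom. A stripped-down sketch of the same construction (DemoResource is illustrative, not an appenlight class):

from pyramid.security import ALL_PERMISSIONS, Allow


class DemoResource:
    def __init__(self, owner_user_id=None, owner_group_id=None):
        self.owner_user_id = owner_user_id
        self.owner_group_id = owner_group_id

    @property
    def __acl__(self):
        acls = []
        if self.owner_user_id:
            # the owning user gets every permission on the resource
            acls.append((Allow, self.owner_user_id, ALL_PERMISSIONS))
        if self.owner_group_id:
            # group principals are namespaced with a "group:" prefix
            acls.append((Allow, "group:%s" % self.owner_group_id, ALL_PERMISSIONS))
        return acls


print(DemoResource(owner_user_id=5, owner_group_id=2).__acl__)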
@@ -13,4 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
@@ -33,7 +33,6 b' log = logging.getLogger(__name__)'
33 33
34 34
35 35 class ApplicationService(BaseService):
36
37 36 @classmethod
38 37 def all(cls, db_session=None):
39 38 db_session = get_db_session(db_session)
@@ -51,9 +50,9 b' class ApplicationService(BaseService):'
51 50 @classmethod
52 51 def by_api_key_cached(cls, db_session=None):
53 52 db_session = get_db_session(db_session)
54 cache_region = get_region('redis_min_1')
53 cache_region = get_region("redis_min_1")
55 54
56 @cache_region.cache_on_arguments('ApplicationService.by_api_key')
55 @cache_region.cache_on_arguments("ApplicationService.by_api_key")
57 56 def cached(*args, **kwargs):
58 57 app = cls.by_api_key(*args, db_session=db_session, **kwargs)
59 58 if app:
@@ -63,10 +62,11 b' class ApplicationService(BaseService):'
63 62 return cached
64 63
65 64 @classmethod
66 def by_public_api_key(cls, api_key, db_session=None, from_cache=False,
67 request=None):
65 def by_public_api_key(
66 cls, api_key, db_session=None, from_cache=False, request=None
67 ):
68 68 db_session = get_db_session(db_session)
69 cache_region = get_region('redis_min_1')
69 cache_region = get_region("redis_min_1")
70 70
71 71 def uncached(api_key):
72 72 q = db_session.query(Application)
@@ -75,8 +75,8 b' class ApplicationService(BaseService):'
75 75 return q.first()
76 76
77 77 if from_cache:
78 @cache_region.cache_on_arguments(
79 'ApplicationService.by_public_api_key')
78
79 @cache_region.cache_on_arguments("ApplicationService.by_public_api_key")
80 80 def cached(api_key):
81 81 app = uncached(api_key)
82 82 if app:
@@ -98,9 +98,9 b' class ApplicationService(BaseService):'
98 98 @classmethod
99 99 def by_id_cached(cls, db_session=None):
100 100 db_session = get_db_session(db_session)
101 cache_region = get_region('redis_min_1')
101 cache_region = get_region("redis_min_1")
102 102
103 @cache_region.cache_on_arguments('ApplicationService.by_id')
103 @cache_region.cache_on_arguments("ApplicationService.by_id")
104 104 def cached(*args, **kwargs):
105 105 app = cls.by_id(*args, db_session=db_session, **kwargs)
106 106 if app:
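All of the by_*_cached classmethods in this service share one dogpile.cache idiom: fetch a region, define a decorated inner function, and hand that function back so callers get both caching and .invalidate(). A standalone sketch using an in-memory region in place of the Redis-backed redis_min_1 region (by_id here is a fake lookup, not the real service method):

from dogpile.cache import make_region

# in-memory stand-in for the redis_min_1 region
cache_region = make_region().configure("dogpile.cache.memory")


def by_id(app_id):
    print("cache miss, hitting the database for %s" % app_id)
    return {"id": app_id}


def by_id_cached():
    @cache_region.cache_on_arguments("DemoService.by_id")
    def cached(app_id):
        return by_id(app_id)

    return cached


getter = by_id_cached()
getter(1)             # miss: computes and stores
getter(1)             # hit: served from the region
getter.invalidate(1)  # drops the cached entry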
@@ -119,10 +119,9 b' class ApplicationService(BaseService):'
119 119 @classmethod
120 120 def by_http_referer(cls, referer_string, db_session=None):
121 121 db_session = get_db_session(db_session)
122 domain = urllib.parse.urlsplit(
123 referer_string, allow_fragments=False).netloc
122 domain = urllib.parse.urlsplit(referer_string, allow_fragments=False).netloc
124 123 if domain:
125 if domain.startswith('www.'):
124 if domain.startswith("www."):
126 125 domain = domain[4:]
127 126 q = db_session.query(Application).filter(Application.domain == domain)
128 127 return q.first()
@@ -132,7 +131,8 b' class ApplicationService(BaseService):'
132 131 db_session = get_db_session(db_session)
133 132 q = db_session.query(Application)
134 133 q2 = ReportGroup.last_updated(
135 since_when, exclude_status=exclude_status, db_session=db_session)
134 since_when, exclude_status=exclude_status, db_session=db_session
135 )
136 136 q2 = q2.from_self(ReportGroup.resource_id)
137 137 q2 = q2.group_by(ReportGroup.resource_id)
138 138 q = q.filter(Application.resource_id.in_(q2))
@@ -142,10 +142,10 b' class ApplicationService(BaseService):'
142 142 def check_for_groups_alert(cls, resource, event_type, *args, **kwargs):
143 143 """ Check for open alerts depending on group type.
144 144 Create new one if nothing is found and send alerts """
145 db_session = get_db_session(kwargs.get('db_session'))
145 db_session = get_db_session(kwargs.get("db_session"))
146 146 request = get_current_request()
147 report_groups = kwargs['report_groups']
148 occurence_dict = kwargs['occurence_dict']
147 report_groups = kwargs["report_groups"]
148 occurence_dict = kwargs["occurence_dict"]
149 149
150 150 error_reports = 0
151 151 slow_reports = 0
@@ -156,38 +156,45 b' class ApplicationService(BaseService):'
156 156 elif group.get_report().report_type == ReportType.slow:
157 157 slow_reports += occurences
158 158
159 log_msg = 'LIMIT INFO: %s : %s error reports. %s slow_reports' % (
159 log_msg = "LIMIT INFO: %s : %s error reports. %s slow_reports" % (
160 160 resource,
161 161 error_reports,
162 slow_reports)
162 slow_reports,
163 )
163 164 logging.warning(log_msg)
164 165 threshold = 10
165 for event_type in ['error_report_alert', 'slow_report_alert']:
166 if (error_reports < resource.error_report_threshold and
167 event_type == 'error_report_alert'):
166 for event_type in ["error_report_alert", "slow_report_alert"]:
167 if (
168 error_reports < resource.error_report_threshold
169 and event_type == "error_report_alert"
170 ):
168 171 continue
169 elif (slow_reports <= resource.slow_report_threshold and
170 event_type == 'slow_report_alert'):
172 elif (
173 slow_reports <= resource.slow_report_threshold
174 and event_type == "slow_report_alert"
175 ):
171 176 continue
172 if event_type == 'error_report_alert':
177 if event_type == "error_report_alert":
173 178 amount = error_reports
174 179 threshold = resource.error_report_threshold
175 elif event_type == 'slow_report_alert':
180 elif event_type == "slow_report_alert":
176 181 amount = slow_reports
177 182 threshold = resource.slow_report_threshold
178 183
179 event = EventService.for_resource([resource.resource_id],
180 event_type=Event.types[
181 event_type],
182 status=Event.statuses['active'])
184 event = EventService.for_resource(
185 [resource.resource_id],
186 event_type=Event.types[event_type],
187 status=Event.statuses["active"],
188 )
183 189 if event.first():
184 log.info('ALERT: PROGRESS: %s %s' % (event_type, resource))
190 log.info("ALERT: PROGRESS: %s %s" % (event_type, resource))
185 191 else:
186 log.warning('ALERT: OPEN: %s %s' % (event_type, resource))
187 new_event = Event(resource_id=resource.resource_id,
188 event_type=Event.types[event_type],
189 status=Event.statuses['active'],
190 values={'reports': amount,
191 'threshold': threshold})
192 log.warning("ALERT: OPEN: %s %s" % (event_type, resource))
193 new_event = Event(
194 resource_id=resource.resource_id,
195 event_type=Event.types[event_type],
196 status=Event.statuses["active"],
197 values={"reports": amount, "threshold": threshold},
198 )
192 199 db_session.add(new_event)
193 200 new_event.send_alerts(request=request, resource=resource)
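The control flow of check_for_groups_alert condenses to: tally occurrences per report type, skip types under their thresholds, skip types that already have an active event, and open (and alert on) the rest. A compact sketch of just that decision logic (the names and the uniform <= comparison are illustrative simplifications; the real code compares the two types slightly differently):

def check_alert(error_reports, slow_reports, error_threshold, slow_threshold,
                active_events):
    """Return the list of alert events that should be opened."""
    to_open = []
    counts = {
        "error_report_alert": (error_reports, error_threshold),
        "slow_report_alert": (slow_reports, slow_threshold),
    }
    for event_type, (amount, threshold) in counts.items():
        if amount <= threshold:
            continue  # under the limit, nothing to do
        if event_type in active_events:
            continue  # an open alert already tracks this condition
        to_open.append((event_type, {"reports": amount, "threshold": threshold}))
    return to_open


print(check_alert(25, 3, 10, 10, active_events=set()))
# -> [('error_report_alert', {'reports': 25, 'threshold': 10})]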
@@ -21,7 +21,6 b' from appenlight.models.services.base import BaseService'
21 21
22 22
23 23 class ApplicationPostprocessConfService(BaseService):
24
25 24 @classmethod
26 25 def by_pkey(cls, pkey, db_session=None):
27 26 db_session = get_db_session(db_session)
@@ -40,9 +40,10 b' class ConfigService(BaseService):'
40 40 if pairs:
41 41 conditions = []
42 42 for pair in pairs:
43 conditions.append(sa.and_(
44 Config.key == pair['key'],
45 Config.section == pair['section'])
43 conditions.append(
44 sa.and_(
45 Config.key == pair["key"], Config.section == pair["section"]
46 )
46 47 )
47 48
48 49 query = query.filter(sa.or_(*conditions))
@@ -57,13 +58,15 b' class ConfigService(BaseService):'
57 58 return config
58 59
59 60 @classmethod
60 def by_key_and_section(cls, key, section, auto_create=False,
61 default_value=None, db_session=None):
61 def by_key_and_section(
62 cls, key, section, auto_create=False, default_value=None, db_session=None
63 ):
62 64 db_session = get_db_session(db_session)
63 65 registry = get_current_registry()
64 66
65 67 @registry.cache_regions.memory_min_1.cache_on_arguments(
66 namespace='ConfigService.by_key_and_section')
68 namespace="ConfigService.by_key_and_section"
69 )
67 70 def cached(key, section):
68 71 query = db_session.query(Config).filter(Config.key == key)
69 72 query = query.filter(Config.section == section)
@@ -76,8 +79,7 b' class ConfigService(BaseService):'
76 79 if config:
77 80 config = db_session.merge(config, load=False)
78 81 if config is None and auto_create:
79 config = ConfigService.create_config(key, section,
80 value=default_value)
82 config = ConfigService.create_config(key, section, value=default_value)
81 83 cached.invalidate(key, section)
82 84 return config
83 85
@@ -87,14 +89,28 b' class ConfigService(BaseService):'
87 89 Will add fresh default config values to database if no keys are found
88 90 :return:
89 91 """
90 log.info('Checking/setting default values')
91 self.by_key_and_section('template_footer_html', 'global',
92 default_value='', auto_create=True)
93 self.by_key_and_section('list_groups_to_non_admins', 'global',
94 default_value=True, auto_create=True)
95 self.by_key_and_section('per_application_reports_rate_limit', 'global',
96 default_value=2000, auto_create=True)
97 self.by_key_and_section('per_application_logs_rate_limit', 'global',
98 default_value=100000, auto_create=True)
99 self.by_key_and_section('per_application_metrics_rate_limit', 'global',
100 default_value=100000, auto_create=True)
92 log.info("Checking/setting default values")
93 self.by_key_and_section(
94 "template_footer_html", "global", default_value="", auto_create=True
95 )
96 self.by_key_and_section(
97 "list_groups_to_non_admins", "global", default_value=True, auto_create=True
98 )
99 self.by_key_and_section(
100 "per_application_reports_rate_limit",
101 "global",
102 default_value=2000,
103 auto_create=True,
104 )
105 self.by_key_and_section(
106 "per_application_logs_rate_limit",
107 "global",
108 default_value=100000,
109 auto_create=True,
110 )
111 self.by_key_and_section(
112 "per_application_metrics_rate_limit",
113 "global",
114 default_value=100000,
115 auto_create=True,
116 )
@@ -26,10 +26,19 b' from appenlight.models.services.base import BaseService'
26 26
27 27 class EventService(BaseService):
28 28 @classmethod
29 def for_resource(cls, resource_ids, event_type=None, status=None,
30 since_when=None, limit=20, event_id=None,
31 target_uuid=None, order_by=None, or_target_user_id=None,
32 db_session=None):
29 def for_resource(
30 cls,
31 resource_ids,
32 event_type=None,
33 status=None,
34 since_when=None,
35 limit=20,
36 event_id=None,
37 target_uuid=None,
38 order_by=None,
39 or_target_user_id=None,
40 db_session=None,
41 ):
33 42 """
34 43 Fetches events including based on passed params OR if target_user_id
35 44 is present include events that just target this user
@@ -57,8 +66,7 b' class EventService(BaseService):'
57 66 if or_target_user_id:
58 67 or_cond.append(sa.or_(Event.target_user_id == or_target_user_id))
59 68
60 query = query.filter(sa.or_(sa.and_(*and_cond),
61 *or_cond))
69 query = query.filter(sa.or_(sa.and_(*and_cond), *or_cond))
62 70 if not order_by:
63 71 query = query.order_by(sa.desc(Event.start_date))
64 72 if limit:
@@ -67,8 +75,15 b' class EventService(BaseService):'
67 75 return query
68 76
69 77 @classmethod
70 def by_type_and_status(cls, event_types, status_types, since_when=None,
71 older_than=None, db_session=None, app_ids=None):
78 def by_type_and_status(
79 cls,
80 event_types,
81 status_types,
82 since_when=None,
83 older_than=None,
84 db_session=None,
85 app_ids=None,
86 ):
72 87 db_session = get_db_session(db_session)
73 88 query = db_session.query(Event)
74 89 query = query.filter(Event.event_type.in_(event_types))
@@ -84,26 +99,38 b' class EventService(BaseService):'
84 99 @classmethod
85 100 def latest_for_user(cls, user, db_session=None):
86 101 registry = get_current_registry()
87 resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types)
102 resources = UserService.resources_with_perms(
103 user, ["view"], resource_types=registry.resource_types
104 )
88 105 resource_ids = [r.resource_id for r in resources]
89 106 db_session = get_db_session(db_session)
90 107 return EventService.for_resource(
91 resource_ids, or_target_user_id=user.id, limit=10,
92 db_session=db_session)
108 resource_ids, or_target_user_id=user.id, limit=10, db_session=db_session
109 )
93 110
94 111 @classmethod
95 def get_paginator(cls, user, page=1, item_count=None, items_per_page=50,
96 order_by=None, filter_settings=None, db_session=None):
112 def get_paginator(
113 cls,
114 user,
115 page=1,
116 item_count=None,
117 items_per_page=50,
118 order_by=None,
119 filter_settings=None,
120 db_session=None,
121 ):
97 122 if not filter_settings:
98 123 filter_settings = {}
99 124 registry = get_current_registry()
100 resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types)
125 resources = UserService.resources_with_perms(
126 user, ["view"], resource_types=registry.resource_types
127 )
101 128 resource_ids = [r.resource_id for r in resources]
102 129 query = EventService.for_resource(
103 resource_ids, or_target_user_id=user.id, limit=100,
104 db_session=db_session)
130 resource_ids, or_target_user_id=user.id, limit=100, db_session=db_session
131 )
105 132
106 paginator = SqlalchemyOrmPage(query, page=page,
107 items_per_page=items_per_page,
108 **filter_settings)
133 paginator = SqlalchemyOrmPage(
134 query, page=page, items_per_page=items_per_page, **filter_settings
135 )
109 136 return paginator
@@ -16,18 +16,20 b''
16 16
17 17 from appenlight.models.group_resource_permission import GroupResourcePermission
18 18 from appenlight.models import get_db_session
19 from ziggurat_foundations.models.services.group_resource_permission import GroupResourcePermissionService
19 from ziggurat_foundations.models.services.group_resource_permission import (
20 GroupResourcePermissionService,
21 )
20 22
21 23
22 24 class GroupResourcePermissionService(GroupResourcePermissionService):
23 25 @classmethod
24 def by_resource_group_and_perm(cls, group_id, perm_name, resource_id,
25 db_session=None):
26 def by_resource_group_and_perm(
27 cls, group_id, perm_name, resource_id, db_session=None
28 ):
26 29 """ return all instances by user name, perm name and resource id """
27 30 db_session = get_db_session(db_session)
28 31 query = db_session.query(GroupResourcePermission)
29 32 query = query.filter(GroupResourcePermission.group_id == group_id)
30 query = query.filter(
31 GroupResourcePermission.resource_id == resource_id)
33 query = query.filter(GroupResourcePermission.resource_id == resource_id)
32 34 query = query.filter(GroupResourcePermission.perm_name == perm_name)
33 35 return query.first()
@@ -28,8 +28,7 b' log = logging.getLogger(__name__)'
28 28
29 29 class LogService(BaseService):
30 30 @classmethod
31 def get_logs(cls, resource_ids=None, filter_settings=None,
32 db_session=None):
31 def get_logs(cls, resource_ids=None, filter_settings=None, db_session=None):
33 32 # ensure we always have id's passed
34 33 if not resource_ids:
35 34 # raise Exception('No App ID passed')
@@ -37,18 +36,17 b' class LogService(BaseService):'
37 36 db_session = get_db_session(db_session)
38 37 q = db_session.query(Log)
39 38 q = q.filter(Log.resource_id.in_(resource_ids))
40 if filter_settings.get('start_date'):
41 q = q.filter(Log.timestamp >= filter_settings.get('start_date'))
42 if filter_settings.get('end_date'):
43 q = q.filter(Log.timestamp <= filter_settings.get('end_date'))
44 if filter_settings.get('log_level'):
45 q = q.filter(
46 Log.log_level == filter_settings.get('log_level').upper())
47 if filter_settings.get('request_id'):
48 request_id = filter_settings.get('request_id', '')
49 q = q.filter(Log.request_id == request_id.replace('-', ''))
50 if filter_settings.get('namespace'):
51 q = q.filter(Log.namespace == filter_settings.get('namespace'))
39 if filter_settings.get("start_date"):
40 q = q.filter(Log.timestamp >= filter_settings.get("start_date"))
41 if filter_settings.get("end_date"):
42 q = q.filter(Log.timestamp <= filter_settings.get("end_date"))
43 if filter_settings.get("log_level"):
44 q = q.filter(Log.log_level == filter_settings.get("log_level").upper())
45 if filter_settings.get("request_id"):
46 request_id = filter_settings.get("request_id", "")
47 q = q.filter(Log.request_id == request_id.replace("-", ""))
48 if filter_settings.get("namespace"):
49 q = q.filter(Log.namespace == filter_settings.get("namespace"))
52 50 q = q.order_by(sa.desc(Log.timestamp))
53 51 return q
54 52
@@ -60,20 +58,18 b' class LogService(BaseService):'
60 58 query = {
61 59 "query": {
62 60 "filtered": {
63 "filter": {
64 "and": [{"terms": {"resource_id": list(app_ids)}}]
65 }
61 "filter": {"and": [{"terms": {"resource_id": list(app_ids)}}]}
66 62 }
67 63 }
68 64 }
69 65
70 start_date = filter_settings.get('start_date')
71 end_date = filter_settings.get('end_date')
72 filter_part = query['query']['filtered']['filter']['and']
66 start_date = filter_settings.get("start_date")
67 end_date = filter_settings.get("end_date")
68 filter_part = query["query"]["filtered"]["filter"]["and"]
73 69
74 for tag in filter_settings.get('tags', []):
75 tag_values = [v.lower() for v in tag['value']]
76 key = "tags.%s.values" % tag['name'].replace('.', '_')
70 for tag in filter_settings.get("tags", []):
71 tag_values = [v.lower() for v in tag["value"]]
72 key = "tags.%s.values" % tag["name"].replace(".", "_")
77 73 filter_part.append({"terms": {key: tag_values}})
78 74
79 75 date_range = {"range": {"timestamp": {}}}
@@ -84,26 +80,21 b' class LogService(BaseService):'
84 80 if start_date or end_date:
85 81 filter_part.append(date_range)
86 82
87 levels = filter_settings.get('level')
83 levels = filter_settings.get("level")
88 84 if levels:
89 filter_part.append({"terms": {'log_level': levels}})
90 namespaces = filter_settings.get('namespace')
85 filter_part.append({"terms": {"log_level": levels}})
86 namespaces = filter_settings.get("namespace")
91 87 if namespaces:
92 filter_part.append({"terms": {'namespace': namespaces}})
88 filter_part.append({"terms": {"namespace": namespaces}})
93 89
94 request_ids = filter_settings.get('request_id')
90 request_ids = filter_settings.get("request_id")
95 91 if request_ids:
96 filter_part.append({"terms": {'request_id': request_ids}})
92 filter_part.append({"terms": {"request_id": request_ids}})
97 93
98 messages = filter_settings.get('message')
94 messages = filter_settings.get("message")
99 95 if messages:
100 query['query']['filtered']['query'] = {
101 'match': {
102 'message': {
103 'query': ' '.join(messages),
104 'operator': 'and'
105 }
106 }
96 query["query"]["filtered"]["query"] = {
97 "match": {"message": {"query": " ".join(messages), "operator": "and"}}
107 98 }
108 99 return query
109 100
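To make es_query_builder concrete, this is roughly the document it assembles for a simple search, in the Elasticsearch 1.x "filtered" syntax the code targets. The exact range keys live in context lines the diff hides, so treat gte here as an assumption:

# assuming filter_settings = {"start_date": "2018-05-01",
#                             "level": ["error"], "message": ["timeout"]}
# and app_ids = [1, 2]
es_query = {
    "query": {
        "filtered": {
            "filter": {
                "and": [
                    {"terms": {"resource_id": [1, 2]}},
                    {"range": {"timestamp": {"gte": "2018-05-01"}}},  # assumed key
                    {"terms": {"log_level": ["error"]}},
                ]
            },
            "query": {
                "match": {"message": {"query": "timeout", "operator": "and"}}
            },
        }
    }
}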
@@ -118,76 +109,96 b' class LogService(BaseService):'
118 109 "field": "timestamp",
119 110 "interval": "1h",
120 111 "min_doc_count": 0,
121 'extended_bounds': {
122 'max': filter_settings.get('end_date'),
123 'min': filter_settings.get('start_date')}
112 "extended_bounds": {
113 "max": filter_settings.get("end_date"),
114 "min": filter_settings.get("start_date"),
115 },
124 116 }
125 117 }
126 118 }
127 119 log.debug(es_query)
128 index_names = es_index_name_limiter(filter_settings.get('start_date'),
129 filter_settings.get('end_date'),
130 ixtypes=['logs'])
120 index_names = es_index_name_limiter(
121 filter_settings.get("start_date"),
122 filter_settings.get("end_date"),
123 ixtypes=["logs"],
124 )
131 125 if index_names:
132 126 results = Datastores.es.search(
133 body=es_query, index=index_names, doc_type='log', size=0)
127 body=es_query, index=index_names, doc_type="log", size=0
128 )
134 129 else:
135 130 results = []
136 131 return results
137 132
138 133 @classmethod
139 def get_search_iterator(cls, app_ids=None, page=1, items_per_page=50,
140 order_by=None, filter_settings=None, limit=None):
134 def get_search_iterator(
135 cls,
136 app_ids=None,
137 page=1,
138 items_per_page=50,
139 order_by=None,
140 filter_settings=None,
141 limit=None,
142 ):
141 143 if not app_ids:
142 144 return {}, 0
143 145
144 146 es_query = cls.es_query_builder(app_ids, filter_settings)
145 sort_query = {
146 "sort": [
147 {"timestamp": {"order": "desc"}}
148 ]
149 }
147 sort_query = {"sort": [{"timestamp": {"order": "desc"}}]}
150 148 es_query.update(sort_query)
151 149 log.debug(es_query)
152 150 es_from = (page - 1) * items_per_page
153 index_names = es_index_name_limiter(filter_settings.get('start_date'),
154 filter_settings.get('end_date'),
155 ixtypes=['logs'])
151 index_names = es_index_name_limiter(
152 filter_settings.get("start_date"),
153 filter_settings.get("end_date"),
154 ixtypes=["logs"],
155 )
156 156 if not index_names:
157 157 return {}, 0
158 158
159 results = Datastores.es.search(body=es_query, index=index_names,
160 doc_type='log', size=items_per_page,
161 from_=es_from)
162 if results['hits']['total'] > 5000:
159 results = Datastores.es.search(
160 body=es_query,
161 index=index_names,
162 doc_type="log",
163 size=items_per_page,
164 from_=es_from,
165 )
166 if results["hits"]["total"] > 5000:
163 167 count = 5000
164 168 else:
165 count = results['hits']['total']
166 return results['hits'], count
169 count = results["hits"]["total"]
170 return results["hits"], count
167 171
168 172 @classmethod
169 def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
170 items_per_page=50, order_by=None,
171 filter_settings=None,
172 exclude_columns=None, db_session=None):
173 def get_paginator_by_app_ids(
174 cls,
175 app_ids=None,
176 page=1,
177 item_count=None,
178 items_per_page=50,
179 order_by=None,
180 filter_settings=None,
181 exclude_columns=None,
182 db_session=None,
183 ):
173 184 if not filter_settings:
174 185 filter_settings = {}
175 results, item_count = cls.get_search_iterator(app_ids, page,
176 items_per_page, order_by,
177 filter_settings)
178 paginator = paginate.Page([],
179 item_count=item_count,
180 items_per_page=items_per_page,
181 **filter_settings)
182 ordered_ids = tuple(item['_source']['pg_id']
183 for item in results.get('hits', []))
186 results, item_count = cls.get_search_iterator(
187 app_ids, page, items_per_page, order_by, filter_settings
188 )
189 paginator = paginate.Page(
190 [], item_count=item_count, items_per_page=items_per_page, **filter_settings
191 )
192 ordered_ids = tuple(
193 item["_source"]["pg_id"] for item in results.get("hits", [])
194 )
184 195
185 196 sorted_instance_list = []
186 197 if ordered_ids:
187 198 db_session = get_db_session(db_session)
188 199 query = db_session.query(Log)
189 200 query = query.filter(Log.log_id.in_(ordered_ids))
190 query = query.order_by(sa.desc('timestamp'))
201 query = query.order_by(sa.desc("timestamp"))
191 202 sa_items = query.all()
192 203 # resort by score
193 204 for i_id in ordered_ids:
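The pagination above ends with a pattern worth spelling out: Elasticsearch supplies the page of matching ids in the desired order, a single SQL IN query loads those rows, and the rows are re-sorted back into the search order. The resort step in isolation (plain dicts stand in for Log rows):

# ids as returned by the search engine, in the order we want to keep
ordered_ids = ("log-3", "log-1", "log-2")

# rows as returned by the database, in arbitrary order
sa_items = [
    {"log_id": "log-1", "message": "a"},
    {"log_id": "log-2", "message": "b"},
    {"log_id": "log-3", "message": "c"},
]

# resort by the search-engine ordering, as the service does
by_id = {item["log_id"]: item for item in sa_items}
sorted_instance_list = [by_id[i] for i in ordered_ids if i in by_id]
print([item["log_id"] for item in sorted_instance_list])
# -> ['log-3', 'log-1', 'log-2']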
@@ -198,14 +209,14 b' class LogService(BaseService):'
198 209 return paginator
199 210
200 211 @classmethod
201 def query_by_primary_key_and_namespace(cls, list_of_pairs,
202 db_session=None):
212 def query_by_primary_key_and_namespace(cls, list_of_pairs, db_session=None):
203 213 db_session = get_db_session(db_session)
204 214 list_of_conditions = []
205 215 query = db_session.query(Log)
206 216 for pair in list_of_pairs:
207 list_of_conditions.append(sa.and_(
208 Log.primary_key == pair['pk'], Log.namespace == pair['ns']))
217 list_of_conditions.append(
218 sa.and_(Log.primary_key == pair["pk"], Log.namespace == pair["ns"])
219 )
209 220 query = query.filter(sa.or_(*list_of_conditions))
210 221 query = query.order_by(sa.asc(Log.timestamp), sa.asc(Log.log_id))
211 222 return query
@@ -38,8 +38,9 b' class PluginConfigService(BaseService):'
38 38 return query.first()
39 39
40 40 @classmethod
41 def by_query(cls, resource_id=None, plugin_name=None,
42 section=None, db_session=None):
41 def by_query(
42 cls, resource_id=None, plugin_name=None, section=None, db_session=None
43 ):
43 44 db_session = get_db_session(db_session)
44 45
45 46 query = db_session.query(PluginConfig)
@@ -37,21 +37,24 b' class ReportService(BaseService):'
37 37 return q
38 38
39 39 @classmethod
40 def generate_stat_rows(cls, report, resource, report_group, occurences=1,
41 db_session=None):
40 def generate_stat_rows(
41 cls, report, resource, report_group, occurences=1, db_session=None
42 ):
42 43 """
43 44 Generates timeseries for this report's group
44 45 """
45 46 db_session = get_db_session(db_session)
46 stats = ReportStat(resource_id=report.resource_id,
47 group_id=report_group.id,
48 start_interval=report.start_time,
49 owner_user_id=resource.owner_user_id,
50 server_name=report.tags.get('server_name'),
51 view_name=report.tags.get('view_name'),
52 type=report.report_type,
53 occurences=occurences,
54 duration=report.duration)
47 stats = ReportStat(
48 resource_id=report.resource_id,
49 group_id=report_group.id,
50 start_interval=report.start_time,
51 owner_user_id=resource.owner_user_id,
52 server_name=report.tags.get("server_name"),
53 view_name=report.tags.get("view_name"),
54 type=report.report_type,
55 occurences=occurences,
56 duration=report.duration,
57 )
55 58 db_session.add(stats)
56 59 db_session.flush()
57 60 return stats
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff