black: reformat source
@@ -4,12 +4,12 b' import re'
4 4 from setuptools import setup, find_packages
5 5
6 6 here = os.path.abspath(os.path.dirname(__file__))
7 README = open(os.path.join(here, 'README.rst')).read()
8 CHANGES = open(os.path.join(here, 'CHANGELOG.rst')).read()
7 README = open(os.path.join(here, "README.rst")).read()
8 CHANGES = open(os.path.join(here, "CHANGELOG.rst")).read()
9 9
10 REQUIREMENTS = open(os.path.join(here, 'requirements.txt')).readlines()
10 REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines()
11 11
12 compiled = re.compile('([^=><]*).*')
12 compiled = re.compile("([^=><]*).*")
13 13
14 14
15 15 def parse_req(req):
@@ -21,7 +21,8 b' requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]'
21 21
22 22 def _get_meta_var(name, data, callback_handler=None):
23 23 import re
24 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
24
25 matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
25 26 if matches:
26 27 if not callable(callback_handler):
27 28 callback_handler = lambda v: v
@@ -29,21 +30,22 b' def _get_meta_var(name, data, callback_handler=None):'
29 30 return callback_handler(eval(matches.groups()[0]))
30 31
31 32
32 with open(os.path.join(here, 'src', 'appenlight', '__init__.py'), 'r') as _meta:
33 with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta:
33 34 _metadata = _meta.read()
34 35
35 with open(os.path.join(here, 'VERSION'), 'r') as _meta_version:
36 with open(os.path.join(here, "VERSION"), "r") as _meta_version:
36 37 __version__ = _meta_version.read().strip()
37 38
38 __license__ = _get_meta_var('__license__', _metadata)
39 __author__ = _get_meta_var('__author__', _metadata)
40 __url__ = _get_meta_var('__url__', _metadata)
39 __license__ = _get_meta_var("__license__", _metadata)
40 __author__ = _get_meta_var("__author__", _metadata)
41 __url__ = _get_meta_var("__url__", _metadata)
41 42
42 found_packages = find_packages('src')
43 found_packages.append('appenlight.migrations.versions')
44 setup(name='appenlight',
45 description='appenlight',
46 long_description=README + '\n\n' + CHANGES,
43 found_packages = find_packages("src")
44 found_packages.append("appenlight.migrations.versions")
45 setup(
46 name="appenlight",
47 description="appenlight",
48 long_description=README + "\n\n" + CHANGES,
47 49 classifiers=[
48 50 "Programming Language :: Python",
49 51 "Framework :: Pylons",
@@ -54,28 +56,34 b" setup(name='appenlight',"
54 56 license=__license__,
55 57 author=__author__,
56 58 url=__url__,
57 keywords='web wsgi bfg pylons pyramid',
58 package_dir={'': 'src'},
59 keywords="web wsgi bfg pylons pyramid",
60 package_dir={"": "src"},
59 61 packages=found_packages,
60 62 include_package_data=True,
61 63 zip_safe=False,
62 test_suite='appenlight',
64 test_suite="appenlight",
63 65 install_requires=requires,
64 66 extras_require={
65 "dev": ["coverage", "pytest", "pyramid", "tox", "mock", "pytest-mock", "webtest"],
67 "dev": [
68 "coverage",
69 "pytest",
70 "pyramid",
71 "tox",
72 "mock",
73 "pytest-mock",
74 "webtest",
75 ],
66 76 "lint": ["black"],
67 77 },
68 78 entry_points={
69 'paste.app_factory': [
70 'main = appenlight:main'
79 "paste.app_factory": ["main = appenlight:main"],
80 "console_scripts": [
81 "appenlight-cleanup = appenlight.scripts.cleanup:main",
82 "appenlight-initializedb = appenlight.scripts.initialize_db:main",
83 "appenlight-migratedb = appenlight.scripts.migratedb:main",
84 "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main",
85 "appenlight-static = appenlight.scripts.static:main",
86 "appenlight-make-config = appenlight.scripts.make_config:main",
71 87 ],
72 'console_scripts': [
73 'appenlight-cleanup = appenlight.scripts.cleanup:main',
74 'appenlight-initializedb = appenlight.scripts.initialize_db:main',
75 'appenlight-migratedb = appenlight.scripts.migratedb:main',
76 'appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main',
77 'appenlight-static = appenlight.scripts.static:main',
78 'appenlight-make-config = appenlight.scripts.make_config:main',
79 ]
80 }
88 },
81 89 )
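
The setup.py above reads package metadata by scanning the source of src/appenlight/__init__.py with a regex and eval()-ing the right-hand side of each dunder assignment. A minimal, self-contained sketch of that pattern, using an inline sample string in place of the real file (ast.literal_eval would be the safer choice for untrusted input):

```python
import re


def get_meta_var(name, data):
    # Capture the right-hand side of e.g. __license__ = 'Apache 2.0'
    match = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
    if match:
        # eval() turns the captured source literal into a Python value
        return eval(match.groups()[0])


sample = "__license__ = 'Apache 2.0'\n__author__ = 'RhodeCode GmbH'\n"
print(get_meta_var("__license__", sample))  # -> Apache 2.0
print(get_meta_var("__author__", sample))   # -> RhodeCode GmbH
```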
@@ -38,15 +38,17 b' from redlock import Redlock'
38 38 from sqlalchemy import engine_from_config
39 39
40 40 from appenlight.celery import configure_celery
41 from appenlight.lib.configurator import (CythonCompatConfigurator,
42 register_appenlight_plugin)
41 from appenlight.lib.configurator import (
42 CythonCompatConfigurator,
43 register_appenlight_plugin,
44 )
43 45 from appenlight.lib import cache_regions
44 46 from appenlight.lib.ext_json import json
45 47 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
46 48
47 __license__ = 'Apache 2.0'
48 __author__ = 'RhodeCode GmbH'
49 __url__ = 'http://rhodecode.com'
49 __license__ = "Apache 2.0"
50 __author__ = "RhodeCode GmbH"
51 __url__ = "http://rhodecode.com"
50 52 __version__ = pkg_resources.get_distribution("appenlight").parsed_version
51 53
52 54 json_renderer = JSON(serializer=json.dumps, indent=4)
@@ -59,7 +61,7 b' def datetime_adapter(obj, request):'
59 61
60 62
61 63 def all_permissions_adapter(obj, request):
62 return '__all_permissions__'
64 return "__all_permissions__"
63 65
64 66
65 67 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
@@ -70,91 +72,109 b' def main(global_config, **settings):'
70 72 """ This function returns a Pyramid WSGI application.
71 73 """
72 74 auth_tkt_policy = AuthTktAuthenticationPolicy(
73 settings['authtkt.secret'],
74 hashalg='sha512',
75 settings["authtkt.secret"],
76 hashalg="sha512",
75 77 callback=groupfinder,
76 78 max_age=2592000,
77 secure=asbool(settings.get('authtkt.secure', 'false')))
78 auth_token_policy = AuthTokenAuthenticationPolicy(
79 callback=groupfinder
79 secure=asbool(settings.get("authtkt.secure", "false")),
80 80 )
81 auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder)
81 82 authorization_policy = ACLAuthorizationPolicy()
82 83 authentication_policy = AuthenticationStackPolicy()
83 authentication_policy.add_policy('auth_tkt', auth_tkt_policy)
84 authentication_policy.add_policy('auth_token', auth_token_policy)
84 authentication_policy.add_policy("auth_tkt", auth_tkt_policy)
85 authentication_policy.add_policy("auth_token", auth_token_policy)
85 86 # set crypto key
86 encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
87 encryption.ENCRYPTION_SECRET = settings.get("encryption_secret")
87 88 # import this later so encryption key can be monkeypatched
88 89 from appenlight.models import DBSession, register_datastores
89 90
90 91 # registration
91 settings['appenlight.disable_registration'] = asbool(
92 settings.get('appenlight.disable_registration'))
92 settings["appenlight.disable_registration"] = asbool(
93 settings.get("appenlight.disable_registration")
94 )
93 95
94 96 # update config with cometd info
95 settings['cometd_servers'] = {'server': settings['cometd.server'],
96 'secret': settings['cometd.secret']}
97 settings["cometd_servers"] = {
98 "server": settings["cometd.server"],
99 "secret": settings["cometd.secret"],
100 }
97 101
98 102 # Create the Pyramid Configurator.
99 settings['_mail_url'] = settings['mailing.app_url']
103 settings["_mail_url"] = settings["mailing.app_url"]
100 104 config = CythonCompatConfigurator(
101 105 settings=settings,
102 106 authentication_policy=authentication_policy,
103 107 authorization_policy=authorization_policy,
104 root_factory='appenlight.security.RootFactory',
105 default_permission='view')
108 root_factory="appenlight.security.RootFactory",
109 default_permission="view",
110 )
106 111 # custom registry variables
107 112
108 113 # resource type information
109 config.registry.resource_types = ['resource', 'application']
114 config.registry.resource_types = ["resource", "application"]
110 115 # plugin information
111 116 config.registry.appenlight_plugins = {}
112 117
113 config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN')
114 config.add_view_deriver('appenlight.predicates.csrf_view',
115 name='csrf_view')
118 config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN")
119 config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view")
116 120
117 121 # later, when config is available
118 dogpile_config = {'url': settings['redis.url'],
122 dogpile_config = {
123 "url": settings["redis.url"],
119 124 "redis_expiration_time": 86400,
120 "redis_distributed_lock": True}
125 "redis_distributed_lock": True,
126 }
121 127 cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
122 128 config.registry.cache_regions = cache_regions.regions
123 engine = engine_from_config(settings, 'sqlalchemy.',
124 json_serializer=json.dumps)
129 engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps)
125 130 DBSession.configure(bind=engine)
126 131
127 132 # json renderer that serializes datetime
128 config.add_renderer('json', json_renderer)
129 config.add_request_method('appenlight.lib.request.es_conn', 'es_conn', property=True)
130 config.add_request_method('appenlight.lib.request.get_user', 'user',
131 reify=True, property=True)
132 config.add_request_method('appenlight.lib.request.get_csrf_token',
133 'csrf_token', reify=True, property=True)
134 config.add_request_method('appenlight.lib.request.safe_json_body',
135 'safe_json_body', reify=True, property=True)
136 config.add_request_method('appenlight.lib.request.unsafe_json_body',
137 'unsafe_json_body', reify=True, property=True)
138 config.add_request_method('appenlight.lib.request.add_flash_to_headers',
139 'add_flash_to_headers')
140 config.add_request_method('appenlight.lib.request.get_authomatic',
141 'authomatic', reify=True)
142
143 config.include('pyramid_redis_sessions')
144 config.include('pyramid_tm')
145 config.include('pyramid_jinja2')
146 config.include('pyramid_mailer')
147 config.include('appenlight_client.ext.pyramid_tween')
148 config.include('ziggurat_foundations.ext.pyramid.sign_in')
149 es_server_list = aslist(settings['elasticsearch.nodes'])
150 redis_url = settings['redis.url']
151 log.warning('Elasticsearch server list: {}'.format(es_server_list))
152 log.warning('Redis server: {}'.format(redis_url))
133 config.add_renderer("json", json_renderer)
134 config.add_request_method(
135 "appenlight.lib.request.es_conn", "es_conn", property=True
136 )
137 config.add_request_method(
138 "appenlight.lib.request.get_user", "user", reify=True, property=True
139 )
140 config.add_request_method(
141 "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True
142 )
143 config.add_request_method(
144 "appenlight.lib.request.safe_json_body",
145 "safe_json_body",
146 reify=True,
147 property=True,
148 )
149 config.add_request_method(
150 "appenlight.lib.request.unsafe_json_body",
151 "unsafe_json_body",
152 reify=True,
153 property=True,
154 )
155 config.add_request_method(
156 "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers"
157 )
158 config.add_request_method(
159 "appenlight.lib.request.get_authomatic", "authomatic", reify=True
160 )
161
162 config.include("pyramid_redis_sessions")
163 config.include("pyramid_tm")
164 config.include("pyramid_jinja2")
165 config.include("pyramid_mailer")
166 config.include("appenlight_client.ext.pyramid_tween")
167 config.include("ziggurat_foundations.ext.pyramid.sign_in")
168 es_server_list = aslist(settings["elasticsearch.nodes"])
169 redis_url = settings["redis.url"]
170 log.warning("Elasticsearch server list: {}".format(es_server_list))
171 log.warning("Redis server: {}".format(redis_url))
153 172 config.registry.es_conn = Elasticsearch(es_server_list)
154 173 config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)
155 174
156 config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url'], ],
157 retry_count=0, retry_delay=0)
175 config.registry.redis_lockmgr = Redlock(
176 [settings["redis.redlock.url"]], retry_count=0, retry_delay=0
177 )
158 178 # mailer bw compat
159 179 config.registry.mailer = config.registry.getUtility(IMailer)
160 180
@@ -163,47 +183,56 b' def main(global_config, **settings):'
163 183 config.set_session_factory(session_factory)
164 184
165 185 # Configure renderers and event subscribers
166 config.add_jinja2_extension('jinja2.ext.loopcontrols')
167 config.add_jinja2_search_path('appenlight:templates')
186 config.add_jinja2_extension("jinja2.ext.loopcontrols")
187 config.add_jinja2_search_path("appenlight:templates")
168 188 # event subscribers
169 config.add_subscriber("appenlight.subscribers.application_created",
170 "pyramid.events.ApplicationCreated")
171 config.add_subscriber("appenlight.subscribers.add_renderer_globals",
172 "pyramid.events.BeforeRender")
173 config.add_subscriber('appenlight.subscribers.new_request',
174 'pyramid.events.NewRequest')
175 config.add_view_predicate('context_type_class',
176 'appenlight.predicates.contextTypeClass')
177
178 register_datastores(es_conn=config.registry.es_conn,
189 config.add_subscriber(
190 "appenlight.subscribers.application_created",
191 "pyramid.events.ApplicationCreated",
192 )
193 config.add_subscriber(
194 "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender"
195 )
196 config.add_subscriber(
197 "appenlight.subscribers.new_request", "pyramid.events.NewRequest"
198 )
199 config.add_view_predicate(
200 "context_type_class", "appenlight.predicates.contextTypeClass"
201 )
202
203 register_datastores(
204 es_conn=config.registry.es_conn,
179 205 redis_conn=config.registry.redis_conn,
180 redis_lockmgr=config.registry.redis_lockmgr)
206 redis_lockmgr=config.registry.redis_lockmgr,
207 )
181 208
182 209 # base stuff and scan
183 210
184 211 # need to ensure webassets exists otherwise config.override_asset()
185 212 # throws exception
186 if not os.path.exists(settings['webassets.dir']):
187 os.mkdir(settings['webassets.dir'])
188 config.add_static_view(path='appenlight:webassets',
189 name='static', cache_max_age=3600)
190 config.override_asset(to_override='appenlight:webassets/',
191 override_with=settings['webassets.dir'])
192
193 config.include('appenlight.views')
194 config.include('appenlight.views.admin')
195 config.scan(ignore=['appenlight.migrations', 'appenlight.scripts',
196 'appenlight.tests'])
197
198 config.add_directive('register_appenlight_plugin',
199 register_appenlight_plugin)
200
201 for entry_point in iter_entry_points(group='appenlight.plugins'):
213 if not os.path.exists(settings["webassets.dir"]):
214 os.mkdir(settings["webassets.dir"])
215 config.add_static_view(
216 path="appenlight:webassets", name="static", cache_max_age=3600
217 )
218 config.override_asset(
219 to_override="appenlight:webassets/", override_with=settings["webassets.dir"]
220 )
221
222 config.include("appenlight.views")
223 config.include("appenlight.views.admin")
224 config.scan(
225 ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"]
226 )
227
228 config.add_directive("register_appenlight_plugin", register_appenlight_plugin)
229
230 for entry_point in iter_entry_points(group="appenlight.plugins"):
202 231 plugin = entry_point.load()
203 232 plugin.includeme(config)
204 233
205 234 # include other appenlight plugins explicitly if needed
206 includes = aslist(settings.get('appenlight.includes', []))
235 includes = aslist(settings.get("appenlight.includes", []))
207 236 for inc in includes:
208 237 config.include(inc)
209 238
@@ -211,8 +240,8 b' def main(global_config, **settings):'
211 240
212 241 def pre_commit():
213 242 jinja_env = config.get_jinja2_environment()
214 jinja_env.filters['tojson'] = json.dumps
215 jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe
243 jinja_env.filters["tojson"] = json.dumps
244 jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe
216 245
217 246 config.action(None, pre_commit, order=PHASE3_CONFIG + 999)
218 247
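
main() wires up a JSON renderer that can serialize datetimes via Pyramid's adapter mechanism. A sketch of that mechanism in isolation, assuming Pyramid is installed; invoking the renderer by hand with a system dict of {"request": None} is purely for illustration, since Pyramid normally does this during response rendering:

```python
import datetime
import json

from pyramid.renderers import JSON

json_renderer = JSON(serializer=json.dumps, indent=4)


def datetime_adapter(obj, request):
    # Called by the renderer whenever json.dumps hits a datetime
    return obj.isoformat()


json_renderer.add_adapter(datetime.datetime, datetime_adapter)

# Pyramid would call the renderer factory itself; we do it by hand here
render = json_renderer(None)
print(render({"now": datetime.datetime(2019, 1, 1, 12, 0)}, {"request": None}))
```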
@@ -34,15 +34,23 b' from appenlight_client.ext.celery import register_signals'
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 register('date_json', json_dumps, json_loads,
38 content_type='application/x-date_json',
39 content_encoding='utf-8')
37 register(
38 "date_json",
39 json_dumps,
40 json_loads,
41 content_type="application/x-date_json",
42 content_encoding="utf-8",
43 )
40 44
41 45 celery = Celery()
42 46
43 celery.user_options['preload'].add(
44 Option('--ini', dest='ini', default=None,
45 help='Specifies pyramid configuration file location.')
47 celery.user_options["preload"].add(
48 Option(
49 "--ini",
50 dest="ini",
51 default=None,
52 help="Specifies pyramid configuration file location.",
53 )
46 54 )
47 55
48 56
@@ -51,19 +59,21 b' def on_preload_parsed(options, **kwargs):'
51 59 """
52 60 This actually configures celery from pyramid config file
53 61 """
54 celery.conf['INI_PYRAMID'] = options['ini']
62 celery.conf["INI_PYRAMID"] = options["ini"]
55 63 import appenlight_client.client as e_client
56 ini_location = options['ini']
64
65 ini_location = options["ini"]
57 66 if not ini_location:
58 raise Exception('You need to pass pyramid ini location using '
59 '--ini=filename.ini argument to the worker')
67 raise Exception(
68 "You need to pass pyramid ini location using "
69 "--ini=filename.ini argument to the worker"
70 )
60 71 env = bootstrap(ini_location[0])
61 api_key = env['request'].registry.settings['appenlight.api_key']
62 tr_config = env['request'].registry.settings.get(
63 'appenlight.transport_config')
64 CONFIG = e_client.get_config({'appenlight.api_key': api_key})
72 api_key = env["request"].registry.settings["appenlight.api_key"]
73 tr_config = env["request"].registry.settings.get("appenlight.transport_config")
74 CONFIG = e_client.get_config({"appenlight.api_key": api_key})
65 75 if tr_config:
66 CONFIG['appenlight.transport_config'] = tr_config
76 CONFIG["appenlight.transport_config"] = tr_config
67 77 APPENLIGHT_CLIENT = e_client.Client(CONFIG)
68 78 # log.addHandler(APPENLIGHT_CLIENT.log_handler)
69 79 register_signals(APPENLIGHT_CLIENT)
@@ -71,101 +81,101 b' def on_preload_parsed(options, **kwargs):'
71 81
72 82
73 83 celery_config = {
74 'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
75 'CELERYD_TASK_TIME_LIMIT': 60,
76 'CELERYD_MAX_TASKS_PER_CHILD': 1000,
77 'CELERY_IGNORE_RESULT': True,
78 'CELERY_ACCEPT_CONTENT': ['date_json'],
79 'CELERY_TASK_SERIALIZER': 'date_json',
80 'CELERY_RESULT_SERIALIZER': 'date_json',
81 'BROKER_URL': None,
82 'CELERYD_CONCURRENCY': None,
83 'CELERY_TIMEZONE': None,
84 'CELERYBEAT_SCHEDULE': {
85 'alerting_reports': {
86 'task': 'appenlight.celery.tasks.alerting_reports',
87 'schedule': timedelta(seconds=60)
84 "CELERY_IMPORTS": ["appenlight.celery.tasks"],
85 "CELERYD_TASK_TIME_LIMIT": 60,
86 "CELERYD_MAX_TASKS_PER_CHILD": 1000,
87 "CELERY_IGNORE_RESULT": True,
88 "CELERY_ACCEPT_CONTENT": ["date_json"],
89 "CELERY_TASK_SERIALIZER": "date_json",
90 "CELERY_RESULT_SERIALIZER": "date_json",
91 "BROKER_URL": None,
92 "CELERYD_CONCURRENCY": None,
93 "CELERY_TIMEZONE": None,
94 "CELERYBEAT_SCHEDULE": {
95 "alerting_reports": {
96 "task": "appenlight.celery.tasks.alerting_reports",
97 "schedule": timedelta(seconds=60),
98 },
99 "close_alerts": {
100 "task": "appenlight.celery.tasks.close_alerts",
101 "schedule": timedelta(seconds=60),
102 },
88 103 },
89 'close_alerts': {
90 'task': 'appenlight.celery.tasks.close_alerts',
91 'schedule': timedelta(seconds=60)
92 }
93 }
94 104 }
95 105 celery.config_from_object(celery_config)
96 106
97 107
98 108 def configure_celery(pyramid_registry):
99 109 settings = pyramid_registry.settings
100 celery_config['BROKER_URL'] = settings['celery.broker_url']
101 celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
102 celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
110 celery_config["BROKER_URL"] = settings["celery.broker_url"]
111 celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"]
112 celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"]
103 113
104 notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
114 notifications_seconds = int(
115 settings.get("tasks.notifications_reports.interval", 60)
116 )
105 117
106 celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
107 'task': 'appenlight.celery.tasks.notifications_reports',
108 'schedule': timedelta(seconds=notifications_seconds)
118 celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = {
119 "task": "appenlight.celery.tasks.notifications_reports",
120 "schedule": timedelta(seconds=notifications_seconds),
109 121 }
110 122
111 celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
112 'task': 'appenlight.celery.tasks.daily_digest',
113 'schedule': crontab(minute=1, hour='4,12,20')
123 celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = {
124 "task": "appenlight.celery.tasks.daily_digest",
125 "schedule": crontab(minute=1, hour="4,12,20"),
114 126 }
115 127
116 if asbool(settings.get('celery.always_eager')):
117 celery_config['CELERY_ALWAYS_EAGER'] = True
118 celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
128 if asbool(settings.get("celery.always_eager")):
129 celery_config["CELERY_ALWAYS_EAGER"] = True
130 celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True
119 131
120 132 for plugin in pyramid_registry.appenlight_plugins.values():
121 if plugin.get('celery_tasks'):
122 celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
123 if plugin.get('celery_beats'):
124 for name, config in plugin['celery_beats']:
125 celery_config['CELERYBEAT_SCHEDULE'][name] = config
133 if plugin.get("celery_tasks"):
134 celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"])
135 if plugin.get("celery_beats"):
136 for name, config in plugin["celery_beats"]:
137 celery_config["CELERYBEAT_SCHEDULE"][name] = config
126 138 celery.config_from_object(celery_config)
127 139
128 140
129 141 @task_prerun.connect
130 142 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
131 if hasattr(celery, 'pyramid'):
143 if hasattr(celery, "pyramid"):
132 144 env = celery.pyramid
133 env = prepare(registry=env['request'].registry)
134 proper_base_url = env['request'].registry.settings['mailing.app_url']
135 tmp_req = Request.blank('/', base_url=proper_base_url)
145 env = prepare(registry=env["request"].registry)
146 proper_base_url = env["request"].registry.settings["mailing.app_url"]
147 tmp_req = Request.blank("/", base_url=proper_base_url)
136 148 # ensure tasks generate urls for the right domain from config
137 env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
138 env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
139 env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
140 env['request'].environ['wsgi.url_scheme'] = \
141 tmp_req.environ['wsgi.url_scheme']
149 env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"]
150 env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"]
151 env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"]
152 env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"]
142 153 get_current_request().tm.begin()
143 154
144 155
145 156 @task_success.connect
146 157 def task_success_signal(result, **kwargs):
147 158 get_current_request().tm.commit()
148 if hasattr(celery, 'pyramid'):
159 if hasattr(celery, "pyramid"):
149 160 celery.pyramid["closer"]()
150 161
151 162
152 163 @task_retry.connect
153 164 def task_retry_signal(request, reason, einfo, **kwargs):
154 165 get_current_request().tm.abort()
155 if hasattr(celery, 'pyramid'):
166 if hasattr(celery, "pyramid"):
156 167 celery.pyramid["closer"]()
157 168
158 169
159 170 @task_failure.connect
160 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
161 **kwaargs):
171 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs):
162 172 get_current_request().tm.abort()
163 if hasattr(celery, 'pyramid'):
173 if hasattr(celery, "pyramid"):
164 174 celery.pyramid["closer"]()
165 175
166 176
167 177 @task_revoked.connect
168 178 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
169 179 get_current_request().tm.abort()
170 if hasattr(celery, 'pyramid'):
180 if hasattr(celery, "pyramid"):
171 181 celery.pyramid["closer"]()
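
The register() call at the top of this file teaches kombu (celery's messaging layer) a named serializer, which the CELERY_ACCEPT_CONTENT / CELERY_TASK_SERIALIZER settings then select by name. A minimal round-trip sketch, assuming kombu is installed, with plain json standing in for the date-aware encoder from ext_json:

```python
import json

from kombu.serialization import dumps, loads, register

register(
    "demo_json",
    json.dumps,
    json.loads,
    content_type="application/x-demo_json",
    content_encoding="utf-8",
)

# dumps() returns the metadata a worker needs to pick the right decoder
content_type, encoding, payload = dumps({"answer": 42}, serializer="demo_json")
print(loads(payload, content_type, encoding))  # -> {'answer': 42}
```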
@@ -17,38 +17,29 b''
17 17 import json
18 18 from datetime import datetime, date, timedelta
19 19
20 DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
20 DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
21 21
22 22
23 23 class DateEncoder(json.JSONEncoder):
24 24 def default(self, obj):
25 25 if isinstance(obj, datetime):
26 return {
27 '__type__': '__datetime__',
28 'iso': obj.strftime(DATE_FORMAT)
29 }
26 return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
30 27 elif isinstance(obj, date):
31 return {
32 '__type__': '__date__',
33 'iso': obj.strftime(DATE_FORMAT)
34 }
28 return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)}
35 29 elif isinstance(obj, timedelta):
36 return {
37 '__type__': '__timedelta__',
38 'seconds': obj.total_seconds()
39 }
30 return {"__type__": "__timedelta__", "seconds": obj.total_seconds()}
40 31 else:
41 32 return json.JSONEncoder.default(self, obj)
42 33
43 34
44 35 def date_decoder(dct):
45 if '__type__' in dct:
46 if dct['__type__'] == '__datetime__':
47 return datetime.strptime(dct['iso'], DATE_FORMAT)
48 elif dct['__type__'] == '__date__':
49 return datetime.strptime(dct['iso'], DATE_FORMAT).date()
50 elif dct['__type__'] == '__timedelta__':
51 return timedelta(seconds=dct['seconds'])
36 if "__type__" in dct:
37 if dct["__type__"] == "__datetime__":
38 return datetime.strptime(dct["iso"], DATE_FORMAT)
39 elif dct["__type__"] == "__date__":
40 return datetime.strptime(dct["iso"], DATE_FORMAT).date()
41 elif dct["__type__"] == "__timedelta__":
42 return timedelta(seconds=dct["seconds"])
52 43 return dct
53 44
54 45
@@ -57,4 +48,4 b' def json_dumps(obj):'
57 48
58 49
59 50 def json_loads(obj):
60 return json.loads(obj.decode('utf8'), object_hook=date_decoder)
51 return json.loads(obj.decode("utf8"), object_hook=date_decoder)
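
DateEncoder and date_decoder give JSON a reversible encoding for temporal types by tagging them with a __type__ marker. A stdlib-only round trip showing just the datetime branch (the module above handles date and timedelta the same way):

```python
import json
from datetime import datetime

DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"


class DateEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            # Tag the value so the decoder knows to rebuild a datetime
            return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
        return json.JSONEncoder.default(self, obj)


def date_decoder(dct):
    if dct.get("__type__") == "__datetime__":
        return datetime.strptime(dct["iso"], DATE_FORMAT)
    return dct


payload = json.dumps({"when": datetime(2019, 1, 1, 12, 30)}, cls=DateEncoder)
print(json.loads(payload, object_hook=date_decoder))  # -> a real datetime again
```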
@@ -51,9 +51,11 b' from appenlight.lib.enums import ReportType'
51 51
52 52 log = get_task_logger(__name__)
53 53
54 sample_boundries = list(range(100, 1000, 100)) + \
55 list(range(1000, 10000, 1000)) + \
56 list(range(10000, 100000, 5000))
54 sample_boundries = (
55 list(range(100, 1000, 100))
56 + list(range(1000, 10000, 1000))
57 + list(range(10000, 100000, 5000))
58 )
57 59
58 60
59 61 def pick_sample(total_occurences, report_type=None):
@@ -70,9 +72,9 b' def pick_sample(total_occurences, report_type=None):'
70 72
71 73 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
72 74 def test_exception_task():
73 log.error('test celery log', extra={'location': 'celery'})
74 log.warning('test celery log', extra={'location': 'celery'})
75 raise Exception('Celery exception test')
75 log.error("test celery log", extra={"location": "celery"})
76 log.warning("test celery log", extra={"location": "celery"})
77 raise Exception("Celery exception test")
76 78
77 79
78 80 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
@@ -81,9 +83,9 b' def test_retry_exception_task():'
81 83 import time
82 84
83 85 time.sleep(1.3)
84 log.error('test retry celery log', extra={'location': 'celery'})
85 log.warning('test retry celery log', extra={'location': 'celery'})
86 raise Exception('Celery exception test')
86 log.error("test retry celery log", extra={"location": "celery"})
87 log.warning("test retry celery log", extra={"location": "celery"})
88 raise Exception("Celery exception test")
87 89 except Exception as exc:
88 90 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
89 91 raise
@@ -92,7 +94,7 b' def test_retry_exception_task():'
92 94
93 95 @celery.task(queue="reports", default_retry_delay=600, max_retries=144)
94 96 def add_reports(resource_id, request_params, dataset, **kwargs):
95 proto_version = parse_proto(request_params.get('protocol_version', ''))
97 proto_version = parse_proto(request_params.get("protocol_version", ""))
96 98 current_time = datetime.utcnow().replace(second=0, microsecond=0)
97 99 try:
98 100 # we will store solr docs here for single insert
@@ -114,22 +116,26 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
114 116 report_group = ReportGroupService.by_hash_and_resource(
115 117 report.resource_id,
116 118 report.grouping_hash,
117 since_when=datetime.utcnow().date().replace(day=1)
119 since_when=datetime.utcnow().date().replace(day=1),
118 120 )
119 occurences = report_data.get('occurences', 1)
121 occurences = report_data.get("occurences", 1)
120 122 if not report_group:
121 123 # total reports will be incremented a moment later
122 report_group = ReportGroup(grouping_hash=report.grouping_hash,
123 occurences=0, total_reports=0,
124 report_group = ReportGroup(
125 grouping_hash=report.grouping_hash,
126 occurences=0,
127 total_reports=0,
124 128 last_report=0,
125 129 priority=report.priority,
126 130 error=report.error,
127 first_timestamp=report.start_time)
131 first_timestamp=report.start_time,
132 )
128 133 report_group._skip_ft_index = True
129 134 report_group.report_type = report.report_type
130 135 report.report_group_time = report_group.first_timestamp
131 add_sample = pick_sample(report_group.occurences,
132 report_type=report_group.report_type)
136 add_sample = pick_sample(
137 report_group.occurences, report_type=report_group.report_type
138 )
133 139 if add_sample:
134 140 resource.report_groups.append(report_group)
135 141 report_group.reports.append(report)
@@ -144,28 +150,26 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
144 150 for s_call in slow_calls:
145 151 if s_call.partition_id not in es_slow_calls_docs:
146 152 es_slow_calls_docs[s_call.partition_id] = []
147 es_slow_calls_docs[s_call.partition_id].append(
148 s_call.es_doc())
153 es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc())
149 154 # try generating new stat rows if needed
150 155 else:
151 156 # required for postprocessing to not fail later
152 157 report.report_group = report_group
153 158
154 stat_row = ReportService.generate_stat_rows(
155 report, resource, report_group)
159 stat_row = ReportService.generate_stat_rows(report, resource, report_group)
156 160 if stat_row.partition_id not in es_reports_stats_rows:
157 161 es_reports_stats_rows[stat_row.partition_id] = []
158 es_reports_stats_rows[stat_row.partition_id].append(
159 stat_row.es_doc())
162 es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc())
160 163
161 164 # see if we should mark the 10th occurrence of a report
162 165 last_occurences_10 = int(math.floor(report_group.occurences / 10))
163 curr_occurences_10 = int(math.floor(
164 (report_group.occurences + report.occurences) / 10))
165 last_occurences_100 = int(
166 math.floor(report_group.occurences / 100))
167 curr_occurences_100 = int(math.floor(
168 (report_group.occurences + report.occurences) / 100))
166 curr_occurences_10 = int(
167 math.floor((report_group.occurences + report.occurences) / 10)
168 )
169 last_occurences_100 = int(math.floor(report_group.occurences / 100))
170 curr_occurences_100 = int(
171 math.floor((report_group.occurences + report.occurences) / 100)
172 )
169 173 notify_occurences_10 = last_occurences_10 != curr_occurences_10
170 174 notify_occurences_100 = last_occurences_100 != curr_occurences_100
171 175 report_group.occurences = ReportGroup.occurences + occurences
@@ -178,39 +182,47 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
178 182 if added_details:
179 183 report_group.total_reports = ReportGroup.total_reports + 1
180 184 report_group.last_report = report.id
181 report_group.set_notification_info(notify_10=notify_occurences_10,
182 notify_100=notify_occurences_100)
185 report_group.set_notification_info(
186 notify_10=notify_occurences_10, notify_100=notify_occurences_100
187 )
183 188 DBSession.flush()
184 189 report_group.get_report().notify_channel(report_group)
185 190 if report_group.partition_id not in es_report_group_docs:
186 191 es_report_group_docs[report_group.partition_id] = []
187 192 es_report_group_docs[report_group.partition_id].append(
188 report_group.es_doc())
193 report_group.es_doc()
194 )
189 195
190 action = 'REPORT'
191 log_msg = '%s: %s %s, client: %s, proto: %s' % (
196 action = "REPORT"
197 log_msg = "%s: %s %s, client: %s, proto: %s" % (
192 198 action,
193 report_data.get('http_status', 'unknown'),
199 report_data.get("http_status", "unknown"),
194 200 str(resource),
195 report_data.get('client'),
196 proto_version)
201 report_data.get("client"),
202 proto_version,
203 )
197 204 log.info(log_msg)
198 205 total_reports = len(dataset)
199 206 redis_pipeline = Datastores.redis.pipeline(transaction=False)
200 key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
207 key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
201 208 redis_pipeline.incr(key, total_reports)
202 209 redis_pipeline.expire(key, 3600 * 24)
203 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
204 resource.owner_user_id, current_time)
210 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
211 resource.owner_user_id, current_time
212 )
205 213 redis_pipeline.incr(key, total_reports)
206 214 redis_pipeline.expire(key, 3600)
207 key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format(
208 resource_id, current_time.replace(minute=0))
215 key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
216 resource_id, current_time.replace(minute=0)
217 )
209 218 redis_pipeline.incr(key, total_reports)
210 219 redis_pipeline.expire(key, 3600 * 24 * 7)
211 220 redis_pipeline.sadd(
212 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
213 current_time.replace(minute=0)), resource_id)
221 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
222 current_time.replace(minute=0)
223 ),
224 resource_id,
225 )
214 226 redis_pipeline.execute()
215 227
216 228 add_reports_es(es_report_group_docs, es_report_docs)
@@ -227,11 +239,11 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
227 239 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
228 240 def add_reports_es(report_group_docs, report_docs):
229 241 for k, v in report_group_docs.items():
230 to_update = {'_index': k, '_type': 'report_group'}
242 to_update = {"_index": k, "_type": "report_group"}
231 243 [i.update(to_update) for i in v]
232 244 elasticsearch.helpers.bulk(Datastores.es, v)
233 245 for k, v in report_docs.items():
234 to_update = {'_index': k, '_type': 'report'}
246 to_update = {"_index": k, "_type": "report"}
235 247 [i.update(to_update) for i in v]
236 248 elasticsearch.helpers.bulk(Datastores.es, v)
237 249
@@ -239,7 +251,7 b' def add_reports_es(report_group_docs, report_docs):'
239 251 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
240 252 def add_reports_slow_calls_es(es_docs):
241 253 for k, v in es_docs.items():
242 to_update = {'_index': k, '_type': 'log'}
254 to_update = {"_index": k, "_type": "log"}
243 255 [i.update(to_update) for i in v]
244 256 elasticsearch.helpers.bulk(Datastores.es, v)
245 257
@@ -247,14 +259,14 b' def add_reports_slow_calls_es(es_docs):'
247 259 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
248 260 def add_reports_stats_rows_es(es_docs):
249 261 for k, v in es_docs.items():
250 to_update = {'_index': k, '_type': 'log'}
262 to_update = {"_index": k, "_type": "log"}
251 263 [i.update(to_update) for i in v]
252 264 elasticsearch.helpers.bulk(Datastores.es, v)
253 265
254 266
255 267 @celery.task(queue="logs", default_retry_delay=600, max_retries=144)
256 268 def add_logs(resource_id, request_params, dataset, **kwargs):
257 proto_version = request_params.get('protocol_version')
269 proto_version = request_params.get("protocol_version")
258 270 current_time = datetime.utcnow().replace(second=0, microsecond=0)
259 271
260 272 try:
@@ -264,16 +276,15 b' def add_logs(resource_id, request_params, dataset, **kwargs):'
264 276 ns_pairs = []
265 277 for entry in dataset:
266 278 # gather pk and ns so we can remove older versions of row later
267 if entry['primary_key'] is not None:
268 ns_pairs.append({"pk": entry['primary_key'],
269 "ns": entry['namespace']})
279 if entry["primary_key"] is not None:
280 ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]})
270 281 log_entry = Log()
271 282 log_entry.set_data(entry, resource=resource)
272 283 log_entry._skip_ft_index = True
273 284 resource.logs.append(log_entry)
274 285 DBSession.flush()
275 286 # insert non pk rows first
276 if entry['primary_key'] is None:
287 if entry["primary_key"] is None:
277 288 es_docs[log_entry.partition_id].append(log_entry.es_doc())
278 289
279 290 # 2nd pass to delete all log entries from db for the same pk/ns pair
@@ -282,7 +293,8 b' def add_logs(resource_id, request_params, dataset, **kwargs):'
282 293 es_docs = collections.defaultdict(list)
283 294 es_docs_to_delete = collections.defaultdict(list)
284 295 found_pkey_logs = LogService.query_by_primary_key_and_namespace(
285 list_of_pairs=ns_pairs)
296 list_of_pairs=ns_pairs
297 )
286 298 log_dict = {}
287 299 for log_entry in found_pkey_logs:
288 300 log_key = (log_entry.primary_key, log_entry.namespace)
@@ -299,51 +311,58 b' def add_logs(resource_id, request_params, dataset, **kwargs):'
299 311 ids_to_delete.append(e.log_id)
300 312 es_docs_to_delete[e.partition_id].append(e.delete_hash)
301 313
302 es_docs_to_delete[log_entry.partition_id].append(
303 log_entry.delete_hash)
314 es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash)
304 315
305 316 es_docs[log_entry.partition_id].append(log_entry.es_doc())
306 317
307 318 if ids_to_delete:
308 query = DBSession.query(Log).filter(
309 Log.log_id.in_(ids_to_delete))
319 query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete))
310 320 query.delete(synchronize_session=False)
311 321 if es_docs_to_delete:
312 322 # batch this to avoid problems with default ES bulk limits
313 323 for es_index in es_docs_to_delete.keys():
314 324 for batch in in_batches(es_docs_to_delete[es_index], 20):
315 query = {"query": {'terms': {'delete_hash': batch}}}
325 query = {"query": {"terms": {"delete_hash": batch}}}
316 326
317 327 try:
318 328 Datastores.es.transport.perform_request(
319 "DELETE", '/{}/{}/_query'.format(es_index, 'log'), body=query)
329 "DELETE",
330 "/{}/{}/_query".format(es_index, "log"),
331 body=query,
332 )
320 333 except elasticsearch.exceptions.NotFoundError as exc:
321 msg = 'skipping index {}'.format(es_index)
334 msg = "skipping index {}".format(es_index)
322 335 log.info(msg)
323 336
324 337 total_logs = len(dataset)
325 338
326 log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
339 log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % (
327 340 str(resource),
328 341 total_logs,
329 proto_version)
342 proto_version,
343 )
330 344 log.info(log_msg)
331 345 # mark_changed(session)
332 346 redis_pipeline = Datastores.redis.pipeline(transaction=False)
333 key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
347 key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
334 348 redis_pipeline.incr(key, total_logs)
335 349 redis_pipeline.expire(key, 3600 * 24)
336 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
337 resource.owner_user_id, current_time)
350 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
351 resource.owner_user_id, current_time
352 )
338 353 redis_pipeline.incr(key, total_logs)
339 354 redis_pipeline.expire(key, 3600)
340 key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format(
341 resource_id, current_time.replace(minute=0))
355 key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format(
356 resource_id, current_time.replace(minute=0)
357 )
342 358 redis_pipeline.incr(key, total_logs)
343 359 redis_pipeline.expire(key, 3600 * 24 * 7)
344 360 redis_pipeline.sadd(
345 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
346 current_time.replace(minute=0)), resource_id)
361 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
362 current_time.replace(minute=0)
363 ),
364 resource_id,
365 )
347 366 redis_pipeline.execute()
348 367 add_logs_es(es_docs)
349 368 return True
@@ -357,7 +376,7 b' def add_logs(resource_id, request_params, dataset, **kwargs):'
357 376 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
358 377 def add_logs_es(es_docs):
359 378 for k, v in es_docs.items():
360 to_update = {'_index': k, '_type': 'log'}
379 to_update = {"_index": k, "_type": "log"}
361 380 [i.update(to_update) for i in v]
362 381 elasticsearch.helpers.bulk(Datastores.es, v)
363 382
@@ -371,45 +390,51 b' def add_metrics(resource_id, request_params, dataset, proto_version):'
371 390 es_docs = []
372 391 rows = []
373 392 for metric in dataset:
374 tags = dict(metric['tags'])
375 server_n = tags.get('server_name', metric['server_name']).lower()
376 tags['server_name'] = server_n or 'unknown'
393 tags = dict(metric["tags"])
394 server_n = tags.get("server_name", metric["server_name"]).lower()
395 tags["server_name"] = server_n or "unknown"
377 396 new_metric = Metric(
378 timestamp=metric['timestamp'],
397 timestamp=metric["timestamp"],
379 398 resource_id=resource.resource_id,
380 namespace=metric['namespace'],
381 tags=tags)
399 namespace=metric["namespace"],
400 tags=tags,
401 )
382 402 rows.append(new_metric)
383 403 es_docs.append(new_metric.es_doc())
384 404 session = DBSession()
385 405 session.bulk_save_objects(rows)
386 406 session.flush()
387 407
388 action = 'METRICS'
389 metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
408 action = "METRICS"
409 metrics_msg = "%s: %s, metrics: %s, proto:%s" % (
390 410 action,
391 411 str(resource),
392 412 len(dataset),
393 proto_version
413 proto_version,
394 414 )
395 415 log.info(metrics_msg)
396 416
397 417 mark_changed(session)
398 418 redis_pipeline = Datastores.redis.pipeline(transaction=False)
399 key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
419 key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
400 420 redis_pipeline.incr(key, len(rows))
401 421 redis_pipeline.expire(key, 3600 * 24)
402 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
403 resource.owner_user_id, current_time)
422 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
423 resource.owner_user_id, current_time
424 )
404 425 redis_pipeline.incr(key, len(rows))
405 426 redis_pipeline.expire(key, 3600)
406 key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format(
407 resource_id, current_time.replace(minute=0))
427 key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format(
428 resource_id, current_time.replace(minute=0)
429 )
408 430 redis_pipeline.incr(key, len(rows))
409 431 redis_pipeline.expire(key, 3600 * 24 * 7)
410 432 redis_pipeline.sadd(
411 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
412 current_time.replace(minute=0)), resource_id)
433 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
434 current_time.replace(minute=0)
435 ),
436 resource_id,
437 )
413 438 redis_pipeline.execute()
414 439 add_metrics_es(es_docs)
415 440 return True
@@ -423,8 +448,8 b' def add_metrics(resource_id, request_params, dataset, proto_version):'
423 448 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
424 449 def add_metrics_es(es_docs):
425 450 for doc in es_docs:
426 partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d')
427 Datastores.es.index(partition, 'log', doc)
451 partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d")
452 Datastores.es.index(partition, "log", doc)
428 453
429 454
430 455 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
@@ -435,10 +460,12 b' def check_user_report_notifications(resource_id):'
435 460 application = ApplicationService.by_id(resource_id)
436 461 if not application:
437 462 return
438 error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
439 ReportType.error, resource_id)
440 slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
441 ReportType.slow, resource_id)
463 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
464 ReportType.error, resource_id
465 )
466 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
467 ReportType.slow, resource_id
468 )
442 469 error_group_ids = Datastores.redis.smembers(error_key)
443 470 slow_group_ids = Datastores.redis.smembers(slow_key)
444 471 Datastores.redis.delete(error_key)
@@ -448,8 +475,7 b' def check_user_report_notifications(resource_id):'
448 475 group_ids = err_gids + slow_gids
449 476 occurence_dict = {}
450 477 for g_id in group_ids:
451 key = REDIS_KEYS['counters']['report_group_occurences'].format(
452 g_id)
478 key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id)
453 479 val = Datastores.redis.get(key)
454 480 Datastores.redis.delete(key)
455 481 if val:
@@ -460,14 +486,23 b' def check_user_report_notifications(resource_id):'
460 486 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
461 487
462 488 ApplicationService.check_for_groups_alert(
463 application, 'alert', report_groups=report_groups,
464 occurence_dict=occurence_dict)
465 users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
489 application,
490 "alert",
491 report_groups=report_groups,
492 occurence_dict=occurence_dict,
493 )
494 users = set(
495 [p.user for p in ResourceService.users_for_perm(application, "view")]
496 )
466 497 report_groups = report_groups.all()
467 498 for user in users:
468 UserService.report_notify(user, request, application,
499 UserService.report_notify(
500 user,
501 request,
502 application,
469 503 report_groups=report_groups,
470 occurence_dict=occurence_dict)
504 occurence_dict=occurence_dict,
505 )
471 506 for group in report_groups:
472 507 # marks report_groups as notified
473 508 if not group.notified:
@@ -485,12 +520,12 b' def check_alerts(resource_id):'
485 520 application = ApplicationService.by_id(resource_id)
486 521 if not application:
487 522 return
488 error_key = REDIS_KEYS[
489 'reports_to_notify_per_type_per_app_alerting'].format(
490 ReportType.error, resource_id)
491 slow_key = REDIS_KEYS[
492 'reports_to_notify_per_type_per_app_alerting'].format(
493 ReportType.slow, resource_id)
523 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
524 ReportType.error, resource_id
525 )
526 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
527 ReportType.slow, resource_id
528 )
494 529 error_group_ids = Datastores.redis.smembers(error_key)
495 530 slow_group_ids = Datastores.redis.smembers(slow_key)
496 531 Datastores.redis.delete(error_key)
@@ -500,9 +535,9 b' def check_alerts(resource_id):'
500 535 group_ids = err_gids + slow_gids
501 536 occurence_dict = {}
502 537 for g_id in group_ids:
503 key = REDIS_KEYS['counters'][
504 'report_group_occurences_alerting'].format(
505 g_id)
538 key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(
539 g_id
540 )
506 541 val = Datastores.redis.get(key)
507 542 Datastores.redis.delete(key)
508 543 if val:
@@ -513,8 +548,12 b' def check_alerts(resource_id):'
513 548 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
514 549
515 550 ApplicationService.check_for_groups_alert(
516 application, 'alert', report_groups=report_groups,
517 occurence_dict=occurence_dict, since_when=since_when)
551 application,
552 "alert",
553 report_groups=report_groups,
554 occurence_dict=occurence_dict,
555 since_when=since_when,
556 )
518 557 except Exception as exc:
519 558 print_traceback(log)
520 559 raise
@@ -522,21 +561,21 b' def check_alerts(resource_id):'
522 561
523 562 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
524 563 def close_alerts():
525 log.warning('Checking alerts')
564 log.warning("Checking alerts")
526 565 since_when = datetime.utcnow()
527 566 try:
528 event_types = [Event.types['error_report_alert'],
529 Event.types['slow_report_alert'], ]
530 statuses = [Event.statuses['active']]
567 event_types = [
568 Event.types["error_report_alert"],
569 Event.types["slow_report_alert"],
570 ]
571 statuses = [Event.statuses["active"]]
531 572 # get events older than 5 min
532 573 events = EventService.by_type_and_status(
533 event_types,
534 statuses,
535 older_than=(since_when - timedelta(minutes=5)))
574 event_types, statuses, older_than=(since_when - timedelta(minutes=5))
575 )
536 576 for event in events:
537 577 # see if we can close them
538 event.validate_or_close(
539 since_when=(since_when - timedelta(minutes=1)))
578 event.validate_or_close(since_when=(since_when - timedelta(minutes=1)))
540 579 except Exception as exc:
541 580 print_traceback(log)
542 581 raise
@@ -545,12 +584,18 b' def close_alerts():'
545 584 @celery.task(queue="default", default_retry_delay=600, max_retries=144)
546 585 def update_tag_counter(tag_name, tag_value, count):
547 586 try:
548 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
549 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
550 sa.types.TEXT))
551 query.update({'times_seen': Tag.times_seen + count,
552 'last_timestamp': datetime.utcnow()},
553 synchronize_session=False)
587 query = (
588 DBSession.query(Tag)
589 .filter(Tag.name == tag_name)
590 .filter(
591 sa.cast(Tag.value, sa.types.TEXT)
592 == sa.cast(json.dumps(tag_value), sa.types.TEXT)
593 )
594 )
595 query.update(
596 {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()},
597 synchronize_session=False,
598 )
554 599 session = DBSession()
555 600 mark_changed(session)
556 601 return True
@@ -566,8 +611,8 b' def update_tag_counters():'
566 611 """
567 612 Sets task to update counters for application tags
568 613 """
569 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
570 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
614 tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1)
615 Datastores.redis.delete(REDIS_KEYS["seen_tag_list"])
571 616 c = collections.Counter(tags)
572 617 for t_json, count in c.items():
573 618 tag_info = json.loads(t_json)
@@ -580,28 +625,34 b' def daily_digest():'
580 625 Sends daily digest with top 50 error reports
581 626 """
582 627 request = get_current_request()
583 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
584 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
628 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
629 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
585 630 since_when = datetime.utcnow() - timedelta(hours=8)
586 log.warning('Generating daily digests')
631 log.warning("Generating daily digests")
587 632 for resource_id in apps:
588 resource_id = resource_id.decode('utf8')
633 resource_id = resource_id.decode("utf8")
589 634 end_date = datetime.utcnow().replace(microsecond=0, second=0)
590 filter_settings = {'resource': [resource_id],
591 'tags': [{'name': 'type',
592 'value': ['error'], 'op': None}],
593 'type': 'error', 'start_date': since_when,
594 'end_date': end_date}
635 filter_settings = {
636 "resource": [resource_id],
637 "tags": [{"name": "type", "value": ["error"], "op": None}],
638 "type": "error",
639 "start_date": since_when,
640 "end_date": end_date,
641 }
595 642
596 643 reports = ReportGroupService.get_trending(
597 request, filter_settings=filter_settings, limit=50)
644 request, filter_settings=filter_settings, limit=50
645 )
598 646
599 647 application = ApplicationService.by_id(resource_id)
600 648 if application:
601 users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
649 users = set(
650 [p.user for p in ResourceService.users_for_perm(application, "view")]
651 )
602 652 for user in users:
603 user.send_digest(request, application, reports=reports,
604 since_when=since_when)
653 user.send_digest(
654 request, application, reports=reports, since_when=since_when
655 )
605 656
606 657
607 658 @celery.task(queue="default")
@@ -610,11 +661,12 b' def notifications_reports():'
610 661 Loop that checks redis for info and then issues new tasks to celery to
611 662 issue notifications
612 663 """
613 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
614 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
664 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
665 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
615 666 for app in apps:
616 log.warning('Notify for app: %s' % app)
617 check_user_report_notifications.delay(app.decode('utf8'))
667 log.warning("Notify for app: %s" % app)
668 check_user_report_notifications.delay(app.decode("utf8"))
669
618 670
619 671 @celery.task(queue="default")
620 672 def alerting_reports():
@@ -624,34 +676,33 b' def alerting_reports():'
624 676 - which applications should have new alerts opened
625 677 """
626 678
627 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
628 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
679 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"])
680 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"])
629 681 for app in apps:
630 log.warning('Notify for app: %s' % app)
631 check_alerts.delay(app.decode('utf8'))
682 log.warning("Notify for app: %s" % app)
683 check_alerts.delay(app.decode("utf8"))
632 684
633 685
634 @celery.task(queue="default", soft_time_limit=3600 * 4,
635 hard_time_limit=3600 * 4, max_retries=144)
686 @celery.task(
687 queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144
688 )
636 689 def logs_cleanup(resource_id, filter_settings):
637 690 request = get_current_request()
638 691 request.tm.begin()
639 692 es_query = {
640 693 "query": {
641 "filtered": {
642 "filter": {
643 "and": [{"term": {"resource_id": resource_id}}]
644 }
645 }
694 "filtered": {"filter": {"and": [{"term": {"resource_id": resource_id}}]}}
646 695 }
647 696 }
648 697
649 698 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
650 if filter_settings['namespace']:
651 query = query.filter(Log.namespace == filter_settings['namespace'][0])
652 es_query['query']['filtered']['filter']['and'].append(
653 {"term": {"namespace": filter_settings['namespace'][0]}}
699 if filter_settings["namespace"]:
700 query = query.filter(Log.namespace == filter_settings["namespace"][0])
701 es_query["query"]["filtered"]["filter"]["and"].append(
702 {"term": {"namespace": filter_settings["namespace"][0]}}
654 703 )
655 704 query.delete(synchronize_session=False)
656 705 request.tm.commit()
657 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format('rcae_l_*', 'log'), body=es_query)
706 Datastores.es.transport.perform_request(
707 "DELETE", "/{}/{}/_query".format("rcae_l_*", "log"), body=es_query
708 )
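
Several tasks above batch their counter updates through a non-transactional redis pipeline: incr plus expire per key, then one execute() for a single network round trip. A sketch of that pattern with redis-py, assuming a server on localhost; the key name is illustrative (the real keys come from REDIS_KEYS):

```python
import redis

r = redis.StrictRedis.from_url("redis://localhost:6379/0")
pipe = r.pipeline(transaction=False)

key = "counters:reports_per_minute:2019-01-01T12:00"
pipe.incr(key, 10)           # bump the per-minute counter by the batch size
pipe.expire(key, 3600 * 24)  # stale counters fall out on their own
pipe.execute()               # all queued commands in one round trip

print(int(r.get(key)))
```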
@@ -14,6 +14,7 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17
17 18 def filter_callable(structure, section=None):
18 structure['SOMEVAL'] = '***REMOVED***'
19 structure["SOMEVAL"] = "***REMOVED***"
19 20 return structure
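
The test fixture above is a scrubbing filter: given a payload structure, it overwrites a sensitive value before the data is stored or shipped. A trivial direct call, with a hypothetical section name:

```python
def filter_callable(structure, section=None):
    structure["SOMEVAL"] = "***REMOVED***"
    return structure


payload = {"SOMEVAL": "secret-token", "other": 1}
print(filter_callable(payload, section="error_report"))
# -> {'SOMEVAL': '***REMOVED***', 'other': 1}
```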
This diff has been collapsed as it changes many lines (801 lines changed).
@@ -43,7 +43,7 b' _ = str'
43 43 strip_filter = lambda x: x.strip() if x else None
44 44 uppercase_filter = lambda x: x.upper() if x else None
45 45
46 FALSE_VALUES = ('false', '', False, None)
46 FALSE_VALUES = ("false", "", False, None)
47 47
48 48
49 49 class CSRFException(Exception):
@@ -51,11 +51,14 b' class CSRFException(Exception):'
51 51
52 52
53 53 class ReactorForm(SecureForm):
54 def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None,
55 **kwargs):
56 super(ReactorForm, self).__init__(formdata=formdata, obj=obj,
54 def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs):
55 super(ReactorForm, self).__init__(
56 formdata=formdata,
57 obj=obj,
57 58 prefix=prefix,
58 csrf_context=csrf_context, **kwargs)
59 csrf_context=csrf_context,
60 **kwargs
61 )
59 62 self._csrf_context = csrf_context
60 63
61 64 def generate_csrf_token(self, csrf_context):
@@ -63,14 +66,14 b' class ReactorForm(SecureForm):'
63 66
64 67 def validate_csrf_token(self, field):
65 68 request = self._csrf_context or pyramid.threadlocal.get_current_request()
66 is_from_auth_token = 'auth:auth_token' in request.effective_principals
69 is_from_auth_token = "auth:auth_token" in request.effective_principals
67 70 if is_from_auth_token:
68 71 return True
69 72
70 73 if field.data != field.current_token:
71 74 # try to save the day by using token from angular
72 if request.headers.get('X-XSRF-TOKEN') != field.current_token:
73 raise CSRFException('Invalid CSRF token')
75 if request.headers.get("X-XSRF-TOKEN") != field.current_token:
76 raise CSRFException("Invalid CSRF token")
74 77
75 78 @property
76 79 def errors_dict(self):
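validate_csrf_token above accepts either the regular form token or the X-XSRF-TOKEN header that the Angular frontend sends, and skips the check entirely for token-authenticated API calls. A minimal standalone sketch of that decision, with plain values standing in for the request (all names illustrative):

    class CSRFException(Exception):
        pass

    def check_csrf(form_token, current_token, headers, principals):
        if "auth:auth_token" in principals:
            return True  # API-token requests skip CSRF entirely
        if form_token != current_token:
            # try to save the day by using the token from angular
            if headers.get("X-XSRF-TOKEN") != current_token:
                raise CSRFException("Invalid CSRF token")
        return True

    assert check_csrf("stale", "tok", {"X-XSRF-TOKEN": "tok"}, [])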
@@ -105,45 +108,47 b' class ReactorForm(SecureForm):'
105 108
106 109 class SignInForm(ReactorForm):
107 110 came_from = wtforms.HiddenField()
108 sign_in_user_name = wtforms.StringField(_('User Name'))
109 sign_in_user_password = wtforms.PasswordField(_('Password'))
111 sign_in_user_name = wtforms.StringField(_("User Name"))
112 sign_in_user_password = wtforms.PasswordField(_("Password"))
110 113
111 ignore_labels = ['submit']
112 css_classes = {'submit': 'btn btn-primary'}
114 ignore_labels = ["submit"]
115 css_classes = {"submit": "btn btn-primary"}
113 116
114 html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'},
115 'sign_in_user_password': {
116 'placeholder': 'Your password'}}
117 html_attrs = {
118 "sign_in_user_name": {"placeholder": "Your login"},
119 "sign_in_user_password": {"placeholder": "Your password"},
120 }
117 121
118 122
119 123 from wtforms.widgets import html_params, HTMLString
120 124
121 125
122 def select_multi_checkbox(field, ul_class='set', **kwargs):
126 def select_multi_checkbox(field, ul_class="set", **kwargs):
123 127 """Render a multi-checkbox widget"""
124 kwargs.setdefault('type', 'checkbox')
125 field_id = kwargs.pop('id', field.id)
126 html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)]
128 kwargs.setdefault("type", "checkbox")
129 field_id = kwargs.pop("id", field.id)
130 html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)]
127 131 for value, label, checked in field.iter_choices():
128 choice_id = '%s-%s' % (field_id, value)
132 choice_id = "%s-%s" % (field_id, value)
129 133 options = dict(kwargs, name=field.name, value=value, id=choice_id)
130 134 if checked:
131 options['checked'] = 'checked'
132 html.append('<li><input %s /> ' % html_params(**options))
135 options["checked"] = "checked"
136 html.append("<li><input %s /> " % html_params(**options))
133 137 html.append('<label for="%s">%s</label></li>' % (choice_id, label))
134 html.append('</ul>')
135 return HTMLString(''.join(html))
138 html.append("</ul>")
139 return HTMLString("".join(html))
136 140
137 141
138 def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs):
142 def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs):
139 143 """Render a button widget"""
140 kwargs.setdefault('type', 'button')
141 field_id = kwargs.pop('id', field.id)
142 kwargs.setdefault('value', field.label.text)
143 html = ['<button %s>%s</button>' % (html_params(id=field_id,
144 class_=button_cls),
145 kwargs['value'],)]
146 return HTMLString(''.join(html))
144 kwargs.setdefault("type", "button")
145 field_id = kwargs.pop("id", field.id)
146 kwargs.setdefault("value", field.label.text)
147 html = [
148 "<button %s>%s</button>"
149 % (html_params(id=field_id, class_=button_cls), kwargs["value"])
150 ]
151 return HTMLString("".join(html))
147 152
148 153
149 154 def clean_whitespace(value):
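select_multi_checkbox and button_widget assemble raw HTML strings from the field's choices. A dependency-free sketch of the same pattern, with plain string formatting standing in for wtforms' html_params (illustrative only):

    def render_multi_checkbox(field_id, name, choices, checked_values, ul_class="set"):
        html = ['<ul id="%s" class="%s">' % (field_id, ul_class)]
        for value, label in choices:
            checked = ' checked="checked"' if value in checked_values else ""
            html.append(
                '<li><input type="checkbox" id="%s-%s" name="%s" value="%s"%s /> '
                '<label for="%s-%s">%s</label></li>'
                % (field_id, value, name, value, checked, field_id, value, label)
            )
        html.append("</ul>")
        return "".join(html)

    print(render_multi_checkbox("apps", "applications", [("1", "Prod")], {"1"}))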
@@ -157,33 +162,32 b' def found_username_validator(form, field):'
157 162 # sets user to recover in email validator
158 163 form.field_user = user
159 164 if not user:
160 raise wtforms.ValidationError('This username does not exist')
165 raise wtforms.ValidationError("This username does not exist")
161 166
162 167
163 168 def found_username_email_validator(form, field):
164 169 user = UserService.by_email(field.data)
165 170 if not user:
166 raise wtforms.ValidationError('Email is incorrect')
171 raise wtforms.ValidationError("Email is incorrect")
167 172
168 173
169 174 def unique_username_validator(form, field):
170 175 user = UserService.by_user_name(field.data)
171 176 if user:
172 raise wtforms.ValidationError('This username already exists in system')
177 raise wtforms.ValidationError("This username already exists in system")
173 178
174 179
175 180 def unique_groupname_validator(form, field):
176 181 group = GroupService.by_group_name(field.data)
177 mod_group = getattr(form, '_modified_group', None)
182 mod_group = getattr(form, "_modified_group", None)
178 183 if group and (not mod_group or mod_group.id != group.id):
179 raise wtforms.ValidationError(
180 'This group name already exists in system')
184 raise wtforms.ValidationError("This group name already exists in system")
181 185
182 186
183 187 def unique_email_validator(form, field):
184 188 user = UserService.by_email(field.data)
185 189 if user:
186 raise wtforms.ValidationError('This email already exists in system')
190 raise wtforms.ValidationError("This email already exists in system")
187 191
188 192
189 193 def email_validator(form, field):
@@ -196,145 +200,168 b' def email_validator(form, field):'
196 200
197 201 def unique_alert_email_validator(form, field):
198 202 q = DBSession.query(AlertChannel)
199 q = q.filter(AlertChannel.channel_name == 'email')
203 q = q.filter(AlertChannel.channel_name == "email")
200 204 q = q.filter(AlertChannel.channel_value == field.data)
201 205 email = q.first()
202 206 if email:
203 raise wtforms.ValidationError(
204 'This email already exists in alert system')
207 raise wtforms.ValidationError("This email already exists in alert system")
205 208
206 209
207 210 def blocked_email_validator(form, field):
208 211 blocked_emails = [
209 'goood-mail.org',
210 'shoeonlineblog.com',
211 'louboutinemart.com',
212 'guccibagshere.com',
213 'nikeshoesoutletforsale.com'
212 "goood-mail.org",
213 "shoeonlineblog.com",
214 "louboutinemart.com",
215 "guccibagshere.com",
216 "nikeshoesoutletforsale.com",
214 217 ]
215 data = field.data or ''
216 domain = data.split('@')[-1]
218 data = field.data or ""
219 domain = data.split("@")[-1]
217 220 if domain in blocked_emails:
218 raise wtforms.ValidationError('Don\'t spam')
221 raise wtforms.ValidationError("Don't spam")
219 222
220 223
221 224 def old_password_validator(form, field):
222 if not UserService.check_password(field.user, field.data or ''):
223 raise wtforms.ValidationError('You need to enter correct password')
225 if not UserService.check_password(field.user, field.data or ""):
226 raise wtforms.ValidationError("You need to enter correct password")
224 227
225 228
226 229 class UserRegisterForm(ReactorForm):
227 230 user_name = wtforms.StringField(
228 _('User Name'),
231 _("User Name"),
229 232 filters=[strip_filter],
230 233 validators=[
231 234 wtforms.validators.Length(min=2, max=30),
232 235 wtforms.validators.Regexp(
233 re.compile(r'^[\.\w-]+$', re.UNICODE),
234 message="Invalid characters used"),
236 re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used"
237 ),
235 238 unique_username_validator,
236 wtforms.validators.DataRequired()
237 ])
239 wtforms.validators.DataRequired(),
240 ],
241 )
238 242
239 user_password = wtforms.PasswordField(_('User Password'),
243 user_password = wtforms.PasswordField(
244 _("User Password"),
240 245 filters=[strip_filter],
241 246 validators=[
242 247 wtforms.validators.Length(min=4),
243 wtforms.validators.DataRequired()
244 ])
248 wtforms.validators.DataRequired(),
249 ],
250 )
245 251
246 email = wtforms.StringField(_('Email Address'),
252 email = wtforms.StringField(
253 _("Email Address"),
247 254 filters=[strip_filter],
248 validators=[email_validator,
255 validators=[
256 email_validator,
249 257 unique_email_validator,
250 258 blocked_email_validator,
251 wtforms.validators.DataRequired()])
252 first_name = wtforms.HiddenField(_('First Name'))
253 last_name = wtforms.HiddenField(_('Last Name'))
259 wtforms.validators.DataRequired(),
260 ],
261 )
262 first_name = wtforms.HiddenField(_("First Name"))
263 last_name = wtforms.HiddenField(_("Last Name"))
254 264
255 ignore_labels = ['submit']
256 css_classes = {'submit': 'btn btn-primary'}
265 ignore_labels = ["submit"]
266 css_classes = {"submit": "btn btn-primary"}
257 267
258 html_attrs = {'user_name': {'placeholder': 'Your login'},
259 'user_password': {'placeholder': 'Your password'},
260 'email': {'placeholder': 'Your email'}}
268 html_attrs = {
269 "user_name": {"placeholder": "Your login"},
270 "user_password": {"placeholder": "Your password"},
271 "email": {"placeholder": "Your email"},
272 }
261 273
262 274
263 275 class UserCreateForm(UserRegisterForm):
264 status = wtforms.BooleanField('User status',
265 false_values=FALSE_VALUES)
276 status = wtforms.BooleanField("User status", false_values=FALSE_VALUES)
266 277
267 278
268 279 class UserUpdateForm(UserCreateForm):
269 280 user_name = None
270 user_password = wtforms.PasswordField(_('User Password'),
281 user_password = wtforms.PasswordField(
282 _("User Password"),
271 283 filters=[strip_filter],
272 validators=[
273 wtforms.validators.Length(min=4),
274 wtforms.validators.Optional()
275 ])
276 email = wtforms.StringField(_('Email Address'),
284 validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()],
285 )
286 email = wtforms.StringField(
287 _("Email Address"),
277 288 filters=[strip_filter],
278 validators=[email_validator,
279 wtforms.validators.DataRequired()])
289 validators=[email_validator, wtforms.validators.DataRequired()],
290 )
280 291
281 292
282 293 class LostPasswordForm(ReactorForm):
283 email = wtforms.StringField(_('Email Address'),
294 email = wtforms.StringField(
295 _("Email Address"),
284 296 filters=[strip_filter],
285 validators=[email_validator,
297 validators=[
298 email_validator,
286 299 found_username_email_validator,
287 wtforms.validators.DataRequired()])
300 wtforms.validators.DataRequired(),
301 ],
302 )
288 303
289 submit = wtforms.SubmitField(_('Reset password'))
290 ignore_labels = ['submit']
291 css_classes = {'submit': 'btn btn-primary'}
304 submit = wtforms.SubmitField(_("Reset password"))
305 ignore_labels = ["submit"]
306 css_classes = {"submit": "btn btn-primary"}
292 307
293 308
294 309 class ChangePasswordForm(ReactorForm):
295 310 old_password = wtforms.PasswordField(
296 'Old Password',
311 "Old Password",
297 312 filters=[strip_filter],
298 validators=[old_password_validator,
299 wtforms.validators.DataRequired()])
313 validators=[old_password_validator, wtforms.validators.DataRequired()],
314 )
300 315
301 316 new_password = wtforms.PasswordField(
302 'New Password',
317 "New Password",
303 318 filters=[strip_filter],
304 validators=[wtforms.validators.Length(min=4),
305 wtforms.validators.DataRequired()])
319 validators=[
320 wtforms.validators.Length(min=4),
321 wtforms.validators.DataRequired(),
322 ],
323 )
306 324 new_password_confirm = wtforms.PasswordField(
307 'Confirm Password',
325 "Confirm Password",
308 326 filters=[strip_filter],
309 validators=[wtforms.validators.EqualTo('new_password'),
310 wtforms.validators.DataRequired()])
311 submit = wtforms.SubmitField('Change Password')
312 ignore_labels = ['submit']
313 css_classes = {'submit': 'btn btn-primary'}
327 validators=[
328 wtforms.validators.EqualTo("new_password"),
329 wtforms.validators.DataRequired(),
330 ],
331 )
332 submit = wtforms.SubmitField("Change Password")
333 ignore_labels = ["submit"]
334 css_classes = {"submit": "btn btn-primary"}
314 335
315 336
316 337 class CheckPasswordForm(ReactorForm):
317 338 password = wtforms.PasswordField(
318 'Password',
339 "Password",
319 340 filters=[strip_filter],
320 validators=[old_password_validator,
321 wtforms.validators.DataRequired()])
341 validators=[old_password_validator, wtforms.validators.DataRequired()],
342 )
322 343
323 344
324 345 class NewPasswordForm(ReactorForm):
325 346 new_password = wtforms.PasswordField(
326 'New Password',
347 "New Password",
327 348 filters=[strip_filter],
328 validators=[wtforms.validators.Length(min=4),
329 wtforms.validators.DataRequired()])
349 validators=[
350 wtforms.validators.Length(min=4),
351 wtforms.validators.DataRequired(),
352 ],
353 )
330 354 new_password_confirm = wtforms.PasswordField(
331 'Confirm Password',
355 "Confirm Password",
332 356 filters=[strip_filter],
333 validators=[wtforms.validators.EqualTo('new_password'),
334 wtforms.validators.DataRequired()])
335 submit = wtforms.SubmitField('Set Password')
336 ignore_labels = ['submit']
337 css_classes = {'submit': 'btn btn-primary'}
357 validators=[
358 wtforms.validators.EqualTo("new_password"),
359 wtforms.validators.DataRequired(),
360 ],
361 )
362 submit = wtforms.SubmitField("Set Password")
363 ignore_labels = ["submit"]
364 css_classes = {"submit": "btn btn-primary"}
338 365
339 366
340 367 class CORSTextAreaField(wtforms.StringField):
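The uniqueness validators above all share the same (form, field) shape and raise on conflict. A generic sketch of that pattern, with a plain exception standing in for wtforms.ValidationError (the factory is hypothetical, not in the source):

    class ValidationError(Exception):
        pass

    def make_unique_validator(lookup, message):
        def validator(form, field):
            if lookup(field.data):
                raise ValidationError(message)
        return validator

    taken = {"admin", "root"}
    check = make_unique_validator(lambda v: v in taken, "This username already exists in system")
    check(None, type("Field", (), {"data": "guest"})())  # passes; "admin" would raise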
@@ -342,261 +369,290 b' class CORSTextAreaField(wtforms.StringField):'
342 369 This field represents an HTML ``<textarea>`` and can be used to take
343 370 multi-line input.
344 371 """
372
345 373 widget = wtforms.widgets.TextArea()
346 374
347 375 def process_formdata(self, valuelist):
348 376 self.data = []
349 377 if valuelist:
350 data = [x.strip() for x in valuelist[0].split('\n')]
378 data = [x.strip() for x in valuelist[0].split("\n")]
351 379 for d in data:
352 380 if not d:
353 381 continue
354 if d.startswith('www.'):
382 if d.startswith("www."):
355 383 d = d[4:]
356 384 if data:
357 385 self.data.append(d)
358 386 else:
359 387 self.data = []
360 self.data = '\n'.join(self.data)
388 self.data = "\n".join(self.data)
361 389
362 390
363 391 class ApplicationCreateForm(ReactorForm):
364 392 resource_name = wtforms.StringField(
365 _('Application name'),
393 _("Application name"),
366 394 filters=[strip_filter],
367 validators=[wtforms.validators.Length(min=1),
368 wtforms.validators.DataRequired()])
395 validators=[
396 wtforms.validators.Length(min=1),
397 wtforms.validators.DataRequired(),
398 ],
399 )
369 400
370 401 domains = CORSTextAreaField(
371 _('Domain names for CORS headers '),
372 validators=[wtforms.validators.Length(min=1),
373 wtforms.validators.Optional()],
374 description='Required for Javascript error '
375 'tracking (one domain per line, skip the http:// part)')
402 _("Domain names for CORS headers "),
403 validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()],
404 description="Required for Javascript error "
405 "tracking (one domain per line, skip the http:// part)",
406 )
376 407
377 submit = wtforms.SubmitField(_('Create Application'))
408 submit = wtforms.SubmitField(_("Create Application"))
378 409
379 ignore_labels = ['submit']
380 css_classes = {'submit': 'btn btn-primary'}
381 html_attrs = {'resource_name': {'placeholder': 'Application Name'},
382 'uptime_url': {'placeholder': 'http://somedomain.com'}}
410 ignore_labels = ["submit"]
411 css_classes = {"submit": "btn btn-primary"}
412 html_attrs = {
413 "resource_name": {"placeholder": "Application Name"},
414 "uptime_url": {"placeholder": "http://somedomain.com"},
415 }
383 416
384 417
385 418 class ApplicationUpdateForm(ApplicationCreateForm):
386 419 default_grouping = wtforms.SelectField(
387 _('Default grouping for errors'),
388 choices=[('url_type', 'Error Type + location',),
389 ('url_traceback', 'Traceback + location',),
390 ('traceback_server', 'Traceback + Server',)],
391 default='url_traceback')
420 _("Default grouping for errors"),
421 choices=[
422 ("url_type", "Error Type + location"),
423 ("url_traceback", "Traceback + location"),
424 ("traceback_server", "Traceback + Server"),
425 ],
426 default="url_traceback",
427 )
392 428
393 429 error_report_threshold = wtforms.IntegerField(
394 _('Alert on error reports'),
430 _("Alert on error reports"),
395 431 validators=[
396 432 wtforms.validators.NumberRange(min=1),
397 wtforms.validators.DataRequired()
433 wtforms.validators.DataRequired(),
398 434 ],
399 description='Application needs to send at least this many '
400 'error reports per minute to open an alert'
435 description="Application needs to send at least this many "
436 "error reports per minute to open an alert",
401 437 )
402 438
403 439 slow_report_threshold = wtforms.IntegerField(
404 _('Alert on slow reports'),
405 validators=[wtforms.validators.NumberRange(min=1),
406 wtforms.validators.DataRequired()],
407 description='Application needs to send at least this many '
408 'slow reports per minute to open an alert')
440 _("Alert on slow reports"),
441 validators=[
442 wtforms.validators.NumberRange(min=1),
443 wtforms.validators.DataRequired(),
444 ],
445 description="Application needs to send at least this many "
446 "slow reports per minute to open an alert",
447 )
409 448
410 449 allow_permanent_storage = wtforms.BooleanField(
411 _('Permanent logs'),
450 _("Permanent logs"),
412 451 false_values=FALSE_VALUES,
413 description=_(
414 'Allow permanent storage of logs in separate DB partitions'))
452 description=_("Allow permanent storage of logs in separate DB partitions"),
453 )
415 454
416 submit = wtforms.SubmitField(_('Create Application'))
455 submit = wtforms.SubmitField(_("Create Application"))
417 456
418 457
419 458 class UserSearchSchemaForm(ReactorForm):
420 user_name = wtforms.StringField('User Name',
421 filters=[strip_filter], )
459 user_name = wtforms.StringField("User Name", filters=[strip_filter])
422 460
423 submit = wtforms.SubmitField(_('Search User'))
424 ignore_labels = ['submit']
425 css_classes = {'submit': 'btn btn-primary'}
461 submit = wtforms.SubmitField(_("Search User"))
462 ignore_labels = ["submit"]
463 css_classes = {"submit": "btn btn-primary"}
426 464
427 465 '<li class="user_exists"><span></span></li>'
428 466
429 467
430 468 class YesNoForm(ReactorForm):
431 no = wtforms.SubmitField('No', default='')
432 yes = wtforms.SubmitField('Yes', default='')
433 ignore_labels = ['submit']
434 css_classes = {'submit': 'btn btn-primary'}
469 no = wtforms.SubmitField("No", default="")
470 yes = wtforms.SubmitField("Yes", default="")
471 ignore_labels = ["submit"]
472 css_classes = {"submit": "btn btn-primary"}
435 473
436 474
437 status_codes = [('', 'All',), ('500', '500',), ('404', '404',)]
475 status_codes = [("", "All"), ("500", "500"), ("404", "404")]
438 476
439 priorities = [('', 'All',)]
477 priorities = [("", "All")]
440 478 for i in range(1, 11):
441 priorities.append((str(i), str(i),))
442
443 report_status_choices = [('', 'All',),
444 ('never_reviewed', 'Never reviewed',),
445 ('reviewed', 'Reviewed',),
446 ('public', 'Public',),
447 ('fixed', 'Fixed',), ]
479 priorities.append((str(i), str(i)))
480
481 report_status_choices = [
482 ("", "All"),
483 ("never_reviewed", "Never reviewed"),
484 ("reviewed", "Reviewed"),
485 ("public", "Public"),
486 ("fixed", "Fixed"),
487 ]
448 488
449 489
450 490 class ReportBrowserForm(ReactorForm):
451 applications = wtforms.SelectMultipleField('Applications',
452 widget=select_multi_checkbox)
453 http_status = wtforms.SelectField('HTTP Status', choices=status_codes)
454 priority = wtforms.SelectField('Priority', choices=priorities, default='')
455 start_date = wtforms.DateField('Start Date')
456 end_date = wtforms.DateField('End Date')
457 error = wtforms.StringField('Error')
458 url_path = wtforms.StringField('URL Path')
459 url_domain = wtforms.StringField('URL Domain')
460 report_status = wtforms.SelectField('Report status',
461 choices=report_status_choices,
462 default='')
463 submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">'
464 '</span> Filter results',
465 widget=button_widget)
466
467 ignore_labels = ['submit']
468 css_classes = {'submit': 'btn btn-primary'}
469
470
471 slow_report_status_choices = [('', 'All',),
472 ('never_reviewed', 'Never reviewed',),
473 ('reviewed', 'Reviewed',),
474 ('public', 'Public',), ]
491 applications = wtforms.SelectMultipleField(
492 "Applications", widget=select_multi_checkbox
493 )
494 http_status = wtforms.SelectField("HTTP Status", choices=status_codes)
495 priority = wtforms.SelectField("Priority", choices=priorities, default="")
496 start_date = wtforms.DateField("Start Date")
497 end_date = wtforms.DateField("End Date")
498 error = wtforms.StringField("Error")
499 url_path = wtforms.StringField("URL Path")
500 url_domain = wtforms.StringField("URL Domain")
501 report_status = wtforms.SelectField(
502 "Report status", choices=report_status_choices, default=""
503 )
504 submit = wtforms.SubmitField(
505 '<span class="glyphicon glyphicon-search">' "</span> Filter results",
506 widget=button_widget,
507 )
508
509 ignore_labels = ["submit"]
510 css_classes = {"submit": "btn btn-primary"}
511
512
513 slow_report_status_choices = [
514 ("", "All"),
515 ("never_reviewed", "Never reviewed"),
516 ("reviewed", "Reviewed"),
517 ("public", "Public"),
518 ]
475 519
476 520
477 521 class BulkOperationForm(ReactorForm):
478 applications = wtforms.SelectField('Applications')
522 applications = wtforms.SelectField("Applications")
479 523 start_date = wtforms.DateField(
480 'Start Date',
481 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(
482 days=90))
483 end_date = wtforms.DateField('End Date')
524 "Start Date",
525 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90),
526 )
527 end_date = wtforms.DateField("End Date")
484 528 confirm = wtforms.BooleanField(
485 'Confirm operation',
486 validators=[wtforms.validators.DataRequired()])
529 "Confirm operation", validators=[wtforms.validators.DataRequired()]
530 )
487 531
488 532
489 533 class LogBrowserForm(ReactorForm):
490 applications = wtforms.SelectMultipleField('Applications',
491 widget=select_multi_checkbox)
492 start_date = wtforms.DateField('Start Date')
493 log_level = wtforms.StringField('Log level')
494 message = wtforms.StringField('Message')
495 namespace = wtforms.StringField('Namespace')
534 applications = wtforms.SelectMultipleField(
535 "Applications", widget=select_multi_checkbox
536 )
537 start_date = wtforms.DateField("Start Date")
538 log_level = wtforms.StringField("Log level")
539 message = wtforms.StringField("Message")
540 namespace = wtforms.StringField("Namespace")
496 541 submit = wtforms.SubmitField(
497 542 '<span class="glyphicon glyphicon-search"></span> Filter results',
498 widget=button_widget)
499 ignore_labels = ['submit']
500 css_classes = {'submit': 'btn btn-primary'}
543 widget=button_widget,
544 )
545 ignore_labels = ["submit"]
546 css_classes = {"submit": "btn btn-primary"}
501 547
502 548
503 549 class CommentForm(ReactorForm):
504 body = wtforms.TextAreaField('Comment', validators=[
550 body = wtforms.TextAreaField(
551 "Comment",
552 validators=[
505 553 wtforms.validators.Length(min=1),
506 wtforms.validators.DataRequired()
507 ])
508 submit = wtforms.SubmitField('Comment', )
509 ignore_labels = ['submit']
510 css_classes = {'submit': 'btn btn-primary'}
554 wtforms.validators.DataRequired(),
555 ],
556 )
557 submit = wtforms.SubmitField("Comment")
558 ignore_labels = ["submit"]
559 css_classes = {"submit": "btn btn-primary"}
511 560
512 561
513 562 class EmailChannelCreateForm(ReactorForm):
514 email = wtforms.StringField(_('Email Address'),
563 email = wtforms.StringField(
564 _("Email Address"),
515 565 filters=[strip_filter],
516 validators=[email_validator,
566 validators=[
567 email_validator,
517 568 unique_alert_email_validator,
518 wtforms.validators.DataRequired()])
519 submit = wtforms.SubmitField('Add email channel', )
520 ignore_labels = ['submit']
521 css_classes = {'submit': 'btn btn-primary'}
569 wtforms.validators.DataRequired(),
570 ],
571 )
572 submit = wtforms.SubmitField("Add email channel")
573 ignore_labels = ["submit"]
574 css_classes = {"submit": "btn btn-primary"}
522 575
523 576
524 577 def gen_user_profile_form():
525 578 class UserProfileForm(ReactorForm):
526 579 email = wtforms.StringField(
527 _('Email Address'),
528 validators=[email_validator, wtforms.validators.DataRequired()])
529 first_name = wtforms.StringField(_('First Name'))
530 last_name = wtforms.StringField(_('Last Name'))
531 company_name = wtforms.StringField(_('Company Name'))
532 company_address = wtforms.TextAreaField(_('Company Address'))
533 zip_code = wtforms.StringField(_('ZIP code'))
534 city = wtforms.StringField(_('City'))
535 notifications = wtforms.BooleanField('Account notifications',
536 false_values=FALSE_VALUES)
537 submit = wtforms.SubmitField(_('Update Account'))
538 ignore_labels = ['submit']
539 css_classes = {'submit': 'btn btn-primary'}
580 _("Email Address"),
581 validators=[email_validator, wtforms.validators.DataRequired()],
582 )
583 first_name = wtforms.StringField(_("First Name"))
584 last_name = wtforms.StringField(_("Last Name"))
585 company_name = wtforms.StringField(_("Company Name"))
586 company_address = wtforms.TextAreaField(_("Company Address"))
587 zip_code = wtforms.StringField(_("ZIP code"))
588 city = wtforms.StringField(_("City"))
589 notifications = wtforms.BooleanField(
590 "Account notifications", false_values=FALSE_VALUES
591 )
592 submit = wtforms.SubmitField(_("Update Account"))
593 ignore_labels = ["submit"]
594 css_classes = {"submit": "btn btn-primary"}
540 595
541 596 return UserProfileForm
542 597
543 598
544 599 class PurgeAppForm(ReactorForm):
545 600 resource_id = wtforms.HiddenField(
546 'App Id',
547 validators=[wtforms.validators.DataRequired()])
548 days = wtforms.IntegerField(
549 'Days',
550 validators=[wtforms.validators.DataRequired()])
601 "App Id", validators=[wtforms.validators.DataRequired()]
602 )
603 days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()])
551 604 password = wtforms.PasswordField(
552 'Admin Password',
553 validators=[old_password_validator, wtforms.validators.DataRequired()])
554 submit = wtforms.SubmitField(_('Purge Data'))
555 ignore_labels = ['submit']
556 css_classes = {'submit': 'btn btn-primary'}
605 "Admin Password",
606 validators=[old_password_validator, wtforms.validators.DataRequired()],
607 )
608 submit = wtforms.SubmitField(_("Purge Data"))
609 ignore_labels = ["submit"]
610 css_classes = {"submit": "btn btn-primary"}
557 611
558 612
559 613 class IntegrationRepoForm(ReactorForm):
560 host_name = wtforms.StringField("Service Host", default='')
614 host_name = wtforms.StringField("Service Host", default="")
561 615 user_name = wtforms.StringField(
562 616 "User Name",
563 617 filters=[strip_filter],
564 validators=[wtforms.validators.DataRequired(),
565 wtforms.validators.Length(min=1)])
618 validators=[
619 wtforms.validators.DataRequired(),
620 wtforms.validators.Length(min=1),
621 ],
622 )
566 623 repo_name = wtforms.StringField(
567 624 "Repo Name",
568 625 filters=[strip_filter],
569 validators=[wtforms.validators.DataRequired(),
570 wtforms.validators.Length(min=1)])
626 validators=[
627 wtforms.validators.DataRequired(),
628 wtforms.validators.Length(min=1),
629 ],
630 )
571 631
572 632
573 633 class IntegrationBitbucketForm(IntegrationRepoForm):
574 host_name = wtforms.StringField("Service Host",
575 default='https://bitbucket.org')
634 host_name = wtforms.StringField("Service Host", default="https://bitbucket.org")
576 635
577 636 def validate_user_name(self, field):
578 637 try:
579 638 request = pyramid.threadlocal.get_current_request()
580 639 client = BitbucketIntegration.create_client(
581 request,
582 self.user_name.data,
583 self.repo_name.data)
640 request, self.user_name.data, self.repo_name.data
641 )
584 642 client.get_assignees()
585 643 except IntegrationException as e:
586 644 raise wtforms.validators.ValidationError(str(e))
587 645
588 646
589 647 class IntegrationGithubForm(IntegrationRepoForm):
590 host_name = wtforms.StringField("Service Host",
591 default='https://github.com')
648 host_name = wtforms.StringField("Service Host", default="https://github.com")
592 649
593 650 def validate_user_name(self, field):
594 651 try:
595 652 request = pyramid.threadlocal.get_current_request()
596 653 client = GithubIntegration.create_client(
597 request,
598 self.user_name.data,
599 self.repo_name.data)
654 request, self.user_name.data, self.repo_name.data
655 )
600 656 client.get_assignees()
601 657 except IntegrationException as e:
602 658 raise wtforms.validators.ValidationError(str(e))
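CORSTextAreaField.process_formdata, earlier in this hunk, normalizes the pasted domain list: one domain per line, blank lines dropped, a leading www. stripped. The same logic as a standalone function (the name is illustrative):

    def normalize_cors_domains(raw):
        domains = []
        for d in (line.strip() for line in raw.split("\n")):
            if not d:
                continue
            if d.startswith("www."):
                d = d[4:]
            domains.append(d)
        return "\n".join(domains)

    assert normalize_cors_domains("www.example.com\n\nfoo.org\n") == "example.com\nfoo.org"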
@@ -605,25 +661,28 b' class IntegrationGithubForm(IntegrationRepoForm):'
605 661
606 662 def filter_rooms(data):
607 663 if data is not None:
608 rooms = data.split(',')
609 return ','.join([r.strip() for r in rooms])
664 rooms = data.split(",")
665 return ",".join([r.strip() for r in rooms])
610 666
611 667
612 668 class IntegrationCampfireForm(ReactorForm):
613 669 account = wtforms.StringField(
614 'Account',
670 "Account",
615 671 filters=[strip_filter],
616 validators=[wtforms.validators.DataRequired()])
672 validators=[wtforms.validators.DataRequired()],
673 )
617 674 api_token = wtforms.StringField(
618 'Api Token',
675 "Api Token",
619 676 filters=[strip_filter],
620 validators=[wtforms.validators.DataRequired()])
621 rooms = wtforms.StringField('Room ID list', filters=[filter_rooms])
677 validators=[wtforms.validators.DataRequired()],
678 )
679 rooms = wtforms.StringField("Room ID list", filters=[filter_rooms])
622 680
623 681 def validate_api_token(self, field):
624 682 try:
625 client = CampfireIntegration.create_client(self.api_token.data,
626 self.account.data)
683 client = CampfireIntegration.create_client(
684 self.api_token.data, self.account.data
685 )
627 686 client.get_account()
628 687 except IntegrationException as e:
629 688 raise wtforms.validators.ValidationError(str(e))
@@ -631,17 +690,18 b' class IntegrationCampfireForm(ReactorForm):'
631 690 def validate_rooms(self, field):
632 691 if not field.data:
633 692 return
634 client = CampfireIntegration.create_client(self.api_token.data,
635 self.account.data)
693 client = CampfireIntegration.create_client(
694 self.api_token.data, self.account.data
695 )
636 696
637 697 try:
638 room_list = [r['id'] for r in client.get_rooms()]
698 room_list = [r["id"] for r in client.get_rooms()]
639 699 except IntegrationException as e:
640 700 raise wtforms.validators.ValidationError(str(e))
641 701
642 rooms = field.data.split(',')
702 rooms = field.data.split(",")
643 703 if len(rooms) > 3:
644 msg = 'You can use up to 3 room ids'
704 msg = "You can use up to 3 room ids"
645 705 raise wtforms.validators.ValidationError(msg)
646 706 if rooms:
647 707 for room_id in rooms:
@@ -649,75 +709,78 b' class IntegrationCampfireForm(ReactorForm):'
649 709 msg = "Room %s doesn't exist"
650 710 raise wtforms.validators.ValidationError(msg % room_id)
651 711 if not room_id.strip().isdigit():
652 msg = 'You must use only integers for room ids'
712 msg = "You must use only integers for room ids"
653 713 raise wtforms.validators.ValidationError(msg)
654 714
655 submit = wtforms.SubmitField(_('Connect to Campfire'))
656 ignore_labels = ['submit']
657 css_classes = {'submit': 'btn btn-primary'}
715 submit = wtforms.SubmitField(_("Connect to Campfire"))
716 ignore_labels = ["submit"]
717 css_classes = {"submit": "btn btn-primary"}
658 718
659 719
660 720 def filter_rooms(data):
661 721 if data is not None:
662 rooms = data.split(',')
663 return ','.join([r.strip() for r in rooms])
722 rooms = data.split(",")
723 return ",".join([r.strip() for r in rooms])
664 724
665 725
666 726 class IntegrationHipchatForm(ReactorForm):
667 727 api_token = wtforms.StringField(
668 'Api Token',
728 "Api Token",
669 729 filters=[strip_filter],
670 validators=[wtforms.validators.DataRequired()])
730 validators=[wtforms.validators.DataRequired()],
731 )
671 732 rooms = wtforms.StringField(
672 'Room ID list',
733 "Room ID list",
673 734 filters=[filter_rooms],
674 validators=[wtforms.validators.DataRequired()])
735 validators=[wtforms.validators.DataRequired()],
736 )
675 737
676 738 def validate_rooms(self, field):
677 739 if not field.data:
678 740 return
679 741 client = HipchatIntegration.create_client(self.api_token.data)
680 rooms = field.data.split(',')
742 rooms = field.data.split(",")
681 743 if len(rooms) > 3:
682 msg = 'You can use up to 3 room ids'
744 msg = "You can use up to 3 room ids"
683 745 raise wtforms.validators.ValidationError(msg)
684 746 if rooms:
685 747 for room_id in rooms:
686 748 if not room_id.strip().isdigit():
687 msg = 'You must use only integers for room ids'
749 msg = "You must use only integers for room ids"
688 750 raise wtforms.validators.ValidationError(msg)
689 751 try:
690 client.send({
691 "message_format": 'text',
752 client.send(
753 {
754 "message_format": "text",
692 755 "message": "testing for room existence",
693 756 "from": "AppEnlight",
694 757 "room_id": room_id,
695 "color": "green"
696 })
758 "color": "green",
759 }
760 )
697 761 except IntegrationException as exc:
698 msg = 'Room id: %s exception: %s'
699 raise wtforms.validators.ValidationError(msg % (room_id,
700 exc))
762 msg = "Room id: %s exception: %s"
763 raise wtforms.validators.ValidationError(msg % (room_id, exc))
701 764
702 765
703 766 class IntegrationFlowdockForm(ReactorForm):
704 api_token = wtforms.StringField('API Token',
767 api_token = wtforms.StringField(
768 "API Token",
705 769 filters=[strip_filter],
706 validators=[
707 wtforms.validators.DataRequired()
708 ], )
770 validators=[wtforms.validators.DataRequired()],
771 )
709 772
710 773 def validate_api_token(self, field):
711 774 try:
712 775 client = FlowdockIntegration.create_client(self.api_token.data)
713 776 registry = pyramid.threadlocal.get_current_registry()
714 777 payload = {
715 "source": registry.settings['mailing.from_name'],
716 "from_address": registry.settings['mailing.from_email'],
778 "source": registry.settings["mailing.from_name"],
779 "from_address": registry.settings["mailing.from_email"],
717 780 "subject": "Integration test",
718 781 "content": "If you can see this it was successful",
719 782 "tags": ["appenlight"],
720 "link": registry.settings['mailing.app_url']
783 "link": registry.settings["mailing.app_url"],
721 784 }
722 785 client.send_to_inbox(payload)
723 786 except IntegrationException as e:
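The Campfire and Hipchat forms above both cap the comma-separated room list at three all-digit ids. Extracted as a plain function for illustration, with the digit check hoisted before the existence lookup and ValueError standing in for wtforms' validation error:

    def validate_room_ids(data, known_rooms):
        rooms = [r.strip() for r in data.split(",")]
        if len(rooms) > 3:
            raise ValueError("You can use up to 3 room ids")
        for room_id in rooms:
            if not room_id.isdigit():
                raise ValueError("You must use only integers for room ids")
            if int(room_id) not in known_rooms:
                raise ValueError("Room %s doesn't exist" % room_id)

    validate_room_ids("12, 34", known_rooms={12, 34, 56})  # passes silently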
@@ -726,30 +789,35 b' class IntegrationFlowdockForm(ReactorForm):'
726 789
727 790 class IntegrationSlackForm(ReactorForm):
728 791 webhook_url = wtforms.StringField(
729 'Reports webhook',
792 "Reports webhook",
730 793 filters=[strip_filter],
731 validators=[wtforms.validators.DataRequired()])
794 validators=[wtforms.validators.DataRequired()],
795 )
732 796
733 797 def validate_webhook_url(self, field):
734 798 registry = pyramid.threadlocal.get_current_registry()
735 799 client = SlackIntegration.create_client(field.data)
736 link = "<%s|%s>" % (registry.settings['mailing.app_url'],
737 registry.settings['mailing.from_name'])
800 link = "<%s|%s>" % (
801 registry.settings["mailing.app_url"],
802 registry.settings["mailing.from_name"],
803 )
738 804 test_data = {
739 805 "username": "AppEnlight",
740 806 "icon_emoji": ":fire:",
741 807 "attachments": [
742 {"fallback": "Testing integration channel: %s" % link,
808 {
809 "fallback": "Testing integration channel: %s" % link,
743 810 "pretext": "Testing integration channel: %s" % link,
744 811 "color": "good",
745 812 "fields": [
746 813 {
747 814 "title": "Status",
748 815 "value": "Integration is working fine",
749 "short": False
816 "short": False,
750 817 }
751 ]}
752 ]
818 ],
819 }
820 ],
753 821 }
754 822 try:
755 823 client.make_request(data=test_data)
@@ -759,44 +827,52 b' class IntegrationSlackForm(ReactorForm):'
759 827
760 828 class IntegrationWebhooksForm(ReactorForm):
761 829 reports_webhook = wtforms.StringField(
762 'Reports webhook',
830 "Reports webhook",
763 831 filters=[strip_filter],
764 validators=[wtforms.validators.DataRequired()])
832 validators=[wtforms.validators.DataRequired()],
833 )
765 834 alerts_webhook = wtforms.StringField(
766 'Alerts webhook',
835 "Alerts webhook",
767 836 filters=[strip_filter],
768 validators=[wtforms.validators.DataRequired()])
769 submit = wtforms.SubmitField(_('Setup webhooks'))
770 ignore_labels = ['submit']
771 css_classes = {'submit': 'btn btn-primary'}
837 validators=[wtforms.validators.DataRequired()],
838 )
839 submit = wtforms.SubmitField(_("Setup webhooks"))
840 ignore_labels = ["submit"]
841 css_classes = {"submit": "btn btn-primary"}
772 842
773 843
774 844 class IntegrationJiraForm(ReactorForm):
775 845 host_name = wtforms.StringField(
776 'Server URL',
846 "Server URL",
777 847 filters=[strip_filter],
778 validators=[wtforms.validators.DataRequired()])
848 validators=[wtforms.validators.DataRequired()],
849 )
779 850 user_name = wtforms.StringField(
780 'Username',
851 "Username",
781 852 filters=[strip_filter],
782 validators=[wtforms.validators.DataRequired()])
853 validators=[wtforms.validators.DataRequired()],
854 )
783 855 password = wtforms.PasswordField(
784 'Password',
856 "Password",
785 857 filters=[strip_filter],
786 validators=[wtforms.validators.DataRequired()])
858 validators=[wtforms.validators.DataRequired()],
859 )
787 860 project = wtforms.StringField(
788 'Project key',
861 "Project key",
789 862 filters=[uppercase_filter, strip_filter],
790 validators=[wtforms.validators.DataRequired()])
863 validators=[wtforms.validators.DataRequired()],
864 )
791 865
792 866 def validate_project(self, field):
793 867 if not field.data:
794 868 return
795 869 try:
796 client = JiraClient(self.user_name.data,
870 client = JiraClient(
871 self.user_name.data,
797 872 self.password.data,
798 873 self.host_name.data,
799 self.project.data)
874 self.project.data,
875 )
800 876 except Exception as exc:
801 877 raise wtforms.validators.ValidationError(str(exc))
802 878
@@ -809,88 +885,97 b' class IntegrationJiraForm(ReactorForm):'
809 885 def get_deletion_form(resource):
810 886 class F(ReactorForm):
811 887 application_name = wtforms.StringField(
812 'Application Name',
888 "Application Name",
813 889 filters=[strip_filter],
814 validators=[wtforms.validators.AnyOf([resource.resource_name])])
890 validators=[wtforms.validators.AnyOf([resource.resource_name])],
891 )
815 892 resource_id = wtforms.HiddenField(default=resource.resource_id)
816 submit = wtforms.SubmitField(_('Delete my application'))
817 ignore_labels = ['submit']
818 css_classes = {'submit': 'btn btn-danger'}
893 submit = wtforms.SubmitField(_("Delete my application"))
894 ignore_labels = ["submit"]
895 css_classes = {"submit": "btn btn-danger"}
819 896
820 897 return F
821 898
822 899
823 900 class ChangeApplicationOwnerForm(ReactorForm):
824 901 password = wtforms.PasswordField(
825 'Password',
902 "Password",
826 903 filters=[strip_filter],
827 validators=[old_password_validator,
828 wtforms.validators.DataRequired()])
904 validators=[old_password_validator, wtforms.validators.DataRequired()],
905 )
829 906
830 907 user_name = wtforms.StringField(
831 'New owner\'s username',
908 "New owner's username",
832 909 filters=[strip_filter],
833 validators=[found_username_validator,
834 wtforms.validators.DataRequired()])
835 submit = wtforms.SubmitField(_('Transfer ownership of application'))
836 ignore_labels = ['submit']
837 css_classes = {'submit': 'btn btn-danger'}
910 validators=[found_username_validator, wtforms.validators.DataRequired()],
911 )
912 submit = wtforms.SubmitField(_("Transfer ownership of application"))
913 ignore_labels = ["submit"]
914 css_classes = {"submit": "btn btn-danger"}
838 915
839 916
840 917 def default_filename():
841 return 'Invoice %s' % datetime.datetime.utcnow().strftime('%Y/%m')
918 return "Invoice %s" % datetime.datetime.utcnow().strftime("%Y/%m")
842 919
843 920
844 921 class FileUploadForm(ReactorForm):
845 title = wtforms.StringField('File Title',
922 title = wtforms.StringField(
923 "File Title",
846 924 default=default_filename,
847 validators=[wtforms.validators.DataRequired()])
848 file = wtforms.FileField('File')
925 validators=[wtforms.validators.DataRequired()],
926 )
927 file = wtforms.FileField("File")
849 928
850 929 def validate_file(self, field):
851 if not hasattr(field.data, 'file'):
852 raise wtforms.ValidationError('File is missing')
930 if not hasattr(field.data, "file"):
931 raise wtforms.ValidationError("File is missing")
853 932
854 submit = wtforms.SubmitField(_('Upload'))
933 submit = wtforms.SubmitField(_("Upload"))
855 934
856 935
857 936 def get_partition_deletion_form(es_indices, pg_indices):
858 937 class F(ReactorForm):
859 es_index = wtforms.SelectMultipleField('Elasticsearch',
860 choices=[(ix, '') for ix in
861 es_indices])
862 pg_index = wtforms.SelectMultipleField('pg',
863 choices=[(ix, '') for ix in
864 pg_indices])
865 confirm = wtforms.TextField('Confirm',
938 es_index = wtforms.SelectMultipleField(
939 "Elasticsearch", choices=[(ix, "") for ix in es_indices]
940 )
941 pg_index = wtforms.SelectMultipleField(
942 "pg", choices=[(ix, "") for ix in pg_indices]
943 )
944 confirm = wtforms.TextField(
945 "Confirm",
866 946 filters=[uppercase_filter, strip_filter],
867 947 validators=[
868 wtforms.validators.AnyOf(['CONFIRM']),
869 wtforms.validators.DataRequired()])
870 ignore_labels = ['submit']
871 css_classes = {'submit': 'btn btn-danger'}
948 wtforms.validators.AnyOf(["CONFIRM"]),
949 wtforms.validators.DataRequired(),
950 ],
951 )
952 ignore_labels = ["submit"]
953 css_classes = {"submit": "btn btn-danger"}
872 954
873 955 return F
874 956
875 957
876 958 class GroupCreateForm(ReactorForm):
877 959 group_name = wtforms.StringField(
878 _('Group Name'),
960 _("Group Name"),
879 961 filters=[strip_filter],
880 962 validators=[
881 963 wtforms.validators.Length(min=2, max=50),
882 964 unique_groupname_validator,
883 wtforms.validators.DataRequired()
884 ])
885 description = wtforms.StringField(_('Group description'))
965 wtforms.validators.DataRequired(),
966 ],
967 )
968 description = wtforms.StringField(_("Group description"))
886 969
887 970
888 time_choices = [(k, v['label'],) for k, v in h.time_deltas.items()]
971 time_choices = [(k, v["label"]) for k, v in h.time_deltas.items()]
889 972
890 973
891 974 class AuthTokenCreateForm(ReactorForm):
892 description = wtforms.StringField(_('Token description'))
893 expires = wtforms.SelectField('Expires',
975 description = wtforms.StringField(_("Token description"))
976 expires = wtforms.SelectField(
977 "Expires",
894 978 coerce=lambda x: x,
895 979 choices=time_choices,
896 validators=[wtforms.validators.Optional()])
980 validators=[wtforms.validators.Optional()],
981 )
@@ -24,20 +24,20 b' from appenlight_client.exceptions import get_current_traceback'
24 24
25 25
26 26 def generate_random_string(chars=10):
27 return ''.join(random.sample(string.ascii_letters * 2 + string.digits,
28 chars))
27 return "".join(random.sample(string.ascii_letters * 2 + string.digits, chars))
29 28
30 29
31 30 def to_integer_safe(input):
32 31 try:
33 32 return int(input)
34 except (TypeError, ValueError,):
33 except (TypeError, ValueError):
35 34 return None
36 35
37 36
38 37 def print_traceback(log):
39 traceback = get_current_traceback(skip=1, show_hidden_frames=True,
40 ignore_system_exceptions=True)
38 traceback = get_current_traceback(
39 skip=1, show_hidden_frames=True, ignore_system_exceptions=True
40 )
41 41 exception_text = traceback.exception
42 42 log.error(exception_text)
43 43 log.error(traceback.plaintext)
@@ -45,6 +45,5 b' def print_traceback(log):'
45 45
46 46
47 47 def get_callable(import_string):
48 import_module, indexer_callable = import_string.split(':')
49 return getattr(importlib.import_module(import_module),
50 indexer_callable)
48 import_module, indexer_callable = import_string.split(":")
49 return getattr(importlib.import_module(import_module), indexer_callable)
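get_callable resolves "module:attribute" import strings via importlib. The snippet below exercises the same two lines against a stdlib module, so it runs anywhere:

    import importlib

    def get_callable(import_string):
        import_module, indexer_callable = import_string.split(":")
        return getattr(importlib.import_module(import_module), indexer_callable)

    join = get_callable("os.path:join")
    assert join("a", "b") in ("a/b", "a\\b")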
@@ -27,21 +27,18 b' log = logging.getLogger(__name__)'
27 27
28 28 def rate_limiting(request, resource, section, to_increment=1):
29 29 tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
30 key = REDIS_KEYS['rate_limits'][section].format(tsample,
31 resource.resource_id)
30 key = REDIS_KEYS["rate_limits"][section].format(tsample, resource.resource_id)
32 31 redis_pipeline = request.registry.redis_conn.pipeline()
33 32 redis_pipeline.incr(key, to_increment)
34 33 redis_pipeline.expire(key, 3600 * 24)
35 34 results = redis_pipeline.execute()
36 35 current_count = results[0]
37 config = ConfigService.by_key_and_section(section, 'global')
36 config = ConfigService.by_key_and_section(section, "global")
38 37 limit = config.value if config else 1000
39 38 if current_count > int(limit):
40 log.info('RATE LIMITING: {}: {}, {}'.format(
41 section, resource, current_count))
42 abort_msg = 'Rate limits are in effect for this application'
43 raise HTTPTooManyRequests(abort_msg,
44 headers={'X-AppEnlight': abort_msg})
39 log.info("RATE LIMITING: {}: {}, {}".format(section, resource, current_count))
40 abort_msg = "Rate limits are in effect for this application"
41 raise HTTPTooManyRequests(abort_msg, headers={"X-AppEnlight": abort_msg})
45 42
46 43
47 44 def check_cors(request, application, should_return=True):
@@ -50,31 +47,34 b' def check_cors(request, application, should_return=True):'
50 47 application, otherwise return 403
51 48 """
52 49 origin_found = False
53 origin = request.headers.get('Origin')
50 origin = request.headers.get("Origin")
54 51 if should_return:
55 log.info('CORS for %s' % origin)
52 log.info("CORS for %s" % origin)
56 53 if not origin:
57 54 return False
58 for domain in application.domains.split('\n'):
55 for domain in application.domains.split("\n"):
59 56 if domain in origin:
60 57 origin_found = True
61 58 if origin_found:
62 request.response.headers.add('Access-Control-Allow-Origin', origin)
63 request.response.headers.add('XDomainRequestAllowed', '1')
64 request.response.headers.add('Access-Control-Allow-Methods',
65 'GET, POST, OPTIONS')
66 request.response.headers.add('Access-Control-Allow-Headers',
67 'Accept-Encoding, Accept-Language, '
68 'Content-Type, '
69 'Depth, User-Agent, X-File-Size, '
70 'X-Requested-With, If-Modified-Since, '
71 'X-File-Name, '
72 'Cache-Control, Host, Pragma, Accept, '
73 'Origin, Connection, '
74 'Referer, Cookie, '
75 'X-appenlight-public-api-key, '
76 'x-appenlight-public-api-key')
77 request.response.headers.add('Access-Control-Max-Age', '86400')
59 request.response.headers.add("Access-Control-Allow-Origin", origin)
60 request.response.headers.add("XDomainRequestAllowed", "1")
61 request.response.headers.add(
62 "Access-Control-Allow-Methods", "GET, POST, OPTIONS"
63 )
64 request.response.headers.add(
65 "Access-Control-Allow-Headers",
66 "Accept-Encoding, Accept-Language, "
67 "Content-Type, "
68 "Depth, User-Agent, X-File-Size, "
69 "X-Requested-With, If-Modified-Since, "
70 "X-File-Name, "
71 "Cache-Control, Host, Pragma, Accept, "
72 "Origin, Connection, "
73 "Referer, Cookie, "
74 "X-appenlight-public-api-key, "
75 "x-appenlight-public-api-key",
76 )
77 request.response.headers.add("Access-Control-Max-Age", "86400")
78 78 return request.response
79 79 else:
80 80 return HTTPForbidden()
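rate_limiting above implements a fixed one-minute window: the key embeds the truncated timestamp, INCR counts the hits, EXPIRE bounds the key's lifetime. A server-free sketch of the same window logic, with a dict standing in for Redis (names and limit are illustrative):

    import datetime

    counters = {}  # stands in for Redis; the real code also sets a 24 h expiry

    def rate_limit(resource_id, limit=1000, to_increment=1):
        tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
        key = "rate_limits:{}:{}".format(resource_id, tsample.isoformat())
        counters[key] = counters.get(key, 0) + to_increment  # INCR
        if counters[key] > int(limit):
            raise RuntimeError("Rate limits are in effect for this application")

    for _ in range(5):
        rate_limit("app-1")  # well under the limit, never raises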
@@ -42,23 +42,27 b' def hashgen(namespace, fn, to_str=compat.string_type):'
42 42 """
43 43
44 44 if namespace is None:
45 namespace = '%s:%s' % (fn.__module__, fn.__name__)
45 namespace = "%s:%s" % (fn.__module__, fn.__name__)
46 46 else:
47 namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
47 namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)
48 48
49 49 args = inspect.getargspec(fn)
50 has_self = args[0] and args[0][0] in ('self', 'cls')
50 has_self = args[0] and args[0][0] in ("self", "cls")
51 51
52 52 def generate_key(*args, **kw):
53 53 if kw:
54 54 raise ValueError(
55 55 "dogpile.cache's default key creation "
56 "function does not accept keyword arguments.")
56 "function does not accept keyword arguments."
57 )
57 58 if has_self:
58 59 args = args[1:]
59 60
60 return namespace + "|" + hashlib.sha1(
61 " ".join(map(to_str, args)).encode('utf8')).hexdigest()
61 return (
62 namespace
63 + "|"
64 + hashlib.sha1(" ".join(map(to_str, args)).encode("utf8")).hexdigest()
65 )
62 66
63 67 return generate_key
64 68
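The generated cache key is the namespace plus a SHA-1 of the stringified positional arguments (with self/cls stripped by the real code). Reproduced standalone (the namespace value is made up):

    import hashlib

    def generate_key(namespace, *args):
        digest = hashlib.sha1(" ".join(map(str, args)).encode("utf8")).hexdigest()
        return namespace + "|" + digest

    print(generate_key("reports.module:fetch_report", 42, "2017-01-01"))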
@@ -68,116 +72,97 b' class CacheRegions(object):'
68 72 config_redis = {"arguments": settings}
69 73
70 74 self.redis_min_1 = make_region(
71 function_key_generator=hashgen,
72 key_mangler=key_mangler).configure(
73 "dogpile.cache.redis",
74 expiration_time=60,
75 **copy.deepcopy(config_redis))
75 function_key_generator=hashgen, key_mangler=key_mangler
76 ).configure(
77 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
78 )
76 79 self.redis_min_5 = make_region(
77 function_key_generator=hashgen,
78 key_mangler=key_mangler).configure(
79 "dogpile.cache.redis",
80 expiration_time=300,
81 **copy.deepcopy(config_redis))
80 function_key_generator=hashgen, key_mangler=key_mangler
81 ).configure(
82 "dogpile.cache.redis", expiration_time=300, **copy.deepcopy(config_redis)
83 )
82 84
83 85 self.redis_min_10 = make_region(
84 function_key_generator=hashgen,
85 key_mangler=key_mangler).configure(
86 "dogpile.cache.redis",
87 expiration_time=60,
88 **copy.deepcopy(config_redis))
86 function_key_generator=hashgen, key_mangler=key_mangler
87 ).configure(
88 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
89 )
89 90
90 91 self.redis_min_60 = make_region(
91 function_key_generator=hashgen,
92 key_mangler=key_mangler).configure(
93 "dogpile.cache.redis",
94 expiration_time=3600,
95 **copy.deepcopy(config_redis))
92 function_key_generator=hashgen, key_mangler=key_mangler
93 ).configure(
94 "dogpile.cache.redis", expiration_time=3600, **copy.deepcopy(config_redis)
95 )
96 96
97 97 self.redis_sec_1 = make_region(
98 function_key_generator=hashgen,
99 key_mangler=key_mangler).configure(
100 "dogpile.cache.redis",
101 expiration_time=1,
102 **copy.deepcopy(config_redis))
98 function_key_generator=hashgen, key_mangler=key_mangler
99 ).configure(
100 "dogpile.cache.redis", expiration_time=1, **copy.deepcopy(config_redis)
101 )
103 102
104 103 self.redis_sec_5 = make_region(
105 function_key_generator=hashgen,
106 key_mangler=key_mangler).configure(
107 "dogpile.cache.redis",
108 expiration_time=5,
109 **copy.deepcopy(config_redis))
104 function_key_generator=hashgen, key_mangler=key_mangler
105 ).configure(
106 "dogpile.cache.redis", expiration_time=5, **copy.deepcopy(config_redis)
107 )
110 108
111 109 self.redis_sec_30 = make_region(
112 function_key_generator=hashgen,
113 key_mangler=key_mangler).configure(
114 "dogpile.cache.redis",
115 expiration_time=30,
116 **copy.deepcopy(config_redis))
110 function_key_generator=hashgen, key_mangler=key_mangler
111 ).configure(
112 "dogpile.cache.redis", expiration_time=30, **copy.deepcopy(config_redis)
113 )
117 114
118 115 self.redis_day_1 = make_region(
119 function_key_generator=hashgen,
120 key_mangler=key_mangler).configure(
121 "dogpile.cache.redis",
122 expiration_time=86400,
123 **copy.deepcopy(config_redis))
116 function_key_generator=hashgen, key_mangler=key_mangler
117 ).configure(
118 "dogpile.cache.redis", expiration_time=86400, **copy.deepcopy(config_redis)
119 )
124 120
125 121 self.redis_day_7 = make_region(
126 function_key_generator=hashgen,
127 key_mangler=key_mangler).configure(
122 function_key_generator=hashgen, key_mangler=key_mangler
123 ).configure(
128 124 "dogpile.cache.redis",
129 125 expiration_time=86400 * 7,
130 **copy.deepcopy(config_redis))
126 **copy.deepcopy(config_redis)
127 )
131 128
132 129 self.redis_day_30 = make_region(
133 function_key_generator=hashgen,
134 key_mangler=key_mangler).configure(
130 function_key_generator=hashgen, key_mangler=key_mangler
131 ).configure(
135 132 "dogpile.cache.redis",
136 133 expiration_time=86400 * 30,
137 **copy.deepcopy(config_redis))
134 **copy.deepcopy(config_redis)
135 )
138 136
139 137 self.memory_day_1 = make_region(
140 function_key_generator=hashgen,
141 key_mangler=key_mangler).configure(
142 "dogpile.cache.memory",
143 expiration_time=86400,
144 **copy.deepcopy(config_redis))
138 function_key_generator=hashgen, key_mangler=key_mangler
139 ).configure(
140 "dogpile.cache.memory", expiration_time=86400, **copy.deepcopy(config_redis)
141 )
145 142
146 143 self.memory_sec_1 = make_region(
147 function_key_generator=hashgen,
148 key_mangler=key_mangler).configure(
149 "dogpile.cache.memory",
150 expiration_time=1)
144 function_key_generator=hashgen, key_mangler=key_mangler
145 ).configure("dogpile.cache.memory", expiration_time=1)
151 146
152 147 self.memory_sec_5 = make_region(
153 function_key_generator=hashgen,
154 key_mangler=key_mangler).configure(
155 "dogpile.cache.memory",
156 expiration_time=5)
148 function_key_generator=hashgen, key_mangler=key_mangler
149 ).configure("dogpile.cache.memory", expiration_time=5)
157 150
158 151 self.memory_min_1 = make_region(
159 function_key_generator=hashgen,
160 key_mangler=key_mangler).configure(
161 "dogpile.cache.memory",
162 expiration_time=60)
152 function_key_generator=hashgen, key_mangler=key_mangler
153 ).configure("dogpile.cache.memory", expiration_time=60)
163 154
164 155 self.memory_min_5 = make_region(
165 function_key_generator=hashgen,
166 key_mangler=key_mangler).configure(
167 "dogpile.cache.memory",
168 expiration_time=300)
156 function_key_generator=hashgen, key_mangler=key_mangler
157 ).configure("dogpile.cache.memory", expiration_time=300)
169 158
170 159 self.memory_min_10 = make_region(
171 function_key_generator=hashgen,
172 key_mangler=key_mangler).configure(
173 "dogpile.cache.memory",
174 expiration_time=600)
160 function_key_generator=hashgen, key_mangler=key_mangler
161 ).configure("dogpile.cache.memory", expiration_time=600)
175 162
176 163 self.memory_min_60 = make_region(
177 function_key_generator=hashgen,
178 key_mangler=key_mangler).configure(
179 "dogpile.cache.memory",
180 expiration_time=3600)
164 function_key_generator=hashgen, key_mangler=key_mangler
165 ).configure("dogpile.cache.memory", expiration_time=3600)
181 166
182 167
183 168 def get_region(region):
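Each attribute configured above is an ordinary dogpile.cache region, so callers use the stock decorator API. A self-contained example against an in-memory backend (the decorated function and its values are illustrative):

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=60)

    @region.cache_on_arguments()
    def expensive_lookup(resource_id):
        return {"resource_id": resource_id}

    expensive_lookup(5)  # computed once, then served from the region for 60 s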
@@ -5,6 +5,7 b' from pyramid.config import Configurator'
5 5
6 6 log = logging.getLogger(__name__)
7 7
8
8 9 class InspectProxy(object):
9 10 """
10 11 Proxy to the `inspect` module that allows us to use the pyramid include
@@ -17,7 +18,7 b' class InspectProxy(object):'
17 18 """
18 19 if inspect.ismethod(cyfunction):
19 20 cyfunction = cyfunction.im_func
20 return getattr(cyfunction, 'func_code')
21 return getattr(cyfunction, "func_code")
21 22
22 23 def getmodule(self, *args, **kwds):
23 24 """
@@ -40,14 +41,14 b' class InspectProxy(object):'
40 41 """
41 42 # Check if it's called to look up the source file that contains the
42 43 # magic pyramid `includeme` callable.
43 if getattr(obj, '__name__') == 'includeme':
44 if getattr(obj, "__name__") == "includeme":
44 45 try:
45 46 return inspect.getfile(obj)
46 47 except TypeError as e:
47 48 # Cython functions are not recognized as functions by the
48 49 # inspect module. We have to unpack the func_code attribute
49 50 # ourself.
50 if 'cyfunction' in e.message:
51 if "cyfunction" in e.message:
51 52 obj = self._get_cyfunction_func_code(obj)
52 53 return inspect.getfile(obj)
53 54 raise
@@ -60,33 +61,32 b' class CythonCompatConfigurator(Configurator):'
60 61 Customized configurator to replace the inspect class attribute with
61 62 a custom one that is cython compatible.
62 63 """
64
63 65 inspect = InspectProxy()
64 66
65 67
66 68 def register_appenlight_plugin(config, plugin_name, plugin_config):
67 69 def register():
68 log.warning('Registering plugin: {}'.format(plugin_name))
70 log.warning("Registering plugin: {}".format(plugin_name))
69 71 if plugin_name not in config.registry.appenlight_plugins:
70 72 config.registry.appenlight_plugins[plugin_name] = {
71 'javascript': None,
72 'static': None,
73 'css': None,
74 'celery_tasks': None,
75 'celery_beats': None,
76 'fulltext_indexer': None,
77 'sqlalchemy_migrations': None,
78 'default_values_setter': None,
79 'header_html': None,
80 'resource_types': [],
81 'url_gen': None
73 "javascript": None,
74 "static": None,
75 "css": None,
76 "celery_tasks": None,
77 "celery_beats": None,
78 "fulltext_indexer": None,
79 "sqlalchemy_migrations": None,
80 "default_values_setter": None,
81 "header_html": None,
82 "resource_types": [],
83 "url_gen": None,
82 84 }
83 config.registry.appenlight_plugins[plugin_name].update(
84 plugin_config)
85 config.registry.appenlight_plugins[plugin_name].update(plugin_config)
85 86 # inform AE what kind of resource types we have available
86 87 # so we can avoid failing when a plugin is removed but data
87 88 # is still present in the db
88 if plugin_config.get('resource_types'):
89 config.registry.resource_types.extend(
90 plugin_config['resource_types'])
89 if plugin_config.get("resource_types"):
90 config.registry.resource_types.extend(plugin_config["resource_types"])
91 91
92 config.action('appenlight_plugin={}'.format(plugin_name), register)
92 config.action("appenlight_plugin={}".format(plugin_name), register)
@@ -23,20 +23,20 b' ENCRYPTION_SECRET = None'
23 23 def encrypt_fernet(value):
24 24 # avoid double encryption
25 25 # not sure if this is needed but it won't hurt too much to have this
26 if value.startswith('enc$fernet$'):
26 if value.startswith("enc$fernet$"):
27 27 return value
28 28 f = Fernet(ENCRYPTION_SECRET)
29 return 'enc$fernet${}'.format(f.encrypt(value.encode('utf8')).decode('utf8'))
29 return "enc$fernet${}".format(f.encrypt(value.encode("utf8")).decode("utf8"))
30 30
31 31
32 32 def decrypt_fernet(value):
33 parts = value.split('$', 3)
33 parts = value.split("$", 3)
34 34 if not len(parts) == 3:
35 35 # not encrypted values
36 36 return value
37 37 else:
38 38 f = Fernet(ENCRYPTION_SECRET)
39 decrypted_data = f.decrypt(parts[2].encode('utf8')).decode('utf8')
39 decrypted_data = f.decrypt(parts[2].encode("utf8")).decode("utf8")
40 40 return decrypted_data
41 41
42 42
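A round-trip sketch of the enc$fernet$ prefix scheme above, using a throwaway key; in the real module ENCRYPTION_SECRET is populated from configuration:

from cryptography.fernet import Fernet

secret = Fernet.generate_key()
f = Fernet(secret)

token = "enc$fernet${}".format(f.encrypt("hunter2".encode("utf8")).decode("utf8"))
parts = token.split("$", 3)
assert len(parts) == 3 and parts[1] == "fernet"
assert f.decrypt(parts[2].encode("utf8")).decode("utf8") == "hunter2"

The prefix check in encrypt_fernet makes the call idempotent: re-encrypting an already tagged value is a no-op.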
@@ -1,4 +1,5 b''
1 1 import collections
2
2 3 # -*- coding: utf-8 -*-
3 4
4 5 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
@@ -20,13 +21,14 b' class StupidEnum(object):'
20 21 @classmethod
21 22 def set_inverse(cls):
22 23 cls._inverse_values = dict(
23 (y, x) for x, y in vars(cls).items() if
24 not x.startswith('_') and not callable(y)
24 (y, x)
25 for x, y in vars(cls).items()
26 if not x.startswith("_") and not callable(y)
25 27 )
26 28
27 29 @classmethod
28 30 def key_from_value(cls, value):
29 if not hasattr(cls, '_inverse_values'):
31 if not hasattr(cls, "_inverse_values"):
30 32 cls.set_inverse()
31 33 return cls._inverse_values.get(value)
32 34
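A self-contained restatement of the inverse-lookup trick above, plus a usage check; the ReportType subclass and its values are made up for illustration:

class StupidEnum(object):
    @classmethod
    def set_inverse(cls):
        # build {value: name} once from the class attributes
        cls._inverse_values = dict(
            (y, x)
            for x, y in vars(cls).items()
            if not x.startswith("_") and not callable(y)
        )

    @classmethod
    def key_from_value(cls, value):
        if not hasattr(cls, "_inverse_values"):
            cls.set_inverse()
        return cls._inverse_values.get(value)


class ReportType(StupidEnum):
    error = 1
    slow = 3


assert ReportType.key_from_value(3) == "slow"
assert ReportType.key_from_value(99) is None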
@@ -25,7 +25,7 b' import functools'
25 25 import decimal
26 26 import imp
27 27
28 __all__ = ['json', 'simplejson', 'stdlibjson']
28 __all__ = ["json", "simplejson", "stdlibjson"]
29 29
30 30
31 31 def _is_aware(value):
@@ -35,8 +35,7 b' def _is_aware(value):'
35 35 The logic is described in Python's docs:
36 36 http://docs.python.org/library/datetime.html#datetime.tzinfo
37 37 """
38 return (value.tzinfo is not None
39 and value.tzinfo.utcoffset(value) is not None)
38 return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None
40 39
41 40
42 41 def _obj_dump(obj):
@@ -55,8 +54,8 b' def _obj_dump(obj):'
55 54 r = obj.isoformat()
56 55 # if obj.microsecond:
57 56 # r = r[:23] + r[26:]
58 if r.endswith('+00:00'):
59 r = r[:-6] + 'Z'
57 if r.endswith("+00:00"):
58 r = r[:-6] + "Z"
60 59 return r
61 60 elif isinstance(obj, datetime.date):
62 61 return obj.isoformat()
@@ -71,7 +70,7 b' def _obj_dump(obj):'
71 70 return r
72 71 elif isinstance(obj, set):
73 72 return list(obj)
74 elif hasattr(obj, '__json__'):
73 elif hasattr(obj, "__json__"):
75 74 if callable(obj.__json__):
76 75 return obj.__json__()
77 76 else:
@@ -83,8 +82,7 b' def _obj_dump(obj):'
83 82 # Import simplejson
84 83 try:
85 84 # import simplejson initially
86 _sj = imp.load_module('_sj', *imp.find_module('simplejson'))
87
85 _sj = imp.load_module("_sj", *imp.find_module("simplejson"))
88 86
89 87 def extended_encode(obj):
90 88 try:
@@ -93,22 +91,21 b' try:'
93 91 pass
94 92 raise TypeError("%r is not JSON serializable" % (obj,))
95 93
96
97 94 # we handle decimals on our own to unify the behavior of json and
98 95 # simplejson
99 sj_version = [int(x) for x in _sj.__version__.split('.')]
96 sj_version = [int(x) for x in _sj.__version__.split(".")]
100 97 major, minor = sj_version[0], sj_version[1]
101 98 if major < 2 or (major == 2 and minor < 1):
102 99 # simplejson < 2.1 doesn't support use_decimal
103 _sj.dumps = functools.partial(
104 _sj.dumps, default=extended_encode)
105 _sj.dump = functools.partial(
106 _sj.dump, default=extended_encode)
100 _sj.dumps = functools.partial(_sj.dumps, default=extended_encode)
101 _sj.dump = functools.partial(_sj.dump, default=extended_encode)
107 102 else:
108 103 _sj.dumps = functools.partial(
109 _sj.dumps, default=extended_encode, use_decimal=False)
104 _sj.dumps, default=extended_encode, use_decimal=False
105 )
110 106 _sj.dump = functools.partial(
111 _sj.dump, default=extended_encode, use_decimal=False)
107 _sj.dump, default=extended_encode, use_decimal=False
108 )
112 109 simplejson = _sj
113 110
114 111 except ImportError:
@@ -117,8 +114,7 b' except ImportError:'
117 114
118 115 try:
119 116 # simplejson not found try out regular json module
120 _json = imp.load_module('_json', *imp.find_module('json'))
121
117 _json = imp.load_module("_json", *imp.find_module("json"))
122 118
123 119 # extended JSON encoder for json
124 120 class ExtendedEncoder(_json.JSONEncoder):
@@ -129,7 +125,6 b' try:'
129 125 pass
130 126 raise TypeError("%r is not JSON serializable" % (obj,))
131 127
132
133 128 # monkey-patch JSON encoder to use extended version
134 129 _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
135 130 _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)
@@ -145,4 +140,4 b' if simplejson:'
145 140 elif _json:
146 141 json = _json
147 142 else:
148 raise ImportError('Could not find any json modules')
143 raise ImportError("Could not find any json modules")
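The module above patches whichever JSON backend it finds with a default/cls hook so extra types serialize uniformly. A condensed standalone sketch of that hook against the stdlib json module:

import datetime
import decimal
import json


def _obj_dump(obj):
    if isinstance(obj, datetime.datetime):
        r = obj.isoformat()
        # UTC timestamps get the trailing Z the frontend expects
        return r[:-6] + "Z" if r.endswith("+00:00") else r
    elif isinstance(obj, datetime.date):
        return obj.isoformat()
    elif isinstance(obj, set):
        return list(obj)
    elif isinstance(obj, decimal.Decimal):
        return float(obj)
    raise TypeError("%r is not JSON serializable" % (obj,))


print(json.dumps({"when": datetime.datetime(2017, 1, 1)}, default=_obj_dump))
# {"when": "2017-01-01T00:00:00"}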
@@ -26,94 +26,135 b' _ = lambda x: x'
26 26
27 27 time_deltas = OrderedDict()
28 28
29 time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1),
30 'label': '1 minute', 'minutes': 1}
31
32 time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5),
33 'label': '5 minutes', 'minutes': 5}
34 time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30),
35 'label': '30 minutes', 'minutes': 30}
36 time_deltas['1h'] = {'delta': datetime.timedelta(hours=1),
37 'label': '60 minutes', 'minutes': 60}
38 time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours',
39 'minutes': 60 * 4}
40 time_deltas['12h'] = {'delta': datetime.timedelta(hours=12),
41 'label': '12 hours', 'minutes': 60 * 12}
42 time_deltas['24h'] = {'delta': datetime.timedelta(hours=24),
43 'label': '24 hours', 'minutes': 60 * 24}
44 time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days',
45 'minutes': 60 * 24 * 3}
46 time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days',
47 'minutes': 60 * 24 * 7}
48 time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days',
49 'minutes': 60 * 24 * 14}
50 time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days',
51 'minutes': 60 * 24 * 31}
52 time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3),
53 'label': '3 months',
54 'minutes': 60 * 24 * 31 * 3}
55 time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6),
56 'label': '6 months',
57 'minutes': 60 * 24 * 31 * 6}
58 time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12),
59 'label': '12 months',
60 'minutes': 60 * 24 * 31 * 12}
29 time_deltas["1m"] = {
30 "delta": datetime.timedelta(minutes=1),
31 "label": "1 minute",
32 "minutes": 1,
33 }
34
35 time_deltas["5m"] = {
36 "delta": datetime.timedelta(minutes=5),
37 "label": "5 minutes",
38 "minutes": 5,
39 }
40 time_deltas["30m"] = {
41 "delta": datetime.timedelta(minutes=30),
42 "label": "30 minutes",
43 "minutes": 30,
44 }
45 time_deltas["1h"] = {
46 "delta": datetime.timedelta(hours=1),
47 "label": "60 minutes",
48 "minutes": 60,
49 }
50 time_deltas["4h"] = {
51 "delta": datetime.timedelta(hours=4),
52 "label": "4 hours",
53 "minutes": 60 * 4,
54 }
55 time_deltas["12h"] = {
56 "delta": datetime.timedelta(hours=12),
57 "label": "12 hours",
58 "minutes": 60 * 12,
59 }
60 time_deltas["24h"] = {
61 "delta": datetime.timedelta(hours=24),
62 "label": "24 hours",
63 "minutes": 60 * 24,
64 }
65 time_deltas["3d"] = {
66 "delta": datetime.timedelta(days=3),
67 "label": "3 days",
68 "minutes": 60 * 24 * 3,
69 }
70 time_deltas["1w"] = {
71 "delta": datetime.timedelta(days=7),
72 "label": "7 days",
73 "minutes": 60 * 24 * 7,
74 }
75 time_deltas["2w"] = {
76 "delta": datetime.timedelta(days=14),
77 "label": "14 days",
78 "minutes": 60 * 24 * 14,
79 }
80 time_deltas["1M"] = {
81 "delta": datetime.timedelta(days=31),
82 "label": "31 days",
83 "minutes": 60 * 24 * 31,
84 }
85 time_deltas["3M"] = {
86 "delta": datetime.timedelta(days=31 * 3),
87 "label": "3 months",
88 "minutes": 60 * 24 * 31 * 3,
89 }
90 time_deltas["6M"] = {
91 "delta": datetime.timedelta(days=31 * 6),
92 "label": "6 months",
93 "minutes": 60 * 24 * 31 * 6,
94 }
95 time_deltas["12M"] = {
96 "delta": datetime.timedelta(days=31 * 12),
97 "label": "12 months",
98 "minutes": 60 * 24 * 31 * 12,
99 }
61 100
62 101 # used in json representation
63 time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']})
64 for k, v in time_deltas.items()])
65 FlashMsg = namedtuple('FlashMsg', ['msg', 'level'])
102 time_options = dict(
103 [
104 (k, {"label": v["label"], "minutes": v["minutes"]})
105 for k, v in time_deltas.items()
106 ]
107 )
108 FlashMsg = namedtuple("FlashMsg", ["msg", "level"])
66 109
67 110
68 111 def get_flash(request):
69 112 messages = []
70 113 messages.extend(
71 [FlashMsg(msg, 'error')
72 for msg in request.session.peek_flash('error')])
73 messages.extend([FlashMsg(msg, 'warning')
74 for msg in request.session.peek_flash('warning')])
114 [FlashMsg(msg, "error") for msg in request.session.peek_flash("error")]
115 )
75 116 messages.extend(
76 [FlashMsg(msg, 'notice') for msg in request.session.peek_flash()])
117 [FlashMsg(msg, "warning") for msg in request.session.peek_flash("warning")]
118 )
119 messages.extend([FlashMsg(msg, "notice") for msg in request.session.peek_flash()])
77 120 return messages
78 121
79 122
80 123 def clear_flash(request):
81 request.session.pop_flash('error')
82 request.session.pop_flash('warning')
124 request.session.pop_flash("error")
125 request.session.pop_flash("warning")
83 126 request.session.pop_flash()
84 127
85 128
86 129 def get_type_formatted_flash(request):
87 return [{'msg': message.msg, 'type': message.level}
88 for message in get_flash(request)]
130 return [
131 {"msg": message.msg, "type": message.level} for message in get_flash(request)
132 ]
89 133
90 134
91 135 def gen_pagination_headers(request, paginator):
92 136 headers = {
93 'x-total-count': str(paginator.item_count),
94 'x-current-page': str(paginator.page),
95 'x-items-per-page': str(paginator.items_per_page)
137 "x-total-count": str(paginator.item_count),
138 "x-current-page": str(paginator.page),
139 "x-items-per-page": str(paginator.items_per_page),
96 140 }
97 141 params_dict = request.GET.dict_of_lists()
98 142 last_page_params = copy.deepcopy(params_dict)
99 last_page_params['page'] = paginator.last_page or 1
143 last_page_params["page"] = paginator.last_page or 1
100 144 first_page_params = copy.deepcopy(params_dict)
101 first_page_params.pop('page', None)
145 first_page_params.pop("page", None)
102 146 next_page_params = copy.deepcopy(params_dict)
103 next_page_params['page'] = paginator.next_page or paginator.last_page or 1
147 next_page_params["page"] = paginator.next_page or paginator.last_page or 1
104 148 prev_page_params = copy.deepcopy(params_dict)
105 prev_page_params['page'] = paginator.previous_page or 1
149 prev_page_params["page"] = paginator.previous_page or 1
106 150 lp_url = request.current_route_url(_query=last_page_params)
107 151 fp_url = request.current_route_url(_query=first_page_params)
108 links = [
109 'rel="last", <{}>'.format(lp_url),
110 'rel="first", <{}>'.format(fp_url),
111 ]
152 links = ['rel="last", <{}>'.format(lp_url), 'rel="first", <{}>'.format(fp_url)]
112 153 if first_page_params != prev_page_params:
113 154 prev_url = request.current_route_url(_query=prev_page_params)
114 155 links.append('rel="prev", <{}>'.format(prev_url))
115 156 if last_page_params != next_page_params:
116 157 next_url = request.current_route_url(_query=next_page_params)
117 158 links.append('rel="next", <{}>'.format(next_url))
118 headers['link'] = '; '.join(links)
159 headers["link"] = "; ".join(links)
119 160 return headers
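A framework-free sketch of the header scheme that gen_pagination_headers() builds; the request and paginator objects are replaced with plain values and the edge-case handling is simplified:

def pagination_headers(base_url, page, last_page, item_count, per_page):
    links = [
        'rel="last", <{}?page={}>'.format(base_url, last_page),
        'rel="first", <{}>'.format(base_url),
    ]
    if page > 1:
        links.append('rel="prev", <{}?page={}>'.format(base_url, page - 1))
    if page < last_page:
        links.append('rel="next", <{}?page={}>'.format(base_url, page + 1))
    return {
        "x-total-count": str(item_count),
        "x-current-page": str(page),
        "x-items-per-page": str(per_page),
        "link": "; ".join(links),
    }


print(pagination_headers("/api/logs", 2, 5, 42, 10)["link"])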
@@ -18,17 +18,21 b' import re'
18 18 from appenlight.lib.ext_json import json
19 19 from jinja2 import Markup, escape, evalcontextfilter
20 20
21 _paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
21 _paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")
22 22
23 23
24 24 @evalcontextfilter
25 25 def nl2br(eval_ctx, value):
26 26 if eval_ctx.autoescape:
27 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br>\n'))
28 for p in _paragraph_re.split(escape(value)))
27 result = "\n\n".join(
28 "<p>%s</p>" % p.replace("\n", Markup("<br>\n"))
29 for p in _paragraph_re.split(escape(value))
30 )
29 31 else:
30 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n')
31 for p in _paragraph_re.split(escape(value)))
32 result = "\n\n".join(
33 "<p>%s</p>" % p.replace("\n", "<br>\n")
34 for p in _paragraph_re.split(escape(value))
35 )
32 36 if eval_ctx.autoescape:
33 37 result = Markup(result)
34 38 return result
@@ -36,11 +40,14 b' def nl2br(eval_ctx, value):'
36 40
37 41 @evalcontextfilter
38 42 def toJSONUnsafe(eval_ctx, value):
39 encoded = json.dumps(value).replace('&', '\\u0026') \
40 .replace('<', '\\u003c') \
41 .replace('>', '\\u003e') \
42 .replace('>', '\\u003e') \
43 .replace('"', '\\u0022') \
44 .replace("'", '\\u0027') \
45 .replace(r'\n', '/\\\n')
43 encoded = (
44 json.dumps(value)
45 .replace("&", "\\u0026")
46 .replace("<", "\\u003c")
47 .replace(">", "\\u003e")
48 .replace(">", "\\u003e")
49 .replace('"', "\\u0022")
50 .replace("'", "\\u0027")
51 .replace(r"\n", "/\\\n")
52 )
46 53 return Markup("'%s'" % encoded)
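A standalone usage sketch for the nl2br filter on a bare Jinja2 environment, assuming the Jinja2 2.x API this module imports (Markup and escape moved to markupsafe, and evalcontextfilter was renamed, in Jinja2 3.x):

import re

from jinja2 import Environment, Markup, escape, evalcontextfilter

_paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")


@evalcontextfilter
def nl2br(eval_ctx, value):
    result = "\n\n".join(
        "<p>%s</p>" % p.replace("\n", Markup("<br>\n"))
        for p in _paragraph_re.split(escape(value))
    )
    return Markup(result) if eval_ctx.autoescape else result


env = Environment(autoescape=True)
env.filters["nl2br"] = nl2br
print(env.from_string("{{ text | nl2br }}").render(text="one\ntwo\n\nthree"))
# <p>one<br>
# two</p>
#
# <p>three</p>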
@@ -17,11 +17,30 b''
17 17 import json
18 18 import logging
19 19
20 ignored_keys = ['args', 'asctime', 'created', 'exc_info', 'exc_text',
21 'filename', 'funcName', 'levelname', 'levelno', 'lineno',
22 'message', 'module', 'msecs', 'msg', 'name', 'pathname',
23 'process', 'processName', 'relativeCreated', 'stack_info',
24 'thread', 'threadName']
20 ignored_keys = [
21 "args",
22 "asctime",
23 "created",
24 "exc_info",
25 "exc_text",
26 "filename",
27 "funcName",
28 "levelname",
29 "levelno",
30 "lineno",
31 "message",
32 "module",
33 "msecs",
34 "msg",
35 "name",
36 "pathname",
37 "process",
38 "processName",
39 "relativeCreated",
40 "stack_info",
41 "thread",
42 "threadName",
43 ]
25 44
26 45
27 46 class JSONFormatter(logging.Formatter):
@@ -41,7 +60,7 b' class JSONFormatter(logging.Formatter):'
41 60 record.message = record.getMessage()
42 61 log_dict = vars(record)
43 62 keys = [k for k in log_dict.keys() if k not in ignored_keys]
44 payload = {'message': record.message}
63 payload = {"message": record.message}
45 64 payload.update({k: log_dict[k] for k in keys})
46 65 record.message = json.dumps(payload, default=lambda x: str(x))
47 66
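A minimal wiring sketch for a formatter of this shape on stdlib logging; this condensed version drops the ignored_keys filtering and just emits one JSON body per record:

import json
import logging


class JSONishFormatter(logging.Formatter):
    def format(self, record):
        payload = {"message": record.getMessage(), "level": record.levelname}
        return json.dumps(payload, default=str)


handler = logging.StreamHandler()
handler.setFormatter(JSONishFormatter())
log = logging.getLogger("demo")
log.addHandler(handler)
log.warning("something happened")  # {"message": "something happened", "level": "WARNING"}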
@@ -14,52 +14,56 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 BASE = 'appenlight:data:{}'
17 BASE = "appenlight:data:{}"
18 18
19 19 REDIS_KEYS = {
20 'tasks': {
21 'add_reports_lock': BASE.format('add_reports_lock:{}'),
22 'add_logs_lock': BASE.format('add_logs_lock:{}'),
20 "tasks": {
21 "add_reports_lock": BASE.format("add_reports_lock:{}"),
22 "add_logs_lock": BASE.format("add_logs_lock:{}"),
23 23 },
24 'counters': {
25 'events_per_minute_per_user': BASE.format(
26 'events_per_minute_per_user:{}:{}'),
27 'reports_per_minute': BASE.format('reports_per_minute:{}'),
28 'reports_per_hour_per_app': BASE.format(
29 'reports_per_hour_per_app:{}:{}'),
30 'reports_per_type': BASE.format('reports_per_type:{}'),
31 'logs_per_minute': BASE.format('logs_per_minute:{}'),
32 'logs_per_hour_per_app': BASE.format(
33 'logs_per_hour_per_app:{}:{}'),
34 'metrics_per_minute': BASE.format('metrics_per_minute:{}'),
35 'metrics_per_hour_per_app': BASE.format(
36 'metrics_per_hour_per_app:{}:{}'),
37 'report_group_occurences': BASE.format('report_group_occurences:{}'),
38 'report_group_occurences_alerting': BASE.format(
39 'report_group_occurences_alerting:{}'),
40 'report_group_occurences_10th': BASE.format(
41 'report_group_occurences_10th:{}'),
42 'report_group_occurences_100th': BASE.format(
43 'report_group_occurences_100th:{}'),
24 "counters": {
25 "events_per_minute_per_user": BASE.format("events_per_minute_per_user:{}:{}"),
26 "reports_per_minute": BASE.format("reports_per_minute:{}"),
27 "reports_per_hour_per_app": BASE.format("reports_per_hour_per_app:{}:{}"),
28 "reports_per_type": BASE.format("reports_per_type:{}"),
29 "logs_per_minute": BASE.format("logs_per_minute:{}"),
30 "logs_per_hour_per_app": BASE.format("logs_per_hour_per_app:{}:{}"),
31 "metrics_per_minute": BASE.format("metrics_per_minute:{}"),
32 "metrics_per_hour_per_app": BASE.format("metrics_per_hour_per_app:{}:{}"),
33 "report_group_occurences": BASE.format("report_group_occurences:{}"),
34 "report_group_occurences_alerting": BASE.format(
35 "report_group_occurences_alerting:{}"
36 ),
37 "report_group_occurences_10th": BASE.format("report_group_occurences_10th:{}"),
38 "report_group_occurences_100th": BASE.format(
39 "report_group_occurences_100th:{}"
40 ),
44 41 },
45 'rate_limits': {
46 'per_application_reports_rate_limit': BASE.format(
47 'per_application_reports_limit:{}:{}'),
48 'per_application_logs_rate_limit': BASE.format(
49 'per_application_logs_rate_limit:{}:{}'),
50 'per_application_metrics_rate_limit': BASE.format(
51 'per_application_metrics_rate_limit:{}:{}'),
42 "rate_limits": {
43 "per_application_reports_rate_limit": BASE.format(
44 "per_application_reports_limit:{}:{}"
45 ),
46 "per_application_logs_rate_limit": BASE.format(
47 "per_application_logs_rate_limit:{}:{}"
48 ),
49 "per_application_metrics_rate_limit": BASE.format(
50 "per_application_metrics_rate_limit:{}:{}"
51 ),
52 52 },
53 'apps_that_got_new_data_per_hour': BASE.format('apps_that_got_new_data_per_hour:{}'),
54 'apps_that_had_reports': BASE.format('apps_that_had_reports'),
55 'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'),
56 'apps_that_had_reports_alerting': BASE.format(
57 'apps_that_had_reports_alerting'),
58 'apps_that_had_error_reports_alerting': BASE.format(
59 'apps_that_had_error_reports_alerting'),
60 'reports_to_notify_per_type_per_app': BASE.format(
61 'reports_to_notify_per_type_per_app:{}:{}'),
62 'reports_to_notify_per_type_per_app_alerting': BASE.format(
63 'reports_to_notify_per_type_per_app_alerting:{}:{}'),
64 'seen_tag_list': BASE.format('seen_tag_list')
53 "apps_that_got_new_data_per_hour": BASE.format(
54 "apps_that_got_new_data_per_hour:{}"
55 ),
56 "apps_that_had_reports": BASE.format("apps_that_had_reports"),
57 "apps_that_had_error_reports": BASE.format("apps_that_had_error_reports"),
58 "apps_that_had_reports_alerting": BASE.format("apps_that_had_reports_alerting"),
59 "apps_that_had_error_reports_alerting": BASE.format(
60 "apps_that_had_error_reports_alerting"
61 ),
62 "reports_to_notify_per_type_per_app": BASE.format(
63 "reports_to_notify_per_type_per_app:{}:{}"
64 ),
65 "reports_to_notify_per_type_per_app_alerting": BASE.format(
66 "reports_to_notify_per_type_per_app_alerting:{}:{}"
67 ),
68 "seen_tag_list": BASE.format("seen_tag_list"),
65 69 }
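Worked example of the two-stage templating above: the first .format() fills BASE's placeholder with a namespaced sub-template, and the sub-template's own {} slots are filled later at call sites:

BASE = "appenlight:data:{}"
template = BASE.format("reports_per_hour_per_app:{}:{}")
# -> "appenlight:data:reports_per_hour_per_app:{}:{}"
key = template.format(2017, 42)
assert key == "appenlight:data:reports_per_hour_per_app:2017:42"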
@@ -54,11 +54,11 b' def unsafe_json_body(request):'
54 54 try:
55 55 return request.json_body
56 56 except ValueError:
57 raise JSONException('Incorrect JSON')
57 raise JSONException("Incorrect JSON")
58 58
59 59
60 60 def get_user(request):
61 if not request.path_info.startswith('/static'):
61 if not request.path_info.startswith("/static"):
62 62 user_id = unauthenticated_userid(request)
63 63 try:
64 64 user_id = int(user_id)
@@ -68,8 +68,10 b' def get_user(request):'
68 68 if user_id:
69 69 user = UserService.by_id(user_id)
70 70 if user:
71 request.environ['appenlight.username'] = '%d:%s' % (
72 user_id, user.user_name)
71 request.environ["appenlight.username"] = "%d:%s" % (
72 user_id,
73 user.user_name,
74 )
73 75 return user
74 76 else:
75 77 return None
@@ -85,7 +87,7 b' def add_flash_to_headers(request, clear=True):'
85 87 flash queue
86 88 """
87 89 flash_msgs = helpers.get_type_formatted_flash(request)
88 request.response.headers['x-flash-messages'] = json.dumps(flash_msgs)
90 request.response.headers["x-flash-messages"] = json.dumps(flash_msgs)
89 91 helpers.clear_flash(request)
90 92
91 93
@@ -94,42 +96,36 b' def get_authomatic(request):'
94 96 # authomatic social auth
95 97 authomatic_conf = {
96 98 # callback http://yourapp.com/social_auth/twitter
97 'twitter': {
98 'class_': oauth1.Twitter,
99 'consumer_key': settings.get('authomatic.pr.twitter.key', ''),
100 'consumer_secret': settings.get('authomatic.pr.twitter.secret',
101 ''),
99 "twitter": {
100 "class_": oauth1.Twitter,
101 "consumer_key": settings.get("authomatic.pr.twitter.key", ""),
102 "consumer_secret": settings.get("authomatic.pr.twitter.secret", ""),
102 103 },
103 104 # callback http://yourapp.com/social_auth/facebook
104 'facebook': {
105 'class_': oauth2.Facebook,
106 'consumer_key': settings.get('authomatic.pr.facebook.app_id', ''),
107 'consumer_secret': settings.get('authomatic.pr.facebook.secret',
108 ''),
109 'scope': ['email'],
105 "facebook": {
106 "class_": oauth2.Facebook,
107 "consumer_key": settings.get("authomatic.pr.facebook.app_id", ""),
108 "consumer_secret": settings.get("authomatic.pr.facebook.secret", ""),
109 "scope": ["email"],
110 110 },
111 111 # callback http://yourapp.com/social_auth/google
112 'google': {
113 'class_': oauth2.Google,
114 'consumer_key': settings.get('authomatic.pr.google.key', ''),
115 'consumer_secret': settings.get(
116 'authomatic.pr.google.secret', ''),
117 'scope': ['profile', 'email'],
112 "google": {
113 "class_": oauth2.Google,
114 "consumer_key": settings.get("authomatic.pr.google.key", ""),
115 "consumer_secret": settings.get("authomatic.pr.google.secret", ""),
116 "scope": ["profile", "email"],
118 117 },
119 'github': {
120 'class_': oauth2.GitHub,
121 'consumer_key': settings.get('authomatic.pr.github.key', ''),
122 'consumer_secret': settings.get(
123 'authomatic.pr.github.secret', ''),
124 'scope': ['repo', 'public_repo', 'user:email'],
125 'access_headers': {'User-Agent': 'AppEnlight'},
118 "github": {
119 "class_": oauth2.GitHub,
120 "consumer_key": settings.get("authomatic.pr.github.key", ""),
121 "consumer_secret": settings.get("authomatic.pr.github.secret", ""),
122 "scope": ["repo", "public_repo", "user:email"],
123 "access_headers": {"User-Agent": "AppEnlight"},
124 },
125 "bitbucket": {
126 "class_": oauth1.Bitbucket,
127 "consumer_key": settings.get("authomatic.pr.bitbucket.key", ""),
128 "consumer_secret": settings.get("authomatic.pr.bitbucket.secret", ""),
126 129 },
127 'bitbucket': {
128 'class_': oauth1.Bitbucket,
129 'consumer_key': settings.get('authomatic.pr.bitbucket.key', ''),
130 'consumer_secret': settings.get(
131 'authomatic.pr.bitbucket.secret', '')
132 }
133 130 }
134 return Authomatic(
135 config=authomatic_conf, secret=settings['authomatic.secret'])
131 return Authomatic(config=authomatic_conf, secret=settings["authomatic.secret"])
@@ -52,13 +52,13 b' class RuleBase(object):'
52 52 :param field_name:
53 53 :return:
54 54 """
55 parts = field_name.split(':') if field_name else []
55 parts = field_name.split(":") if field_name else []
56 56 found = struct
57 57 while parts:
58 58 current_key = parts.pop(0)
59 59 found = found.get(current_key)
60 60 if not found and parts:
61 raise KeyNotFoundException('Key not found in structure')
61 raise KeyNotFoundException("Key not found in structure")
62 62 return found
63 63
64 64 @classmethod
@@ -72,13 +72,13 b' class RuleBase(object):'
72 72 :param field_name:
73 73 :return:
74 74 """
75 parts = field_name.split(':')
75 parts = field_name.split(":")
76 76 found = struct
77 77 while parts:
78 78 current_key = parts.pop(0)
79 79 found = getattr(found, current_key, None)
80 80 if not found and parts:
81 raise KeyNotFoundException('Key not found in structure')
81 raise KeyNotFoundException("Key not found in structure")
82 82 return found
83 83
84 84 def normalized_type(self, field, value):
@@ -89,28 +89,32 b' class RuleBase(object):'
89 89 """
90 90 f_type = self.type_matrix.get(field)
91 91 if f_type:
92 cast_to = f_type['type']
92 cast_to = f_type["type"]
93 93 else:
94 raise UnknownTypeException('Unknown type')
94 raise UnknownTypeException("Unknown type")
95 95
96 96 if value is None:
97 97 return None
98 98
99 99 try:
100 if cast_to == 'int':
100 if cast_to == "int":
101 101 return int(value)
102 elif cast_to == 'float':
102 elif cast_to == "float":
103 103 return float(value)
104 elif cast_to == 'unicode':
104 elif cast_to == "unicode":
105 105 return str(value)
106 106 except ValueError as exc:
107 107 raise InvalidValueException(exc)
108 108
109 109
110 110 class Rule(RuleBase):
111 def __init__(self, config, type_matrix,
111 def __init__(
112 self,
113 config,
114 type_matrix,
112 115 struct_getter=RuleBase.default_dict_struct_getter,
113 config_manipulator=None):
116 config_manipulator=None,
117 ):
114 118 """
115 119
116 120 :param config: dict - contains rule configuration
@@ -159,8 +163,9 b' class Rule(RuleBase):'
159 163 config_manipulator(self)
160 164
161 165 def subrule_check(self, rule_config, struct):
162 rule = Rule(rule_config, self.type_matrix,
163 config_manipulator=self.config_manipulator)
166 rule = Rule(
167 rule_config, self.type_matrix, config_manipulator=self.config_manipulator
168 )
164 169 return rule.match(struct)
165 170
166 171 def match(self, struct):
@@ -169,32 +174,41 b' class Rule(RuleBase):'
169 174 First tries report value, then tests tags if not found, then finally
170 175 report group
171 176 """
172 field_name = self.config.get('field')
173 test_value = self.config.get('value')
177 field_name = self.config.get("field")
178 test_value = self.config.get("value")
174 179
175 180 if not field_name:
176 181 return False
177 182
178 if field_name == '__AND__':
179 rule = AND(self.config['rules'], self.type_matrix,
180 config_manipulator=self.config_manipulator)
183 if field_name == "__AND__":
184 rule = AND(
185 self.config["rules"],
186 self.type_matrix,
187 config_manipulator=self.config_manipulator,
188 )
181 189 return rule.match(struct)
182 elif field_name == '__OR__':
183 rule = OR(self.config['rules'], self.type_matrix,
184 config_manipulator=self.config_manipulator)
190 elif field_name == "__OR__":
191 rule = OR(
192 self.config["rules"],
193 self.type_matrix,
194 config_manipulator=self.config_manipulator,
195 )
185 196 return rule.match(struct)
186 elif field_name == '__NOT__':
187 rule = NOT(self.config['rules'], self.type_matrix,
188 config_manipulator=self.config_manipulator)
197 elif field_name == "__NOT__":
198 rule = NOT(
199 self.config["rules"],
200 self.type_matrix,
201 config_manipulator=self.config_manipulator,
202 )
189 203 return rule.match(struct)
190 204
191 205 if test_value is None:
192 206 return False
193 207
194 208 try:
195 struct_value = self.normalized_type(field_name,
196 self.struct_getter(struct,
197 field_name))
209 struct_value = self.normalized_type(
210 field_name, self.struct_getter(struct, field_name)
211 )
198 212 except (UnknownTypeException, InvalidValueException) as exc:
199 213 log.error(str(exc))
200 214 return False
@@ -205,24 +219,23 b' class Rule(RuleBase):'
205 219 log.error(str(exc))
206 220 return False
207 221
208 if self.config['op'] not in ('startswith', 'endswith', 'contains'):
222 if self.config["op"] not in ("startswith", "endswith", "contains"):
209 223 try:
210 return getattr(operator,
211 self.config['op'])(struct_value, test_value)
224 return getattr(operator, self.config["op"])(struct_value, test_value)
212 225 except TypeError:
213 226 return False
214 elif self.config['op'] == 'startswith':
227 elif self.config["op"] == "startswith":
215 228 return struct_value.startswith(test_value)
216 elif self.config['op'] == 'endswith':
229 elif self.config["op"] == "endswith":
217 230 return struct_value.endswith(test_value)
218 elif self.config['op'] == 'contains':
231 elif self.config["op"] == "contains":
219 232 return test_value in struct_value
220 raise BadConfigException('Invalid configuration, '
221 'unknown operator: {}'.format(self.config))
233 raise BadConfigException(
234 "Invalid configuration, " "unknown operator: {}".format(self.config)
235 )
222 236
223 237 def __repr__(self):
224 return '<Rule {} {}>'.format(self.config.get('field'),
225 self.config.get('value'))
238 return "<Rule {} {}>".format(self.config.get("field"), self.config.get("value"))
226 239
227 240
228 241 class AND(Rule):
@@ -231,8 +244,7 b' class AND(Rule):'
231 244 self.rules = rules
232 245
233 246 def match(self, struct):
234 return all([self.subrule_check(r_conf, struct) for r_conf
235 in self.rules])
247 return all([self.subrule_check(r_conf, struct) for r_conf in self.rules])
236 248
237 249
238 250 class NOT(Rule):
@@ -241,8 +253,7 b' class NOT(Rule):'
241 253 self.rules = rules
242 254
243 255 def match(self, struct):
244 return all([not self.subrule_check(r_conf, struct) for r_conf
245 in self.rules])
256 return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules])
246 257
247 258
248 259 class OR(Rule):
@@ -251,14 +262,12 b' class OR(Rule):'
251 262 self.rules = rules
252 263
253 264 def match(self, struct):
254 return any([self.subrule_check(r_conf, struct) for r_conf
255 in self.rules])
265 return any([self.subrule_check(r_conf, struct) for r_conf in self.rules])
256 266
257 267
258 268 class RuleService(object):
259 269 @staticmethod
260 def rule_from_config(config, field_mappings, labels_dict,
261 manipulator_func=None):
270 def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None):
262 271 """
263 272 Returns modified rule with manipulator function
264 273 By default manipulator function replaces field id from labels_dict
@@ -270,28 +279,33 b' class RuleService(object):'
270 279 """
271 280 rev_map = {}
272 281 for k, v in labels_dict.items():
273 rev_map[(v['agg'], v['key'],)] = k
282 rev_map[(v["agg"], v["key"])] = k
274 283
275 284 if manipulator_func is None:
285
276 286 def label_rewriter_func(rule):
277 field = rule.config.get('field')
278 if not field or rule.config['field'] in ['__OR__',
279 '__AND__', '__NOT__']:
287 field = rule.config.get("field")
288 if not field or rule.config["field"] in [
289 "__OR__",
290 "__AND__",
291 "__NOT__",
292 ]:
280 293 return
281 294
282 to_map = field_mappings.get(rule.config['field'])
295 to_map = field_mappings.get(rule.config["field"])
283 296
284 297 # we need to replace series field with _AE_NOT_FOUND_ to not match
285 298 # accidentally some other field which happens to have the series that
286 299 # was used when the alert was created
287 300 if to_map:
288 to_replace = rev_map.get((to_map['agg'], to_map['key'],),
289 '_AE_NOT_FOUND_')
301 to_replace = rev_map.get(
302 (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_"
303 )
290 304 else:
291 to_replace = '_AE_NOT_FOUND_'
305 to_replace = "_AE_NOT_FOUND_"
292 306
293 rule.config['field'] = to_replace
294 rule.type_matrix[to_replace] = {"type": 'float'}
307 rule.config["field"] = to_replace
308 rule.type_matrix[to_replace] = {"type": "float"}
295 309
296 310 manipulator_func = label_rewriter_func
297 311
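The operator dispatch at the heart of Rule.match() reduces to a getattr on the stdlib operator module, with three string operators special-cased. A stripped-down sketch:

import operator


def check(op, struct_value, test_value):
    if op not in ("startswith", "endswith", "contains"):
        # "eq", "ge", "lt", ... map directly onto operator module functions
        return getattr(operator, op)(struct_value, test_value)
    elif op == "startswith":
        return struct_value.startswith(test_value)
    elif op == "endswith":
        return struct_value.endswith(test_value)
    return test_value in struct_value


assert check("ge", 500, 499)
assert check("contains", "appenlight error", "error")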
@@ -14,8 +14,9 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 from ziggurat_foundations.models.services.external_identity import \
18 ExternalIdentityService
17 from ziggurat_foundations.models.services.external_identity import (
18 ExternalIdentityService,
19 )
19 20 from appenlight.models.external_identity import ExternalIdentity
20 21
21 22
@@ -24,37 +25,38 b' def handle_social_data(request, user, social_data):'
24 25 update_identity = False
25 26
26 27 extng_id = ExternalIdentityService.by_external_id_and_provider(
27 social_data['user']['id'],
28 social_data['credentials'].provider_name
28 social_data["user"]["id"], social_data["credentials"].provider_name
29 29 )
30 30
31 31 # fix legacy accounts with wrong google ID
32 if not extng_id and social_data['credentials'].provider_name == 'google':
32 if not extng_id and social_data["credentials"].provider_name == "google":
33 33 extng_id = ExternalIdentityService.by_external_id_and_provider(
34 social_data['user']['email'],
35 social_data['credentials'].provider_name
34 social_data["user"]["email"], social_data["credentials"].provider_name
36 35 )
37 36
38 37 if extng_id:
39 38 extng_id.delete()
40 39 update_identity = True
41 40
42 if not social_data['user']['id']:
41 if not social_data["user"]["id"]:
43 42 request.session.flash(
44 'No external user id found? Perhaps permissions for '
45 'authentication are set incorrectly', 'error')
43 "No external user id found? Perhaps permissions for "
44 "authentication are set incorrectly",
45 "error",
46 )
46 47 return False
47 48
48 49 if not extng_id or update_identity:
49 50 if not update_identity:
50 request.session.flash('Your external identity is now '
51 'connected with your account')
51 request.session.flash(
52 "Your external identity is now " "connected with your account"
53 )
52 54 ex_identity = ExternalIdentity()
53 ex_identity.external_id = social_data['user']['id']
54 ex_identity.external_user_name = social_data['user']['user_name']
55 ex_identity.provider_name = social_data['credentials'].provider_name
56 ex_identity.access_token = social_data['credentials'].token
57 ex_identity.token_secret = social_data['credentials'].token_secret
58 ex_identity.alt_token = social_data['credentials'].refresh_token
55 ex_identity.external_id = social_data["user"]["id"]
56 ex_identity.external_user_name = social_data["user"]["user_name"]
57 ex_identity.provider_name = social_data["credentials"].provider_name
58 ex_identity.access_token = social_data["credentials"].token
59 ex_identity.token_secret = social_data["credentials"].token_secret
60 ex_identity.alt_token = social_data["credentials"].refresh_token
59 61 user.external_identities.append(ex_identity)
60 request.session.pop('zigg.social_auth', None)
62 request.session.pop("zigg.social_auth", None)
@@ -28,9 +28,7 b' from collections import namedtuple'
28 28 from datetime import timedelta, datetime, date
29 29 from dogpile.cache.api import NO_VALUE
30 30 from appenlight.models import Datastores
31 from appenlight.validators import (LogSearchSchema,
32 TagListSchema,
33 accepted_search_params)
31 from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params
34 32 from itsdangerous import TimestampSigner
35 33 from ziggurat_foundations.permissions import ALL_PERMISSIONS
36 34 from ziggurat_foundations.models.services.user import UserService
@@ -40,21 +38,20 b' from dateutil.rrule import rrule, MONTHLY, DAILY'
40 38 log = logging.getLogger(__name__)
41 39
42 40
43 Stat = namedtuple('Stat', 'start_interval value')
41 Stat = namedtuple("Stat", "start_interval value")
44 42
45 43
46 44 def default_extractor(item):
47 45 """
48 46 :param item - item to extract date from
49 47 """
50 if hasattr(item, 'start_interval'):
48 if hasattr(item, "start_interval"):
51 49 return item.start_interval
52 return item['start_interval']
50 return item["start_interval"]
53 51
54 52
55 53 # fast gap generator
56 def gap_gen_default(start, step, itemiterator, end_time=None,
57 iv_extractor=None):
54 def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None):
58 55 """ generates a list of time/value items based on step and itemiterator
59 56 if there are entries missing from iterator time/None will be returned
60 57 instead
@@ -100,27 +97,31 b' class DateTimeEncoder(json.JSONEncoder):'
100 97 return json.JSONEncoder.default(self, obj)
101 98
102 99
103 def channelstream_request(secret, endpoint, payload, throw_exceptions=False,
104 servers=None):
100 def channelstream_request(
101 secret, endpoint, payload, throw_exceptions=False, servers=None
102 ):
105 103 responses = []
106 104 if not servers:
107 105 servers = []
108 106
109 107 signer = TimestampSigner(secret)
110 108 sig_for_server = signer.sign(endpoint)
111 for secret, server in [(s['secret'], s['server']) for s in servers]:
109 for secret, server in [(s["secret"], s["server"]) for s in servers]:
112 110 response = {}
113 secret_headers = {'x-channelstream-secret': sig_for_server,
114 'x-channelstream-endpoint': endpoint,
115 'Content-Type': 'application/json'}
116 url = '%s%s' % (server, endpoint)
111 secret_headers = {
112 "x-channelstream-secret": sig_for_server,
113 "x-channelstream-endpoint": endpoint,
114 "Content-Type": "application/json",
115 }
116 url = "%s%s" % (server, endpoint)
117 117 try:
118 response = requests.post(url,
119 data=json.dumps(payload,
120 cls=DateTimeEncoder),
118 response = requests.post(
119 url,
120 data=json.dumps(payload, cls=DateTimeEncoder),
121 121 headers=secret_headers,
122 122 verify=False,
123 timeout=2).json()
123 timeout=2,
124 ).json()
124 125 except requests.exceptions.RequestException as e:
125 126 if throw_exceptions:
126 127 raise
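A hedged sketch of the signing handshake inside channelstream_request(): the endpoint path is signed with itsdangerous and shipped in headers beside the JSON payload; the actual post is left commented out since it needs a live server:

from itsdangerous import TimestampSigner

signer = TimestampSigner("server-secret")  # shared secret, illustrative value
endpoint = "/message"
secret_headers = {
    "x-channelstream-secret": signer.sign(endpoint),
    "x-channelstream-endpoint": endpoint,
    "Content-Type": "application/json",
}
# each configured server then receives the same signed request, roughly:
# requests.post(server + endpoint, data=json.dumps(payload, cls=DateTimeEncoder),
#               headers=secret_headers, verify=False, timeout=2).json()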
@@ -130,13 +131,15 b' def channelstream_request(secret, endpoint, payload, throw_exceptions=False,'
130 131
131 132 def add_cors_headers(response):
132 133 # allow CORS
133 response.headers.add('Access-Control-Allow-Origin', '*')
134 response.headers.add('XDomainRequestAllowed', '1')
135 response.headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
134 response.headers.add("Access-Control-Allow-Origin", "*")
135 response.headers.add("XDomainRequestAllowed", "1")
136 response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
136 137 # response.headers.add('Access-Control-Allow-Credentials', 'true')
137 response.headers.add('Access-Control-Allow-Headers',
138 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie')
139 response.headers.add('Access-Control-Max-Age', '86400')
138 response.headers.add(
139 "Access-Control-Allow-Headers",
140 "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie",
141 )
142 response.headers.add("Access-Control-Max-Age", "86400")
140 143
141 144
142 145 from sqlalchemy.sql import compiler
@@ -145,6 +148,7 b' from psycopg2.extensions import adapt as sqlescape'
145 148
146 149 # or use the appropriate escape function from your db driver
147 150
151
148 152 def compile_query(query):
149 153 dialect = query.session.bind.dialect
150 154 statement = query.statement
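For comparison, SQLAlchemy can inline bind parameters itself via literal_binds, which avoids the driver-specific sqlescape step; a hedged sketch using the 1.x-style select() call this module was written against, fine for logging queries but not for executing untrusted input:

import sqlalchemy as sa

metadata = sa.MetaData()
users = sa.Table("users", metadata, sa.Column("id", sa.Integer))
query = sa.select([users]).where(users.c.id == 5)
print(query.compile(compile_kwargs={"literal_binds": True}))
# SELECT users.id FROM users WHERE users.id = 5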
@@ -166,22 +170,23 b' def convert_es_type(input_data):'
166 170 return str(input_data)
167 171
168 172
169 ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch'])
173 ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])
170 174
171 175
172 176 def parse_proto(input_data):
173 177 try:
174 parts = [int(x) for x in input_data.split('.')]
178 parts = [int(x) for x in input_data.split(".")]
175 179 while len(parts) < 3:
176 180 parts.append(0)
177 181 return ProtoVersion(*parts)
178 182 except Exception as e:
179 log.info('Unknown protocol version: %s' % e)
183 log.info("Unknown protocol version: %s" % e)
180 184 return ProtoVersion(99, 99, 99)
181 185
182 186
183 def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
184 ixtypes=None):
187 def es_index_name_limiter(
188 start_date=None, end_date=None, months_in_past=6, ixtypes=None
189 ):
185 190 """
186 191 This function limits the search to 6 months by default so we don't have to
187 192 query, for example, 300 elasticsearch indices covering 20 years of historical data
@@ -189,23 +194,23 b' def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,'
189 194
190 195 # should be cached later
191 196 def get_possible_names():
192 return list(Datastores.es.indices.get_alias('*'))
197 return list(Datastores.es.indices.get_alias("*"))
193 198
194 199 possible_names = get_possible_names()
195 200 es_index_types = []
196 201 if not ixtypes:
197 ixtypes = ['reports', 'metrics', 'logs']
202 ixtypes = ["reports", "metrics", "logs"]
198 203 for t in ixtypes:
199 if t == 'reports':
200 es_index_types.append('rcae_r_%s')
201 elif t == 'logs':
202 es_index_types.append('rcae_l_%s')
203 elif t == 'metrics':
204 es_index_types.append('rcae_m_%s')
205 elif t == 'uptime':
206 es_index_types.append('rcae_u_%s')
207 elif t == 'slow_calls':
208 es_index_types.append('rcae_sc_%s')
204 if t == "reports":
205 es_index_types.append("rcae_r_%s")
206 elif t == "logs":
207 es_index_types.append("rcae_l_%s")
208 elif t == "metrics":
209 es_index_types.append("rcae_m_%s")
210 elif t == "uptime":
211 es_index_types.append("rcae_u_%s")
212 elif t == "slow_calls":
213 es_index_types.append("rcae_sc_%s")
209 214
210 215 if start_date:
211 216 start_date = copy.copy(start_date)
@@ -217,26 +222,34 b' def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,'
217 222 if not end_date:
218 223 end_date = start_date + relativedelta(months=months_in_past)
219 224
220 index_dates = list(rrule(MONTHLY,
225 index_dates = list(
226 rrule(
227 MONTHLY,
221 228 dtstart=start_date.date().replace(day=1),
222 229 until=end_date.date(),
223 count=36))
230 count=36,
231 )
232 )
224 233 index_names = []
225 234 for ix_type in es_index_types:
226 to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates
227 if ix_type % d.strftime('%Y_%m') in possible_names]
235 to_extend = [
236 ix_type % d.strftime("%Y_%m")
237 for d in index_dates
238 if ix_type % d.strftime("%Y_%m") in possible_names
239 ]
228 240 index_names.extend(to_extend)
229 for day in list(rrule(DAILY, dtstart=start_date.date(),
230 until=end_date.date(), count=366)):
231 ix_name = ix_type % day.strftime('%Y_%m_%d')
241 for day in list(
242 rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366)
243 ):
244 ix_name = ix_type % day.strftime("%Y_%m_%d")
232 245 if ix_name in possible_names:
233 246 index_names.append(ix_name)
234 247 return index_names
235 248
236 249
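A worked example of the monthly expansion inside es_index_name_limiter(), using the same rrule shape (the real call also caps the series with count=36); the rcae_r_ prefix matches the reports index type above:

from datetime import datetime

from dateutil.relativedelta import relativedelta
from dateutil.rrule import rrule, MONTHLY

start_date = datetime(2017, 1, 15)
end_date = start_date + relativedelta(months=2)
index_dates = rrule(
    MONTHLY, dtstart=start_date.date().replace(day=1), until=end_date.date()
)
print(["rcae_r_%s" % d.strftime("%Y_%m") for d in index_dates])
# ['rcae_r_2017_01', 'rcae_r_2017_02', 'rcae_r_2017_03']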
237 250 def build_filter_settings_from_query_dict(
238 request, params=None, override_app_ids=None,
239 resource_permissions=None):
251 request, params=None, override_app_ids=None, resource_permissions=None
252 ):
240 253 """
241 254 Builds list of normalized search terms for ES from query params
242 255 ensuring application list is restricted to only applications user
@@ -249,11 +262,12 b' def build_filter_settings_from_query_dict('
249 262 params = copy.deepcopy(params)
250 263 applications = []
251 264 if not resource_permissions:
252 resource_permissions = ['view']
265 resource_permissions = ["view"]
253 266
254 267 if request.user:
255 268 applications = UserService.resources_with_perms(
256 request.user, resource_permissions, resource_types=['application'])
269 request.user, resource_permissions, resource_types=["application"]
270 )
257 271
258 272 # CRITICAL - this ensures our resultset is limited to only the ones
259 273 # user has view permissions
@@ -273,11 +287,11 b' def build_filter_settings_from_query_dict('
273 287 for k, v in list(filter_settings.items()):
274 288 if k in accepted_search_params:
275 289 continue
276 tag_list.append({"name": k, "value": v, "op": 'eq'})
290 tag_list.append({"name": k, "value": v, "op": "eq"})
277 291 # remove the key from filter_settings
278 292 filter_settings.pop(k, None)
279 293 tags = tag_schema.deserialize(tag_list)
280 filter_settings['tags'] = tags
294 filter_settings["tags"] = tags
281 295 return filter_settings
282 296
283 297
@@ -299,26 +313,36 b' def permission_tuple_to_dict(data):'
299 313 "resource_type": None,
300 314 "resource_id": None,
301 315 "group_name": None,
302 "group_id": None
316 "group_id": None,
303 317 }
304 318 if data.user:
305 319 out["user_name"] = data.user.user_name
306 320 if data.perm_name == ALL_PERMISSIONS:
307 out['perm_name'] = '__all_permissions__'
321 out["perm_name"] = "__all_permissions__"
308 322 if data.resource:
309 out['resource_name'] = data.resource.resource_name
310 out['resource_type'] = data.resource.resource_type
311 out['resource_id'] = data.resource.resource_id
323 out["resource_name"] = data.resource.resource_name
324 out["resource_type"] = data.resource.resource_type
325 out["resource_id"] = data.resource.resource_id
312 326 if data.group:
313 out['group_name'] = data.group.group_name
314 out['group_id'] = data.group.id
327 out["group_name"] = data.group.group_name
328 out["group_id"] = data.group.id
315 329 return out
316 330
317 331
318 def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
319 gap_gen=None, db_session=None, step_interval=None,
332 def get_cached_buckets(
333 request,
334 stats_since,
335 end_time,
336 fn,
337 cache_key,
338 gap_gen=None,
339 db_session=None,
340 step_interval=None,
320 341 iv_extractor=None,
321 rerange=False, *args, **kwargs):
342 rerange=False,
343 *args,
344 **kwargs
345 ):
322 346 """ Takes "fn" that should return some data and tries to load the data
323 347 dividing it into daily buckets - if the stats_since and end time give a
324 348 delta bigger than 24 hours, then only "today's" data is computed on the fly
@@ -360,25 +384,28 b' def get_cached_buckets(request, stats_since, end_time, fn, cache_key,'
360 384 # do not use custom interval if total time range with new iv would exceed
361 385 # end time
362 386 if not step_interval or stats_since + step_interval >= end_time:
363 if delta < h.time_deltas.get('12h')['delta']:
387 if delta < h.time_deltas.get("12h")["delta"]:
364 388 step_interval = timedelta(seconds=60)
365 elif delta < h.time_deltas.get('3d')['delta']:
389 elif delta < h.time_deltas.get("3d")["delta"]:
366 390 step_interval = timedelta(seconds=60 * 5)
367 elif delta > h.time_deltas.get('2w')['delta']:
391 elif delta > h.time_deltas.get("2w")["delta"]:
368 392 step_interval = timedelta(days=1)
369 393 else:
370 394 step_interval = timedelta(minutes=60)
371 395
372 396 if step_interval >= timedelta(minutes=60):
373 log.info('cached_buckets:{}: adjusting start time '
374 'for hourly or daily intervals'.format(cache_key))
397 log.info(
398 "cached_buckets:{}: adjusting start time "
399 "for hourly or daily intervals".format(cache_key)
400 )
375 401 stats_since = stats_since.replace(hour=0, minute=0)
376 402
377 ranges = [i.start_interval for i in list(gap_gen(stats_since,
378 step_interval, [],
379 end_time=end_time))]
403 ranges = [
404 i.start_interval
405 for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time))
406 ]
380 407 buckets = {}
381 storage_key = 'buckets:' + cache_key + '{}|{}'
408 storage_key = "buckets:" + cache_key + "{}|{}"
382 409 # this means we basically cache per hour in 3-14 day intervals but i think
383 410 # it's fine at this point - will be faster than db access anyway
384 411
@@ -391,45 +418,67 b' def get_cached_buckets(request, stats_since, end_time, fn, cache_key,'
391 418 k = storage_key.format(step_interval.total_seconds(), r)
392 419 value = request.registry.cache_regions.redis_day_30.get(k)
393 420 # last buckets are never loaded from cache
394 is_last_result = (
395 r >= end_time - timedelta(hours=6) or r in last_ranges)
421 is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges
396 422 if value is not NO_VALUE and not is_last_result:
397 log.info("cached_buckets:{}: "
398 "loading range {} from cache".format(cache_key, r))
423 log.info(
424 "cached_buckets:{}: "
425 "loading range {} from cache".format(cache_key, r)
426 )
399 427 buckets[r] = value
400 428 else:
401 log.info("cached_buckets:{}: "
402 "loading range {} from storage".format(cache_key, r))
429 log.info(
430 "cached_buckets:{}: "
431 "loading range {} from storage".format(cache_key, r)
432 )
403 433 range_size = step_interval
404 if (step_interval == timedelta(minutes=60) and
405 not is_last_result and rerange):
434 if (
435 step_interval == timedelta(minutes=60)
436 and not is_last_result
437 and rerange
438 ):
406 439 range_size = timedelta(days=1)
407 440 r = r.replace(hour=0, minute=0)
408 log.info("cached_buckets:{}: "
441 log.info(
442 "cached_buckets:{}: "
409 443 "loading collapsed "
410 "range {} {}".format(cache_key, r,
411 r + range_size))
444 "range {} {}".format(cache_key, r, r + range_size)
445 )
412 446 bucket_data = fn(
413 request, r, r + range_size, step_interval,
414 gap_gen, bucket_count=len(ranges), *args, **kwargs)
447 request,
448 r,
449 r + range_size,
450 step_interval,
451 gap_gen,
452 bucket_count=len(ranges),
453 *args,
454 **kwargs
455 )
415 456 for b in bucket_data:
416 457 b_iv = iv_extractor(b)
417 458 buckets[b_iv] = b
418 k2 = storage_key.format(
419 step_interval.total_seconds(), b_iv)
459 k2 = storage_key.format(step_interval.total_seconds(), b_iv)
420 460 request.registry.cache_regions.redis_day_30.set(k2, b)
421 461 log.info("cached_buckets:{}: saving cache".format(cache_key))
422 462 else:
423 463 # bucket count is 1 for short time ranges <= 24h from now
424 bucket_data = fn(request, stats_since, end_time, step_interval,
425 gap_gen, bucket_count=1, *args, **kwargs)
464 bucket_data = fn(
465 request,
466 stats_since,
467 end_time,
468 step_interval,
469 gap_gen,
470 bucket_count=1,
471 *args,
472 **kwargs
473 )
426 474 for b in bucket_data:
427 475 buckets[iv_extractor(b)] = b
428 476 return buckets
429 477
430 478
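Worked example of the per-bucket cache keys used above: one cache entry per (step size, interval start) pair, so later calls only recompute the freshest ranges:

from datetime import datetime, timedelta

cache_key = "report_stats:42"  # illustrative value
storage_key = "buckets:" + cache_key + "{}|{}"
step = timedelta(minutes=60)
r = datetime(2017, 5, 1, 13, 0)
print(storage_key.format(step.total_seconds(), r))
# buckets:report_stats:423600.0|2017-05-01 13:00:00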
431 def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
432 db_session=None, *args, **kwargs):
479 def get_cached_split_data(
480 request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs
481 ):
433 482 """ Takes "fn" that should return some data and tries to load the data
434 483 dividing it into 2 buckets - cached "since_from" bucket and "today"
435 484 bucket - then the data can be reduced into a single value
@@ -442,43 +491,51 b' def get_cached_split_data(request, stats_since, end_time, fn, cache_key,'
442 491 delta = end_time - stats_since
443 492
444 493 if delta >= timedelta(minutes=60):
445 log.info('cached_split_data:{}: adjusting start time '
446 'for hourly or daily intervals'.format(cache_key))
494 log.info(
495 "cached_split_data:{}: adjusting start time "
496 "for hourly or daily intervals".format(cache_key)
497 )
447 498 stats_since = stats_since.replace(hour=0, minute=0)
448 499
449 storage_key = 'buckets_split_data:' + cache_key + ':{}|{}'
500 storage_key = "buckets_split_data:" + cache_key + ":{}|{}"
450 501 old_end_time = end_time.replace(hour=0, minute=0)
451 502
452 final_storage_key = storage_key.format(delta.total_seconds(),
453 old_end_time)
503 final_storage_key = storage_key.format(delta.total_seconds(), old_end_time)
454 504 older_data = None
455 505
456 cdata = request.registry.cache_regions.redis_day_7.get(
457 final_storage_key)
506 cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key)
458 507
459 508 if cdata:
460 log.info("cached_split_data:{}: found old "
461 "bucket data".format(cache_key))
509 log.info("cached_split_data:{}: found old " "bucket data".format(cache_key))
462 510 older_data = cdata
463 511
464 if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and
465 not cdata):
466 log.info("cached_split_data:{}: didn't find the "
467 "start bucket in cache so load older data".format(cache_key))
512 if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata:
513 log.info(
514 "cached_split_data:{}: didn't find the "
515 "start bucket in cache so load older data".format(cache_key)
516 )
468 517 recent_stats_since = old_end_time
469 older_data = fn(request, stats_since, recent_stats_since,
470 db_session=db_session, *args, **kwargs)
471 request.registry.cache_regions.redis_day_7.set(final_storage_key,
472 older_data)
473 elif stats_since < end_time - h.time_deltas.get('24h')['delta']:
518 older_data = fn(
519 request,
520 stats_since,
521 recent_stats_since,
522 db_session=db_session,
523 *args,
524 **kwargs
525 )
526 request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data)
527 elif stats_since < end_time - h.time_deltas.get("24h")["delta"]:
474 528 recent_stats_since = old_end_time
475 529 else:
476 530 recent_stats_since = stats_since
477 531
478 log.info("cached_split_data:{}: loading fresh "
479 "data bucksts from last 24h ".format(cache_key))
480 todays_data = fn(request, recent_stats_since, end_time,
481 db_session=db_session, *args, **kwargs)
532 log.info(
533 "cached_split_data:{}: loading fresh "
534 "data bucksts from last 24h ".format(cache_key)
535 )
536 todays_data = fn(
537 request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs
538 )
482 539 return older_data, todays_data
483 540
484 541
@@ -24,119 +24,138 b' log = logging.getLogger(__name__)'
24 24
25 25 def parse_airbrake_xml(request):
26 26 root = request.context.airbrake_xml_etree
27 error = root.find('error')
28 notifier = root.find('notifier')
29 server_env = root.find('server-environment')
30 request_data = root.find('request')
31 user = root.find('current-user')
27 error = root.find("error")
28 notifier = root.find("notifier")
29 server_env = root.find("server-environment")
30 request_data = root.find("request")
31 user = root.find("current-user")
32 32 if request_data is not None:
33 cgi_data = request_data.find('cgi-data')
33 cgi_data = request_data.find("cgi-data")
34 34 if cgi_data is None:
35 35 cgi_data = []
36 36
37 37 error_dict = {
38 'class_name': error.findtext('class') or '',
39 'error': error.findtext('message') or '',
38 "class_name": error.findtext("class") or "",
39 "error": error.findtext("message") or "",
40 40 "occurences": 1,
41 41 "http_status": 500,
42 42 "priority": 5,
43 "server": 'unknown',
44 'url': 'unknown', 'request': {}
43 "server": "unknown",
44 "url": "unknown",
45 "request": {},
45 46 }
46 47 if user is not None:
47 error_dict['username'] = user.findtext('username') or \
48 user.findtext('id')
48 error_dict["username"] = user.findtext("username") or user.findtext("id")
49 49 if notifier is not None:
50 error_dict['client'] = notifier.findtext('name')
50 error_dict["client"] = notifier.findtext("name")
51 51
52 52 if server_env is not None:
53 error_dict["server"] = server_env.findtext('hostname', 'unknown')
53 error_dict["server"] = server_env.findtext("hostname", "unknown")
54 54
55 whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME',
56 'CONTENT_TYPE', 'HTTP_REFERER']
55 whitelist_environ = [
56 "REMOTE_USER",
57 "REMOTE_ADDR",
58 "SERVER_NAME",
59 "CONTENT_TYPE",
60 "HTTP_REFERER",
61 ]
57 62
58 63 if request_data is not None:
59 error_dict['url'] = request_data.findtext('url', 'unknown')
60 component = request_data.findtext('component')
61 action = request_data.findtext('action')
64 error_dict["url"] = request_data.findtext("url", "unknown")
65 component = request_data.findtext("component")
66 action = request_data.findtext("action")
62 67 if component and action:
63 error_dict['view_name'] = '%s:%s' % (component, action)
68 error_dict["view_name"] = "%s:%s" % (component, action)
64 69 for node in cgi_data:
65 key = node.get('key')
66 if key.startswith('HTTP') or key in whitelist_environ:
67 error_dict['request'][key] = node.text
68 elif 'query_parameters' in key:
69 error_dict['request']['GET'] = {}
70 key = node.get("key")
71 if key.startswith("HTTP") or key in whitelist_environ:
72 error_dict["request"][key] = node.text
73 elif "query_parameters" in key:
74 error_dict["request"]["GET"] = {}
70 75 for x in node:
71 error_dict['request']['GET'][x.get('key')] = x.text
72 elif 'request_parameters' in key:
73 error_dict['request']['POST'] = {}
76 error_dict["request"]["GET"][x.get("key")] = x.text
77 elif "request_parameters" in key:
78 error_dict["request"]["POST"] = {}
74 79 for x in node:
75 error_dict['request']['POST'][x.get('key')] = x.text
76 elif key.endswith('cookie'):
77 error_dict['request']['COOKIE'] = {}
80 error_dict["request"]["POST"][x.get("key")] = x.text
81 elif key.endswith("cookie"):
82 error_dict["request"]["COOKIE"] = {}
78 83 for x in node:
79 error_dict['request']['COOKIE'][x.get('key')] = x.text
80 elif key.endswith('request_id'):
81 error_dict['request_id'] = node.text
82 elif key.endswith('session'):
83 error_dict['request']['SESSION'] = {}
84 error_dict["request"]["COOKIE"][x.get("key")] = x.text
85 elif key.endswith("request_id"):
86 error_dict["request_id"] = node.text
87 elif key.endswith("session"):
88 error_dict["request"]["SESSION"] = {}
84 89 for x in node:
85 error_dict['request']['SESSION'][x.get('key')] = x.text
90 error_dict["request"]["SESSION"][x.get("key")] = x.text
86 91 else:
87 if key in ['rack.session.options']:
92 if key in ["rack.session.options"]:
88 93 # skip secret configs
89 94 continue
90 95 try:
91 96 if len(node):
92 error_dict['request'][key] = dict(
93 [(x.get('key'), x.text,) for x in node])
97 error_dict["request"][key] = dict(
98 [(x.get("key"), x.text) for x in node]
99 )
94 100 else:
95 error_dict['request'][key] = node.text
101 error_dict["request"][key] = node.text
96 102 except Exception as e:
97 log.warning('Airbrake integration exception: %s' % e)
103 log.warning("Airbrake integration exception: %s" % e)
98 104
99 error_dict['request'].pop('HTTP_COOKIE', '')
105 error_dict["request"].pop("HTTP_COOKIE", "")
100 106
101 error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '')
102 error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '')
103 if 'request_id' not in error_dict:
104 error_dict['request_id'] = str(uuid.uuid4())
107 error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "")
108 error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "")
109 if "request_id" not in error_dict:
110 error_dict["request_id"] = str(uuid.uuid4())
105 111 if request.context.possibly_public:
106 112 # set ip for reports that come from airbrake js client
107 113 error_dict["timestamp"] = datetime.utcnow()
108 114 if request.environ.get("HTTP_X_FORWARDED_FOR"):
109 ip = request.environ.get("HTTP_X_FORWARDED_FOR", '')
110 first_ip = ip.split(',')[0]
115 ip = request.environ.get("HTTP_X_FORWARDED_FOR", "")
116 first_ip = ip.split(",")[0]
111 117 remote_addr = first_ip.strip()
112 118 else:
113 remote_addr = (request.environ.get("HTTP_X_REAL_IP") or
114 request.environ.get('REMOTE_ADDR'))
119 remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get(
120 "REMOTE_ADDR"
121 )
115 122 error_dict["ip"] = remote_addr
116 123
117 blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
118 'session', 'test']
124 blacklist = [
125 "password",
126 "passwd",
127 "pwd",
128 "auth_tkt",
129 "secret",
130 "csrf",
131 "session",
132 "test",
133 ]
119 134
120 135 lines = []
121 for l in error.find('backtrace'):
122 lines.append({'file': l.get("file", ""),
123 'line': l.get("number", ""),
124 'fn': l.get("method", ""),
125 'module': l.get("module", ""),
126 'cline': l.get("method", ""),
127 'vars': {}})
128 error_dict['traceback'] = list(reversed(lines))
136 for l in error.find("backtrace"):
137 lines.append(
138 {
139 "file": l.get("file", ""),
140 "line": l.get("number", ""),
141 "fn": l.get("method", ""),
142 "module": l.get("module", ""),
143 "cline": l.get("method", ""),
144 "vars": {},
145 }
146 )
147 error_dict["traceback"] = list(reversed(lines))
129 148 # filtering is not provided by airbrake
130 149 keys_to_check = (
131 error_dict['request'].get('COOKIE'),
132 error_dict['request'].get('COOKIES'),
133 error_dict['request'].get('POST'),
134 error_dict['request'].get('SESSION'),
150 error_dict["request"].get("COOKIE"),
151 error_dict["request"].get("COOKIES"),
152 error_dict["request"].get("POST"),
153 error_dict["request"].get("SESSION"),
135 154 )
136 155 for source in [_f for _f in keys_to_check if _f]:
137 156 for k in source.keys():
138 157 for bad_key in blacklist:
139 158 if bad_key in k.lower():
140 source[k] = '***'
159 source[k] = "***"
141 160
142 161 return error_dict
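
The masking pass above is untouched by the reformat apart from quoting: any key whose lowercased name contains a blacklisted substring has its value replaced with "***". A minimal standalone sketch of that pass (the names mirror the variables above; mask_sensitive is a hypothetical helper, not part of the module):

    blacklist = ["password", "passwd", "pwd", "auth_tkt", "secret", "csrf", "session", "test"]

    def mask_sensitive(source, blacklist=blacklist):
        # mask values in place when any blacklisted substring appears in the key
        for k in list(source.keys()):
            if any(bad_key in k.lower() for bad_key in blacklist):
                source[k] = "***"
        return source

    mask_sensitive({"csrf_token": "abc", "page": "1"})
    # -> {"csrf_token": "***", "page": "1"}
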
@@ -22,12 +22,12 b' log = logging.getLogger(__name__)'
22 22
23 23
24 24 def to_relativedelta(time_delta):
25 return relativedelta(seconds=int(time_delta.total_seconds()),
26 microseconds=time_delta.microseconds)
25 return relativedelta(
26 seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds
27 )
27 28
28 29
29 def convert_date(date_str, return_utcnow_if_wrong=True,
30 normalize_future=False):
30 def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):
31 31 utcnow = datetime.utcnow()
32 32 if isinstance(date_str, datetime):
33 33 # get rid of tzinfo
@@ -36,21 +36,21 b' def convert_date(date_str, return_utcnow_if_wrong=True,'
36 36 return utcnow
37 37 try:
38 38 try:
39 if 'Z' in date_str:
40 date_str = date_str[:date_str.index('Z')]
41 if '.' in date_str:
42 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%f')
39 if "Z" in date_str:
40 date_str = date_str[: date_str.index("Z")]
41 if "." in date_str:
42 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f")
43 43 else:
44 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S')
44 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
45 45 except Exception:
46 46 # bw compat with old client
47 date = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S,%f')
47 date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
48 48 except Exception:
49 49 if return_utcnow_if_wrong:
50 50 date = utcnow
51 51 else:
52 52 date = None
53 53 if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
54 log.warning('time %s in future + 3 min, normalizing' % date)
54 log.warning("time %s in future + 3 min, normalizing" % date)
55 55 return utcnow
56 56 return date
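
The accepted inputs are unchanged by the reformat: ISO-8601 with optional microseconds and an optional trailing Z, plus the comma-separated legacy client format. Roughly:

    convert_date("2014-10-13T23:47:38.295159Z")  # -> datetime(2014, 10, 13, 23, 47, 38, 295159)
    convert_date("2014-10-13T23:47:38")          # -> datetime(2014, 10, 13, 23, 47, 38)
    convert_date("2014-10-13 23:47:38,295159")   # legacy client format
    convert_date("not a date")                   # -> utcnow(), unless return_utcnow_if_wrong=False
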
@@ -19,45 +19,68 b' from datetime import timedelta'
19 19 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
20 20
21 21 EXCLUDED_LOG_VARS = [
22 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
23 'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs',
24 'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated',
25 'thread', 'threadName']
22 "args",
23 "asctime",
24 "created",
25 "exc_info",
26 "exc_text",
27 "filename",
28 "funcName",
29 "levelname",
30 "levelno",
31 "lineno",
32 "message",
33 "module",
34 "msecs",
35 "msg",
36 "name",
37 "pathname",
38 "process",
39 "processName",
40 "relativeCreated",
41 "thread",
42 "threadName",
43 ]
26 44
27 45 EXCLUDE_SENTRY_KEYS = [
28 'csp',
29 'culprit',
30 'event_id',
31 'exception',
32 'extra',
33 'level',
34 'logentry',
35 'logger',
36 'message',
37 'modules',
38 'platform',
39 'query',
40 'release',
41 'request',
42 'sentry.interfaces.Csp', 'sentry.interfaces.Exception',
43 'sentry.interfaces.Http', 'sentry.interfaces.Message',
44 'sentry.interfaces.Query',
45 'sentry.interfaces.Stacktrace',
46 'sentry.interfaces.Template', 'sentry.interfaces.User',
47 'sentry.interfaces.csp.Csp',
48 'sentry.interfaces.exception.Exception',
49 'sentry.interfaces.http.Http',
50 'sentry.interfaces.message.Message',
51 'sentry.interfaces.query.Query',
52 'sentry.interfaces.stacktrace.Stacktrace',
53 'sentry.interfaces.template.Template',
54 'sentry.interfaces.user.User', 'server_name',
55 'stacktrace',
56 'tags',
57 'template',
58 'time_spent',
59 'timestamp',
60 'user']
46 "csp",
47 "culprit",
48 "event_id",
49 "exception",
50 "extra",
51 "level",
52 "logentry",
53 "logger",
54 "message",
55 "modules",
56 "platform",
57 "query",
58 "release",
59 "request",
60 "sentry.interfaces.Csp",
61 "sentry.interfaces.Exception",
62 "sentry.interfaces.Http",
63 "sentry.interfaces.Message",
64 "sentry.interfaces.Query",
65 "sentry.interfaces.Stacktrace",
66 "sentry.interfaces.Template",
67 "sentry.interfaces.User",
68 "sentry.interfaces.csp.Csp",
69 "sentry.interfaces.exception.Exception",
70 "sentry.interfaces.http.Http",
71 "sentry.interfaces.message.Message",
72 "sentry.interfaces.query.Query",
73 "sentry.interfaces.stacktrace.Stacktrace",
74 "sentry.interfaces.template.Template",
75 "sentry.interfaces.user.User",
76 "server_name",
77 "stacktrace",
78 "tags",
79 "template",
80 "time_spent",
81 "timestamp",
82 "user",
83 ]
61 84
62 85
63 86 def get_keys(list_of_keys, json_body):
@@ -67,9 +90,10 b' def get_keys(list_of_keys, json_body):'
67 90
68 91
69 92 def get_logentry(json_body):
70 key_names = ['logentry',
71 'sentry.interfaces.message.Message',
72 'sentry.interfaces.Message'
93 key_names = [
94 "logentry",
95 "sentry.interfaces.message.Message",
96 "sentry.interfaces.Message",
73 97 ]
74 98 logentry = get_keys(key_names, json_body)
75 99 return logentry
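
The body of get_keys is elided from this diff; from the call sites it evidently returns the value of the first key in list_of_keys that is present in json_body. A hedged sketch of that contract (an assumption, not the actual body):

    def get_keys(list_of_keys, json_body):
        # assumed behavior: first matching key wins, otherwise None
        for key in list_of_keys:
            if key in json_body:
                return json_body[key]
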
@@ -77,20 +101,21 b' def get_logentry(json_body):'
77 101
78 102 def get_exception(json_body):
79 103 parsed_exception = {}
80 key_names = ['exception',
81 'sentry.interfaces.exception.Exception',
82 'sentry.interfaces.Exception'
104 key_names = [
105 "exception",
106 "sentry.interfaces.exception.Exception",
107 "sentry.interfaces.Exception",
83 108 ]
84 109 exception = get_keys(key_names, json_body) or {}
85 110 if exception:
86 111 if isinstance(exception, dict):
87 exception = exception['values'][0]
112 exception = exception["values"][0]
88 113 else:
89 114 exception = exception[0]
90 115
91 parsed_exception['type'] = exception.get('type')
92 parsed_exception['value'] = exception.get('value')
93 parsed_exception['module'] = exception.get('module')
116 parsed_exception["type"] = exception.get("type")
117 parsed_exception["value"] = exception.get("value")
118 parsed_exception["module"] = exception.get("module")
94 119 parsed_stacktrace = get_stacktrace(exception) or {}
95 120 parsed_exception = exception or {}
96 121 return parsed_exception, parsed_stacktrace
@@ -98,20 +123,22 b' def get_exception(json_body):'
98 123
99 124 def get_stacktrace(json_body):
100 125 parsed_stacktrace = []
101 key_names = ['stacktrace',
102 'sentry.interfaces.stacktrace.Stacktrace',
103 'sentry.interfaces.Stacktrace'
126 key_names = [
127 "stacktrace",
128 "sentry.interfaces.stacktrace.Stacktrace",
129 "sentry.interfaces.Stacktrace",
104 130 ]
105 131 stacktrace = get_keys(key_names, json_body)
106 132 if stacktrace:
107 for frame in stacktrace['frames']:
133 for frame in stacktrace["frames"]:
108 134 parsed_stacktrace.append(
109 {"cline": frame.get('context_line', ''),
110 "file": frame.get('filename', ''),
111 "module": frame.get('module', ''),
112 "fn": frame.get('function', ''),
113 "line": frame.get('lineno', ''),
114 "vars": list(frame.get('vars', {}).items())
135 {
136 "cline": frame.get("context_line", ""),
137 "file": frame.get("filename", ""),
138 "module": frame.get("module", ""),
139 "fn": frame.get("function", ""),
140 "line": frame.get("lineno", ""),
141 "vars": list(frame.get("vars", {}).items()),
115 142 }
116 143 )
117 144 return parsed_stacktrace
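
Each Sentry frame is remapped to appenlight's internal trace-line shape (context_line to cline, filename to file, function to fn, lineno to line, and vars flattened to an item list). For example:

    frame = {"context_line": "x = 1 / 0", "filename": "app.py", "module": "app",
             "function": "main", "lineno": 42, "vars": {"x": 0}}
    # becomes:
    # {"cline": "x = 1 / 0", "file": "app.py", "module": "app",
    #  "fn": "main", "line": 42, "vars": [("x", 0)]}
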
@@ -119,19 +146,21 b' def get_stacktrace(json_body):'
119 146
120 147 def get_template(json_body):
121 148 parsed_template = []
122 key_names = ['template',
123 'sentry.interfaces.template.Template',
124 'sentry.interfaces.Template'
149 key_names = [
150 "template",
151 "sentry.interfaces.template.Template",
152 "sentry.interfaces.Template",
125 153 ]
126 154 template = get_keys(key_names, json_body)
127 155 if template:
128 for frame in template['frames']:
156 for frame in template["frames"]:
129 157 parsed_template.append(
130 {"cline": frame.get('context_line', ''),
131 "file": frame.get('filename', ''),
132 "fn": '',
133 "line": frame.get('lineno', ''),
134 "vars": []
158 {
159 "cline": frame.get("context_line", ""),
160 "file": frame.get("filename", ""),
161 "fn": "",
162 "line": frame.get("lineno", ""),
163 "vars": [],
135 164 }
136 165 )
137 166
@@ -140,16 +169,13 b' def get_template(json_body):'
140 169
141 170 def get_request(json_body):
142 171 parsed_http = {}
143 key_names = ['request',
144 'sentry.interfaces.http.Http',
145 'sentry.interfaces.Http'
146 ]
172 key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"]
147 173 http = get_keys(key_names, json_body) or {}
148 174 for k, v in http.items():
149 if k == 'headers':
150 parsed_http['headers'] = {}
151 for sk, sv in http['headers'].items():
152 parsed_http['headers'][sk.title()] = sv
175 if k == "headers":
176 parsed_http["headers"] = {}
177 for sk, sv in http["headers"].items():
178 parsed_http["headers"][sk.title()] = sv
153 179 else:
154 180 parsed_http[k.lower()] = v
155 181 return parsed_http
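
The one behavioral subtlety here is header normalization: str.title() canonicalizes each header name, so downstream lookups such as parsed_req["headers"].get("User-Agent") work whatever casing the client sent:

    "user-agent".title()         # 'User-Agent'
    "X-FORWARDED-FOR".title()    # 'X-Forwarded-For'
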
@@ -157,53 +183,47 b' def get_request(json_body):'
157 183
158 184 def get_user(json_body):
159 185 parsed_user = {}
160 key_names = ['user',
161 'sentry.interfaces.user.User',
162 'sentry.interfaces.User'
163 ]
186 key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"]
164 187 user = get_keys(key_names, json_body)
165 188 if user:
166 parsed_user['id'] = user.get('id')
167 parsed_user['username'] = user.get('username')
168 parsed_user['email'] = user.get('email')
169 parsed_user['ip_address'] = user.get('ip_address')
189 parsed_user["id"] = user.get("id")
190 parsed_user["username"] = user.get("username")
191 parsed_user["email"] = user.get("email")
192 parsed_user["ip_address"] = user.get("ip_address")
170 193
171 194 return parsed_user
172 195
173 196
174 197 def get_query(json_body):
175 198 query = None
176 key_name = ['query',
177 'sentry.interfaces.query.Query',
178 'sentry.interfaces.Query'
179 ]
199 key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"]
180 200 query = get_keys(key_name, json_body)
181 201 return query
182 202
183 203
184 204 def parse_sentry_event(json_body):
185 request_id = json_body.get('event_id')
205 request_id = json_body.get("event_id")
186 206
187 207 # required
188 message = json_body.get('message')
189 log_timestamp = json_body.get('timestamp')
190 level = json_body.get('level')
208 message = json_body.get("message")
209 log_timestamp = json_body.get("timestamp")
210 level = json_body.get("level")
191 211 if isinstance(level, int):
192 212 level = LogLevelPython.key_from_value(level)
193 213
194 namespace = json_body.get('logger')
195 language = json_body.get('platform')
214 namespace = json_body.get("logger")
215 language = json_body.get("platform")
196 216
197 217 # optional
198 server_name = json_body.get('server_name')
199 culprit = json_body.get('culprit')
200 release = json_body.get('release')
218 server_name = json_body.get("server_name")
219 culprit = json_body.get("culprit")
220 release = json_body.get("release")
201 221
202 tags = json_body.get('tags', {})
203 if hasattr(tags, 'items'):
222 tags = json_body.get("tags", {})
223 if hasattr(tags, "items"):
204 224 tags = list(tags.items())
205 extra = json_body.get('extra', {})
206 if hasattr(extra, 'items'):
225 extra = json_body.get("extra", {})
226 if hasattr(extra, "items"):
207 227 extra = list(extra.items())
208 228
209 229 parsed_req = get_request(json_body)
@@ -212,12 +232,13 b' def parse_sentry_event(json_body):'
212 232 query = get_query(json_body)
213 233
214 234 # other unidentified keys found
215 other_keys = [(k, json_body[k]) for k in json_body.keys()
216 if k not in EXCLUDE_SENTRY_KEYS]
235 other_keys = [
236 (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS
237 ]
217 238
218 239 logentry = get_logentry(json_body)
219 240 if logentry:
220 message = logentry['message']
241 message = logentry["message"]
221 242
222 243 exception, stacktrace = get_exception(json_body)
223 244
@@ -227,70 +248,70 b' def parse_sentry_event(json_body):'
227 248 event_type = ParsedSentryEventType.LOG
228 249
229 250 event_dict = {
230 'log_level': level,
231 'message': message,
232 'namespace': namespace,
233 'request_id': request_id,
234 'server': server_name,
235 'date': log_timestamp,
236 'tags': tags
251 "log_level": level,
252 "message": message,
253 "namespace": namespace,
254 "request_id": request_id,
255 "server": server_name,
256 "date": log_timestamp,
257 "tags": tags,
237 258 }
238 event_dict['tags'].extend(
239 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS])
259 event_dict["tags"].extend(
260 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS]
261 )
240 262
241 263 # other keys can be various object types
242 event_dict['tags'].extend([(k, v) for k, v in other_keys
243 if isinstance(v, str)])
264 event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)])
244 265 if culprit:
245 event_dict['tags'].append(('sentry_culprit', culprit))
266 event_dict["tags"].append(("sentry_culprit", culprit))
246 267 if language:
247 event_dict['tags'].append(('sentry_language', language))
268 event_dict["tags"].append(("sentry_language", language))
248 269 if release:
249 event_dict['tags'].append(('sentry_release', release))
270 event_dict["tags"].append(("sentry_release", release))
250 271
251 272 if exception or stacktrace or alt_stacktrace or template:
252 273 event_type = ParsedSentryEventType.ERROR_REPORT
253 274 event_dict = {
254 'client': 'sentry',
255 'error': message,
256 'namespace': namespace,
257 'request_id': request_id,
258 'server': server_name,
259 'start_time': log_timestamp,
260 'end_time': None,
261 'tags': tags,
262 'extra': extra,
263 'language': language,
264 'view_name': json_body.get('culprit'),
265 'http_status': None,
266 'username': None,
267 'url': parsed_req.get('url'),
268 'ip': None,
269 'user_agent': None,
270 'request': None,
271 'slow_calls': None,
272 'request_stats': None,
273 'traceback': None
275 "client": "sentry",
276 "error": message,
277 "namespace": namespace,
278 "request_id": request_id,
279 "server": server_name,
280 "start_time": log_timestamp,
281 "end_time": None,
282 "tags": tags,
283 "extra": extra,
284 "language": language,
285 "view_name": json_body.get("culprit"),
286 "http_status": None,
287 "username": None,
288 "url": parsed_req.get("url"),
289 "ip": None,
290 "user_agent": None,
291 "request": None,
292 "slow_calls": None,
293 "request_stats": None,
294 "traceback": None,
274 295 }
275 296
276 event_dict['extra'].extend(other_keys)
297 event_dict["extra"].extend(other_keys)
277 298 if release:
278 event_dict['tags'].append(('sentry_release', release))
279 event_dict['request'] = parsed_req
280 if 'headers' in parsed_req:
281 event_dict['user_agent'] = parsed_req['headers'].get('User-Agent')
282 if 'env' in parsed_req:
283 event_dict['ip'] = parsed_req['env'].get('REMOTE_ADDR')
284 ts_ms = int(json_body.get('time_spent') or 0)
299 event_dict["tags"].append(("sentry_release", release))
300 event_dict["request"] = parsed_req
301 if "headers" in parsed_req:
302 event_dict["user_agent"] = parsed_req["headers"].get("User-Agent")
303 if "env" in parsed_req:
304 event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR")
305 ts_ms = int(json_body.get("time_spent") or 0)
285 306 if ts_ms > 0:
286 event_dict['end_time'] = event_dict['start_time'] + \
287 timedelta(milliseconds=ts_ms)
307 event_dict["end_time"] = event_dict["start_time"] + timedelta(
308 milliseconds=ts_ms
309 )
288 310 if stacktrace or alt_stacktrace or template:
289 event_dict['traceback'] = stacktrace or alt_stacktrace or template
311 event_dict["traceback"] = stacktrace or alt_stacktrace or template
290 312 for k in list(event_dict.keys()):
291 313 if event_dict[k] is None:
292 314 del event_dict[k]
293 315 if user:
294 event_dict['username'] = user['username'] or user['id'] \
295 or user['email']
316 event_dict["username"] = user["username"] or user["id"] or user["email"]
296 317 return event_dict, event_type
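
A hedged usage sketch: fed a minimal Sentry log payload (all field values below are illustrative), the parser should return a log-type event:

    event, kind = parse_sentry_event({
        "event_id": "abc123",
        "message": "something happened",
        "timestamp": "2018-02-28T13:52:50",
        "level": "error",
        "logger": "myapp",
        "platform": "python",
    })
    # kind == ParsedSentryEventType.LOG
    # event["message"] == "something happened"; event["namespace"] == "myapp"
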
@@ -13,5 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
17
@@ -41,7 +41,7 b' target_metadata = MetaData(naming_convention=NAMING_CONVENTION)'
41 41 # my_important_option = config.get_main_option("my_important_option")
42 42 # ... etc.
43 43
44 VERSION_TABLE_NAME = 'alembic_appenlight_version'
44 VERSION_TABLE_NAME = "alembic_appenlight_version"
45 45
46 46
47 47 def run_migrations_offline():
@@ -57,9 +57,12 b' def run_migrations_offline():'
57 57
58 58 """
59 59 url = config.get_main_option("sqlalchemy.url")
60 context.configure(url=url, target_metadata=target_metadata,
60 context.configure(
61 url=url,
62 target_metadata=target_metadata,
61 63 transaction_per_migration=True,
62 version_table=VERSION_TABLE_NAME)
64 version_table=VERSION_TABLE_NAME,
65 )
63 66
64 67 with context.begin_transaction():
65 68 context.run_migrations()
@@ -74,15 +77,16 b' def run_migrations_online():'
74 77 """
75 78 engine = engine_from_config(
76 79 config.get_section(config.config_ini_section),
77 prefix='sqlalchemy.',
78 poolclass=pool.NullPool)
80 prefix="sqlalchemy.",
81 poolclass=pool.NullPool,
82 )
79 83
80 84 connection = engine.connect()
81 85 context.configure(
82 86 connection=connection,
83 87 target_metadata=target_metadata,
84 88 transaction_per_migration=True,
85 version_table=VERSION_TABLE_NAME
89 version_table=VERSION_TABLE_NAME,
86 90 )
87 91
88 92 try:
@@ -23,7 +23,7 b' Create Date: 2014-10-13 23:47:38.295159'
23 23 """
24 24
25 25 # revision identifiers, used by Alembic.
26 revision = '55b6e612672f'
26 revision = "55b6e612672f"
27 27 down_revision = None
28 28
29 29 from alembic import op
@@ -31,348 +31,514 b' import sqlalchemy as sa'
31 31
32 32
33 33 def upgrade():
34 op.add_column('users', sa.Column('first_name', sa.Unicode(25)))
35 op.add_column('users', sa.Column('last_name', sa.Unicode(50)))
36 op.add_column('users', sa.Column('company_name', sa.Unicode(255)))
37 op.add_column('users', sa.Column('company_address', sa.Unicode(255)))
38 op.add_column('users', sa.Column('phone1', sa.Unicode(25)))
39 op.add_column('users', sa.Column('phone2', sa.Unicode(25)))
40 op.add_column('users', sa.Column('zip_code', sa.Unicode(25)))
41 op.add_column('users', sa.Column('default_report_sort', sa.Unicode(20), nullable=False, server_default="newest"))
42 op.add_column('users', sa.Column('city', sa.Unicode(128)))
43 op.add_column('users', sa.Column('notes', sa.UnicodeText, server_default=''))
44 op.add_column('users', sa.Column('notifications', sa.Boolean(), nullable=False, server_default='true'))
45 op.add_column('users', sa.Column('registration_ip', sa.Unicode(40), nullable=False, server_default=''))
34 op.add_column("users", sa.Column("first_name", sa.Unicode(25)))
35 op.add_column("users", sa.Column("last_name", sa.Unicode(50)))
36 op.add_column("users", sa.Column("company_name", sa.Unicode(255)))
37 op.add_column("users", sa.Column("company_address", sa.Unicode(255)))
38 op.add_column("users", sa.Column("phone1", sa.Unicode(25)))
39 op.add_column("users", sa.Column("phone2", sa.Unicode(25)))
40 op.add_column("users", sa.Column("zip_code", sa.Unicode(25)))
41 op.add_column(
42 "users",
43 sa.Column(
44 "default_report_sort",
45 sa.Unicode(20),
46 nullable=False,
47 server_default="newest",
48 ),
49 )
50 op.add_column("users", sa.Column("city", sa.Unicode(128)))
51 op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default=""))
52 op.add_column(
53 "users",
54 sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"),
55 )
56 op.add_column(
57 "users",
58 sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""),
59 )
46 60
47 61 op.create_table(
48 'integrations',
49 sa.Column('id', sa.Integer(), primary_key=True),
50 sa.Column('resource_id', sa.Integer(),
51 sa.ForeignKey('resources.resource_id', onupdate='cascade',
52 ondelete='cascade')),
53 sa.Column('integration_name', sa.Unicode(64)),
54 sa.Column('config', sa.dialects.postgresql.JSON, nullable=False),
55 sa.Column('modified_date', sa.DateTime(), nullable=False, server_default=sa.func.now()),
56 sa.Column('external_id', sa.Unicode(255)),
57 sa.Column('external_id2', sa.Unicode(255))
62 "integrations",
63 sa.Column("id", sa.Integer(), primary_key=True),
64 sa.Column(
65 "resource_id",
66 sa.Integer(),
67 sa.ForeignKey(
68 "resources.resource_id", onupdate="cascade", ondelete="cascade"
69 ),
70 ),
71 sa.Column("integration_name", sa.Unicode(64)),
72 sa.Column("config", sa.dialects.postgresql.JSON, nullable=False),
73 sa.Column(
74 "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now()
75 ),
76 sa.Column("external_id", sa.Unicode(255)),
77 sa.Column("external_id2", sa.Unicode(255)),
58 78 )
59 79
60 80 op.create_table(
61 'alert_channels',
62 sa.Column('owner_id', sa.Integer(),
63 sa.ForeignKey('users.id', onupdate='cascade',
64 ondelete='cascade'), nullable=False),
65 sa.Column('channel_name', sa.Unicode(25), nullable=False),
66 sa.Column('channel_value', sa.Unicode(80), nullable=False),
67 sa.Column('channel_json_conf', sa.dialects.postgresql.JSON, nullable=False),
68 sa.Column('channel_validated', sa.Boolean, nullable=False, server_default='False'),
69 sa.Column('send_alerts', sa.Boolean, nullable=False, server_default='True'),
70 sa.Column('notify_only_first', sa.Boolean, nullable=False, server_default='False'),
71 sa.Column('daily_digest', sa.Boolean, nullable=False, server_default='True'),
72 sa.Column('pkey', sa.Integer(), primary_key=True),
73 sa.Column('integration_id', sa.Integer,
74 sa.ForeignKey('integrations.id', onupdate='cascade',
75 ondelete='cascade')),
76 )
77 op.create_unique_constraint('uq_alert_channels', 'alert_channels',
78 ["owner_id", "channel_name", "channel_value"])
81 "alert_channels",
82 sa.Column(
83 "owner_id",
84 sa.Integer(),
85 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
86 nullable=False,
87 ),
88 sa.Column("channel_name", sa.Unicode(25), nullable=False),
89 sa.Column("channel_value", sa.Unicode(80), nullable=False),
90 sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False),
91 sa.Column(
92 "channel_validated", sa.Boolean, nullable=False, server_default="False"
93 ),
94 sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"),
95 sa.Column(
96 "notify_only_first", sa.Boolean, nullable=False, server_default="False"
97 ),
98 sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"),
99 sa.Column("pkey", sa.Integer(), primary_key=True),
100 sa.Column(
101 "integration_id",
102 sa.Integer,
103 sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"),
104 ),
105 )
106 op.create_unique_constraint(
107 "uq_alert_channels",
108 "alert_channels",
109 ["owner_id", "channel_name", "channel_value"],
110 )
79 111
80 112 op.create_table(
81 'alert_channels_actions',
82 sa.Column('owner_id', sa.Integer(), nullable=False),
83 sa.Column('resource_id', sa.Integer(),
84 sa.ForeignKey('resources.resource_id', onupdate='cascade',
85 ondelete='cascade')),
86 sa.Column('pkey', sa.Integer(), primary_key=True),
87 sa.Column('action', sa.Unicode(10), nullable=False, server_default='always'),
88 sa.Column('rule', sa.dialects.postgresql.JSON),
89 sa.Column('type', sa.Unicode(10), index=True),
90 sa.Column('other_id', sa.Unicode(40), index=True),
91 sa.Column('config', sa.dialects.postgresql.JSON),
92 sa.Column('name', sa.Unicode(255), server_default='')
113 "alert_channels_actions",
114 sa.Column("owner_id", sa.Integer(), nullable=False),
115 sa.Column(
116 "resource_id",
117 sa.Integer(),
118 sa.ForeignKey(
119 "resources.resource_id", onupdate="cascade", ondelete="cascade"
120 ),
121 ),
122 sa.Column("pkey", sa.Integer(), primary_key=True),
123 sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"),
124 sa.Column("rule", sa.dialects.postgresql.JSON),
125 sa.Column("type", sa.Unicode(10), index=True),
126 sa.Column("other_id", sa.Unicode(40), index=True),
127 sa.Column("config", sa.dialects.postgresql.JSON),
128 sa.Column("name", sa.Unicode(255), server_default=""),
93 129 )
94 130
95
96 131 op.create_table(
97 'application_postprocess_conf',
98 sa.Column('pkey', sa.Integer(), primary_key=True),
99 sa.Column('do', sa.Unicode(25), nullable=False),
100 sa.Column('new_value', sa.UnicodeText(), nullable=False, server_default=''),
101 sa.Column('resource_id', sa.Integer(),
102 sa.ForeignKey('resources.resource_id',
103 onupdate='cascade',
104 ondelete='cascade'), nullable=False),
105 sa.Column('rule', sa.dialects.postgresql.JSON),
132 "application_postprocess_conf",
133 sa.Column("pkey", sa.Integer(), primary_key=True),
134 sa.Column("do", sa.Unicode(25), nullable=False),
135 sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""),
136 sa.Column(
137 "resource_id",
138 sa.Integer(),
139 sa.ForeignKey(
140 "resources.resource_id", onupdate="cascade", ondelete="cascade"
141 ),
142 nullable=False,
143 ),
144 sa.Column("rule", sa.dialects.postgresql.JSON),
106 145 )
107 146
108 147 op.create_table(
109 'applications',
110 sa.Column('resource_id', sa.Integer(),
111 sa.ForeignKey('resources.resource_id', onupdate='cascade',
112 ondelete='cascade'), nullable=False,
113 primary_key=True, autoincrement=False),
114 sa.Column('domains', sa.UnicodeText, nullable=False),
115 sa.Column('api_key', sa.Unicode(32), nullable=False, index=True),
116 sa.Column('default_grouping', sa.Unicode(20), nullable=False, server_default='url_type'),
117 sa.Column('public_key', sa.Unicode(32), nullable=False, index=True),
118 sa.Column('error_report_threshold', sa.Integer(), server_default='10', nullable=False),
119 sa.Column('slow_report_threshold', sa.Integer(), server_default='10', nullable=False),
120 sa.Column('apdex_threshold', sa.Float(), server_default='0.7', nullable=False),
121 sa.Column('allow_permanent_storage', sa.Boolean(), server_default="false", nullable=False),
122 )
123 op.create_unique_constraint(None, 'applications',
124 ["public_key"])
125 op.create_unique_constraint(None, 'applications',
126 ["api_key"])
148 "applications",
149 sa.Column(
150 "resource_id",
151 sa.Integer(),
152 sa.ForeignKey(
153 "resources.resource_id", onupdate="cascade", ondelete="cascade"
154 ),
155 nullable=False,
156 primary_key=True,
157 autoincrement=False,
158 ),
159 sa.Column("domains", sa.UnicodeText, nullable=False),
160 sa.Column("api_key", sa.Unicode(32), nullable=False, index=True),
161 sa.Column(
162 "default_grouping",
163 sa.Unicode(20),
164 nullable=False,
165 server_default="url_type",
166 ),
167 sa.Column("public_key", sa.Unicode(32), nullable=False, index=True),
168 sa.Column(
169 "error_report_threshold", sa.Integer(), server_default="10", nullable=False
170 ),
171 sa.Column(
172 "slow_report_threshold", sa.Integer(), server_default="10", nullable=False
173 ),
174 sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False),
175 sa.Column(
176 "allow_permanent_storage",
177 sa.Boolean(),
178 server_default="false",
179 nullable=False,
180 ),
181 )
182 op.create_unique_constraint(None, "applications", ["public_key"])
183 op.create_unique_constraint(None, "applications", ["api_key"])
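
Passing None as the constraint name here is deliberate: Alembic falls back to the MetaData naming convention (the NAMING_CONVENTION dict defined in the models module further down), so these two constraints should come out named along the lines of:

    uq_applications_public_key   # from "uq": "uq_%(table_name)s_%(column_0_name)s"
    uq_applications_api_key
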
127 184
128 185 op.create_table(
129 'metrics',
130 sa.Column('pkey', sa.types.BigInteger, nullable=False, primary_key=True),
131 sa.Column('resource_id', sa.Integer(),
132 sa.ForeignKey('resources.resource_id',
133 onupdate='cascade',
134 ondelete='cascade')),
135 sa.Column('timestamp', sa.DateTime),
136 sa.Column('namespace', sa.Unicode(255)),
137 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}")
186 "metrics",
187 sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True),
188 sa.Column(
189 "resource_id",
190 sa.Integer(),
191 sa.ForeignKey(
192 "resources.resource_id", onupdate="cascade", ondelete="cascade"
193 ),
194 ),
195 sa.Column("timestamp", sa.DateTime),
196 sa.Column("namespace", sa.Unicode(255)),
197 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
138 198 )
139 199
140 200 op.create_table(
141 'events',
142 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
143 sa.Column('start_date', sa.DateTime, nullable=False, index=True),
144 sa.Column('end_date', sa.DateTime),
145 sa.Column('status', sa.Integer(), nullable=False, index=True),
146 sa.Column('event_type', sa.Integer(), nullable=False, index=True),
147 sa.Column('origin_user_id', sa.Integer()),
148 sa.Column('target_user_id', sa.Integer()),
149 sa.Column('resource_id', sa.Integer(), index=True),
150 sa.Column('text', sa.UnicodeText, server_default=''),
151 sa.Column('values', sa.dialects.postgresql.JSON),
152 sa.Column('target_id', sa.Integer()),
153 sa.Column('target_uuid', sa.Unicode(40), index=True)
201 "events",
202 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
203 sa.Column("start_date", sa.DateTime, nullable=False, index=True),
204 sa.Column("end_date", sa.DateTime),
205 sa.Column("status", sa.Integer(), nullable=False, index=True),
206 sa.Column("event_type", sa.Integer(), nullable=False, index=True),
207 sa.Column("origin_user_id", sa.Integer()),
208 sa.Column("target_user_id", sa.Integer()),
209 sa.Column("resource_id", sa.Integer(), index=True),
210 sa.Column("text", sa.UnicodeText, server_default=""),
211 sa.Column("values", sa.dialects.postgresql.JSON),
212 sa.Column("target_id", sa.Integer()),
213 sa.Column("target_uuid", sa.Unicode(40), index=True),
154 214 )
155 215
156 216 op.create_table(
157 'logs',
158 sa.Column('log_id', sa.types.BigInteger, nullable=False, primary_key=True),
159 sa.Column('resource_id', sa.Integer(),
160 sa.ForeignKey('resources.resource_id',
161 onupdate='cascade',
162 ondelete='cascade')),
163 sa.Column('log_level', sa.SmallInteger(), nullable=False),
164 sa.Column('primary_key', sa.Unicode(128), nullable=True),
165 sa.Column('message', sa.UnicodeText, nullable=False, server_default=''),
166 sa.Column('timestamp', sa.DateTime),
167 sa.Column('namespace', sa.Unicode(255)),
168 sa.Column('request_id', sa.Unicode(40)),
169 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}"),
170 sa.Column('permanent', sa.Boolean(), server_default="false",
171 nullable=False)
217 "logs",
218 sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True),
219 sa.Column(
220 "resource_id",
221 sa.Integer(),
222 sa.ForeignKey(
223 "resources.resource_id", onupdate="cascade", ondelete="cascade"
224 ),
225 ),
226 sa.Column("log_level", sa.SmallInteger(), nullable=False),
227 sa.Column("primary_key", sa.Unicode(128), nullable=True),
228 sa.Column("message", sa.UnicodeText, nullable=False, server_default=""),
229 sa.Column("timestamp", sa.DateTime),
230 sa.Column("namespace", sa.Unicode(255)),
231 sa.Column("request_id", sa.Unicode(40)),
232 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
233 sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False),
172 234 )
173 235
174 236 op.create_table(
175 'reports_groups',
176 sa.Column('id', sa.types.BigInteger, primary_key=True),
177 sa.Column('resource_id', sa.Integer,
178 sa.ForeignKey('resources.resource_id', onupdate='cascade',
179 ondelete='cascade'), nullable=False),
180 sa.Column('priority', sa.Integer, nullable=False, server_default="5"),
181 sa.Column('first_timestamp', sa.DateTime(), nullable=False, server_default=sa.func.now()),
182 sa.Column('last_timestamp', sa.DateTime()),
183 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
184 sa.Column('grouping_hash', sa.Unicode(40), nullable=False, server_default=""),
185 sa.Column('triggered_postprocesses_ids', sa.dialects.postgresql.JSON, nullable=False, server_default="[]"),
186 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
187 sa.Column('total_reports', sa.Integer, nullable=False, server_default="0"),
188 sa.Column('last_report', sa.Integer, nullable=False, server_default="0"),
189 sa.Column('occurences', sa.Integer, nullable=False, server_default="1"),
190 sa.Column('average_duration', sa.Float(), nullable=False, server_default="0"),
191 sa.Column('summed_duration', sa.Float(), nullable=False, server_default="0"),
192 sa.Column('notified', sa.Boolean, nullable=False, server_default="False"),
193 sa.Column('fixed', sa.Boolean, nullable=False, server_default="False"),
194 sa.Column('public', sa.Boolean, nullable=False, server_default="False"),
195 sa.Column('read', sa.Boolean, nullable=False, server_default="False"),
237 "reports_groups",
238 sa.Column("id", sa.types.BigInteger, primary_key=True),
239 sa.Column(
240 "resource_id",
241 sa.Integer,
242 sa.ForeignKey(
243 "resources.resource_id", onupdate="cascade", ondelete="cascade"
244 ),
245 nullable=False,
246 ),
247 sa.Column("priority", sa.Integer, nullable=False, server_default="5"),
248 sa.Column(
249 "first_timestamp",
250 sa.DateTime(),
251 nullable=False,
252 server_default=sa.func.now(),
253 ),
254 sa.Column("last_timestamp", sa.DateTime()),
255 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
256 sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""),
257 sa.Column(
258 "triggered_postprocesses_ids",
259 sa.dialects.postgresql.JSON,
260 nullable=False,
261 server_default="[]",
262 ),
263 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
264 sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"),
265 sa.Column("last_report", sa.Integer, nullable=False, server_default="0"),
266 sa.Column("occurences", sa.Integer, nullable=False, server_default="1"),
267 sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"),
268 sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"),
269 sa.Column("notified", sa.Boolean, nullable=False, server_default="False"),
270 sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"),
271 sa.Column("public", sa.Boolean, nullable=False, server_default="False"),
272 sa.Column("read", sa.Boolean, nullable=False, server_default="False"),
196 273 )
197 274
198 275 op.create_table(
199 'reports',
200 sa.Column('id', sa.types.BigInteger, primary_key=True),
201 sa.Column('group_id', sa.types.BigInteger,
202 sa.ForeignKey('reports_groups.id', onupdate='cascade',
203 ondelete='cascade'), nullable=False, index=True),
204 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
205 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
206 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
207 sa.Column('extra', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
208 sa.Column('request', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
209 sa.Column('tags', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
210 sa.Column('ip', sa.Unicode(39), nullable=False, server_default=""),
211 sa.Column('username', sa.Unicode(255), nullable=False, server_default=""),
212 sa.Column('user_agent', sa.Unicode(512), nullable=False, server_default=""),
213 sa.Column('url', sa.UnicodeText, nullable=False, server_default=""),
214 sa.Column('request_id', sa.Unicode(40), nullable=False, server_default=""),
215 sa.Column('request_stats', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
216 sa.Column('traceback', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
217 sa.Column('traceback_hash', sa.Unicode(40), nullable=False, server_default=""),
218 sa.Column('start_time', sa.DateTime(), nullable=False, server_default=sa.func.now()),
219 sa.Column('end_time', sa.DateTime()),
220 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
221 sa.Column('duration', sa.Float(), nullable=False, server_default="0"),
222 sa.Column('http_status', sa.Integer, index=True),
223 sa.Column('url_domain', sa.Unicode(128)),
224 sa.Column('url_path', sa.UnicodeText),
225 sa.Column('language', sa.Integer, server_default="0"),
226 )
227 op.create_index(None, 'reports',
228 [sa.text("(tags ->> 'server_name')")])
229 op.create_index(None, 'reports',
230 [sa.text("(tags ->> 'view_name')")])
276 "reports",
277 sa.Column("id", sa.types.BigInteger, primary_key=True),
278 sa.Column(
279 "group_id",
280 sa.types.BigInteger,
281 sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"),
282 nullable=False,
283 index=True,
284 ),
285 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
286 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
287 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
288 sa.Column(
289 "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
290 ),
291 sa.Column(
292 "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
293 ),
294 sa.Column(
295 "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
296 ),
297 sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""),
298 sa.Column("username", sa.Unicode(255), nullable=False, server_default=""),
299 sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""),
300 sa.Column("url", sa.UnicodeText, nullable=False, server_default=""),
301 sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""),
302 sa.Column(
303 "request_stats",
304 sa.dialects.postgresql.JSON,
305 nullable=False,
306 server_default="{}",
307 ),
308 sa.Column(
309 "traceback",
310 sa.dialects.postgresql.JSON,
311 nullable=False,
312 server_default="{}",
313 ),
314 sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""),
315 sa.Column(
316 "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now()
317 ),
318 sa.Column("end_time", sa.DateTime()),
319 sa.Column(
320 "report_group_time",
321 sa.DateTime,
322 index=True,
323 nullable=False,
324 server_default=sa.func.now(),
325 ),
326 sa.Column("duration", sa.Float(), nullable=False, server_default="0"),
327 sa.Column("http_status", sa.Integer, index=True),
328 sa.Column("url_domain", sa.Unicode(128)),
329 sa.Column("url_path", sa.UnicodeText),
330 sa.Column("language", sa.Integer, server_default="0"),
331 )
332 op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")])
333 op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")])
231 334
232 335 op.create_table(
233 'reports_assignments',
234 sa.Column('group_id', sa.types.BigInteger, nullable=False, primary_key=True),
235 sa.Column('owner_id', sa.Integer,
236 sa.ForeignKey('users.id', onupdate='cascade',ondelete='cascade'),
237 nullable=False, primary_key=True),
238 sa.Column('report_time', sa.DateTime, nullable=False)
336 "reports_assignments",
337 sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True),
338 sa.Column(
339 "owner_id",
340 sa.Integer,
341 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
342 nullable=False,
343 primary_key=True,
344 ),
345 sa.Column("report_time", sa.DateTime, nullable=False),
239 346 )
240 347
241 348 op.create_table(
242 'reports_comments',
243 sa.Column('comment_id', sa.Integer, primary_key=True),
244 sa.Column('body', sa.UnicodeText, nullable=False, server_default=''),
245 sa.Column('owner_id', sa.Integer,
246 sa.ForeignKey('users.id', onupdate='cascade',
247 ondelete='set null'), nullable=True),
248 sa.Column('created_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
249 sa.Column('report_time', sa.DateTime, nullable=False),
250 sa.Column('group_id', sa.types.BigInteger, nullable=False)
349 "reports_comments",
350 sa.Column("comment_id", sa.Integer, primary_key=True),
351 sa.Column("body", sa.UnicodeText, nullable=False, server_default=""),
352 sa.Column(
353 "owner_id",
354 sa.Integer,
355 sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"),
356 nullable=True,
357 ),
358 sa.Column(
359 "created_timestamp",
360 sa.DateTime,
361 nullable=False,
362 server_default=sa.func.now(),
363 ),
364 sa.Column("report_time", sa.DateTime, nullable=False),
365 sa.Column("group_id", sa.types.BigInteger, nullable=False),
251 366 )
252 367
253 368 op.create_table(
254 'reports_stats',
255 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
256 sa.Column('start_interval', sa.DateTime, nullable=False, index=True),
257 sa.Column('group_id', sa.types.BigInteger, index=True),
258 sa.Column('occurences', sa.Integer, nullable=False, server_default='0', index=True),
259 sa.Column('owner_user_id', sa.Integer),
260 sa.Column('type', sa.Integer, index=True, nullable=False),
261 sa.Column('duration', sa.Float(), server_default='0'),
262 sa.Column('server_name', sa.Unicode(128),
263 server_default=''),
264 sa.Column('view_name', sa.Unicode(128),
265 server_default=''),
266 sa.Column('id', sa.BigInteger(), nullable=False, primary_key=True),
267 )
268 op.create_index('ix_reports_stats_start_interval_group_id', 'reports_stats',
269 ["start_interval", "group_id"])
369 "reports_stats",
370 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
371 sa.Column("start_interval", sa.DateTime, nullable=False, index=True),
372 sa.Column("group_id", sa.types.BigInteger, index=True),
373 sa.Column(
374 "occurences", sa.Integer, nullable=False, server_default="0", index=True
375 ),
376 sa.Column("owner_user_id", sa.Integer),
377 sa.Column("type", sa.Integer, index=True, nullable=False),
378 sa.Column("duration", sa.Float(), server_default="0"),
379 sa.Column("server_name", sa.Unicode(128), server_default=""),
380 sa.Column("view_name", sa.Unicode(128), server_default=""),
381 sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True),
382 )
383 op.create_index(
384 "ix_reports_stats_start_interval_group_id",
385 "reports_stats",
386 ["start_interval", "group_id"],
387 )
270 388
271 389 op.create_table(
272 'slow_calls',
273 sa.Column('id', sa.types.BigInteger, primary_key=True),
274 sa.Column('report_id', sa.types.BigInteger, sa.ForeignKey('reports.id', onupdate='cascade', ondelete='cascade'),
275 nullable=False, index=True),
276 sa.Column('duration', sa.Float(), nullable=False, server_default="0", index=True),
277 sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now(), index=True),
278 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
279 sa.Column('type', sa.Unicode(16), nullable=False, index=True),
280 sa.Column('statement', sa.UnicodeText, nullable=False, server_default=''),
281 sa.Column('parameters', sa.dialects.postgresql.JSON, nullable=False),
282 sa.Column('location', sa.UnicodeText, server_default=''),
283 sa.Column('subtype', sa.Unicode(16), nullable=False, index=True),
284 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
285 sa.Column('statement_hash', sa.Unicode(60), index=True)
390 "slow_calls",
391 sa.Column("id", sa.types.BigInteger, primary_key=True),
392 sa.Column(
393 "report_id",
394 sa.types.BigInteger,
395 sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"),
396 nullable=False,
397 index=True,
398 ),
399 sa.Column(
400 "duration", sa.Float(), nullable=False, server_default="0", index=True
401 ),
402 sa.Column(
403 "timestamp",
404 sa.DateTime,
405 nullable=False,
406 server_default=sa.func.now(),
407 index=True,
408 ),
409 sa.Column(
410 "report_group_time",
411 sa.DateTime,
412 index=True,
413 nullable=False,
414 server_default=sa.func.now(),
415 ),
416 sa.Column("type", sa.Unicode(16), nullable=False, index=True),
417 sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""),
418 sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False),
419 sa.Column("location", sa.UnicodeText, server_default=""),
420 sa.Column("subtype", sa.Unicode(16), nullable=False, index=True),
421 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
422 sa.Column("statement_hash", sa.Unicode(60), index=True),
286 423 )
287 424
288 425 op.create_table(
289 'tags',
290 sa.Column('id', sa.types.BigInteger, primary_key=True),
291 sa.Column('resource_id', sa.Integer,
292 sa.ForeignKey('resources.resource_id', onupdate='cascade',
293 ondelete='cascade')),
294 sa.Column('first_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
295 sa.Column('last_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
296 sa.Column('name', sa.Unicode(32), nullable=False),
297 sa.Column('value', sa.dialects.postgresql.JSON, nullable=False),
298 sa.Column('times_seen', sa.Integer, nullable=False, server_default='1')
426 "tags",
427 sa.Column("id", sa.types.BigInteger, primary_key=True),
428 sa.Column(
429 "resource_id",
430 sa.Integer,
431 sa.ForeignKey(
432 "resources.resource_id", onupdate="cascade", ondelete="cascade"
433 ),
434 ),
435 sa.Column(
436 "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
437 ),
438 sa.Column(
439 "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
440 ),
441 sa.Column("name", sa.Unicode(32), nullable=False),
442 sa.Column("value", sa.dialects.postgresql.JSON, nullable=False),
443 sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"),
299 444 )
300 445
301 446 op.create_table(
302 'auth_tokens',
303 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
304 sa.Column('token', sa.Unicode),
305 sa.Column('creation_date', sa.DateTime, nullable=False, server_default=sa.func.now()),
306 sa.Column('expires', sa.DateTime),
307 sa.Column('owner_id', sa.Integer,
308 sa.ForeignKey('users.id', onupdate='cascade',
309 ondelete='cascade')),
310 sa.Column('description', sa.Unicode),
447 "auth_tokens",
448 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
449 sa.Column("token", sa.Unicode),
450 sa.Column(
451 "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now()
452 ),
453 sa.Column("expires", sa.DateTime),
454 sa.Column(
455 "owner_id",
456 sa.Integer,
457 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
458 ),
459 sa.Column("description", sa.Unicode),
311 460 )
312 461
313 462 op.create_table(
314 'channels_actions',
315 sa.Column('channel_pkey', sa.Integer,
316 sa.ForeignKey('alert_channels.pkey',
317 ondelete='CASCADE', onupdate='CASCADE')),
318 sa.Column('action_pkey', sa.Integer,
319 sa.ForeignKey('alert_channels_actions.pkey',
320 ondelete='CASCADE', onupdate='CASCADE'))
463 "channels_actions",
464 sa.Column(
465 "channel_pkey",
466 sa.Integer,
467 sa.ForeignKey(
468 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
469 ),
470 ),
471 sa.Column(
472 "action_pkey",
473 sa.Integer,
474 sa.ForeignKey(
475 "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE"
476 ),
477 ),
321 478 )
322 479
323 480 op.create_table(
324 'config',
325 sa.Column('key', sa.Unicode(128), primary_key=True),
326 sa.Column('section', sa.Unicode(128), primary_key=True),
327 sa.Column('value', sa.dialects.postgresql.JSON,
328 server_default="{}")
481 "config",
482 sa.Column("key", sa.Unicode(128), primary_key=True),
483 sa.Column("section", sa.Unicode(128), primary_key=True),
484 sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"),
329 485 )
330 486
331 487 op.create_table(
332 'plugin_configs',
333 sa.Column('id', sa.Integer, primary_key=True),
334 sa.Column('plugin_name', sa.Unicode(128)),
335 sa.Column('section', sa.Unicode(128)),
336 sa.Column('config', sa.dialects.postgresql.JSON,
337 server_default="{}"),
338 sa.Column('resource_id', sa.Integer(),
339 sa.ForeignKey('resources.resource_id', onupdate='cascade',
340 ondelete='cascade')),
341 sa.Column('owner_id', sa.Integer(),
342 sa.ForeignKey('users.id', onupdate='cascade',
343 ondelete='cascade')))
488 "plugin_configs",
489 sa.Column("id", sa.Integer, primary_key=True),
490 sa.Column("plugin_name", sa.Unicode(128)),
491 sa.Column("section", sa.Unicode(128)),
492 sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"),
493 sa.Column(
494 "resource_id",
495 sa.Integer(),
496 sa.ForeignKey(
497 "resources.resource_id", onupdate="cascade", ondelete="cascade"
498 ),
499 ),
500 sa.Column(
501 "owner_id",
502 sa.Integer(),
503 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
504 ),
505 )
344 506
345 507 op.create_table(
346 'rc_versions',
347 sa.Column('name', sa.Unicode(40), primary_key=True),
348 sa.Column('value', sa.Unicode(40)),
508 "rc_versions",
509 sa.Column("name", sa.Unicode(40), primary_key=True),
510 sa.Column("value", sa.Unicode(40)),
511 )
512 version_table = sa.table(
513 "rc_versions",
514 sa.Column("name", sa.Unicode(40)),
515 sa.Column("value", sa.Unicode(40)),
349 516 )
350 version_table = sa.table('rc_versions',
351 sa.Column('name', sa.Unicode(40)),
352 sa.Column('value', sa.Unicode(40)))
353 517
354 insert = version_table.insert().values(name='es_reports')
518 insert = version_table.insert().values(name="es_reports")
355 519 op.execute(insert)
356 insert = version_table.insert().values(name='es_reports_groups')
520 insert = version_table.insert().values(name="es_reports_groups")
357 521 op.execute(insert)
358 insert = version_table.insert().values(name='es_reports_stats')
522 insert = version_table.insert().values(name="es_reports_stats")
359 523 op.execute(insert)
360 insert = version_table.insert().values(name='es_logs')
524 insert = version_table.insert().values(name="es_logs")
361 525 op.execute(insert)
362 insert = version_table.insert().values(name='es_metrics')
526 insert = version_table.insert().values(name="es_metrics")
363 527 op.execute(insert)
364 insert = version_table.insert().values(name='es_slow_calls')
528 insert = version_table.insert().values(name="es_slow_calls")
365 529 op.execute(insert)
366 530
367
368 op.execute('''
531 op.execute(
532 """
369 533 CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
370 534 RETURNS timestamp without time zone AS
371 535 $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
372 536 LANGUAGE sql VOLATILE;
373 ''')
537 """
538 )
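
floor_time_5min itself is unchanged; it floors a timestamp to the previous 5-minute boundary. A Python equivalent of the same arithmetic, for illustration only:

    from datetime import datetime

    def floor_time_5min(ts):
        # date_trunc('hour', ts) + INTERVAL '5 min' * floor(minute / 5)
        return ts.replace(minute=ts.minute - ts.minute % 5, second=0, microsecond=0)

    floor_time_5min(datetime(2014, 10, 13, 23, 47, 38))
    # -> datetime(2014, 10, 13, 23, 45)
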
374 539
375 op.execute('''
540 op.execute(
541 """
376 542 CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger
377 543 LANGUAGE plpgsql SECURITY DEFINER
378 544 AS $$
@@ -426,13 +592,17 b' def upgrade():'
426 592 RETURN NULL;
427 593 END
428 594 $$;
429 ''')
595 """
596 )
430 597
431 op.execute('''
598 op.execute(
599 """
432 600 CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
433 ''')
601 """
602 )
434 603
435 op.execute('''
604 op.execute(
605 """
436 606 CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
437 607 LANGUAGE plpgsql SECURITY DEFINER
438 608 AS $$
@@ -463,13 +633,17 b' def upgrade():'
463 633 RETURN NULL;
464 634 END
465 635 $$;
466 ''')
636 """
637 )
467 638
468 op.execute('''
639 op.execute(
640 """
469 641 CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
470 ''')
642 """
643 )
471 644
472 op.execute('''
645 op.execute(
646 """
473 647 CREATE FUNCTION partition_reports_stats() RETURNS trigger
474 648 LANGUAGE plpgsql SECURITY DEFINER
475 649 AS $$
@@ -499,13 +673,17 b' def upgrade():'
499 673 RETURN NULL;
500 674 END
501 675 $$;
502 ''')
676 """
677 )
503 678
504 op.execute('''
679 op.execute(
680 """
505 681 CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
506 ''')
682 """
683 )
507 684
508 op.execute('''
685 op.execute(
686 """
509 687 CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
510 688 LANGUAGE plpgsql SECURITY DEFINER
511 689 AS $$
@@ -533,13 +711,17 b' def upgrade():'
533 711 RETURN NULL;
534 712 END
535 713 $$;
536 ''')
714 """
715 )
537 716
538 op.execute('''
717 op.execute(
718 """
539 719 CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
540 ''')
720 """
721 )
541 722
542 op.execute('''
723 op.execute(
724 """
543 725 CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
544 726 LANGUAGE plpgsql SECURITY DEFINER
545 727 AS $$
@@ -573,14 +755,17 b' def upgrade():'
573 755 RETURN NULL;
574 756 END
575 757 $$;
576 ''')
758 """
759 )
577 760
578 op.execute('''
761 op.execute(
762 """
579 763 CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
580 ''')
581
764 """
765 )
582 766
583 op.execute('''
767 op.execute(
768 """
584 769 CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
585 770 LANGUAGE plpgsql SECURITY DEFINER
586 771 AS $$
@@ -614,11 +799,15 b' def upgrade():'
614 799 RETURN NULL;
615 800 END
616 801 $$;
617 ''')
802 """
803 )
618 804
619 op.execute('''
805 op.execute(
806 """
620 807 CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
621 ''')
808 """
809 )
810
622 811
623 812 def downgrade():
624 813 pass
@@ -7,8 +7,8 b' Create Date: 2018-02-28 13:52:50.717217'
7 7 """
8 8
9 9 # revision identifiers, used by Alembic.
10 revision = 'e9fcfbdd9498'
11 down_revision = '55b6e612672f'
10 revision = "e9fcfbdd9498"
11 down_revision = "55b6e612672f"
12 12
13 13 from alembic import op
14 14 import sqlalchemy as sa
@@ -16,17 +16,25 b' import sqlalchemy as sa'
16 16
17 17 def upgrade():
18 18 op.create_table(
19 'channels_resources',
20 sa.Column('channel_pkey', sa.Integer,
21 sa.ForeignKey('alert_channels.pkey',
22 ondelete='CASCADE', onupdate='CASCADE'),
23 primary_key=True),
24 sa.Column('resource_id', sa.Integer,
25 sa.ForeignKey('resources.resource_id',
26 ondelete='CASCADE', onupdate='CASCADE'),
27 primary_key=True)
19 "channels_resources",
20 sa.Column(
21 "channel_pkey",
22 sa.Integer,
23 sa.ForeignKey(
24 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
25 ),
26 primary_key=True,
27 ),
28 sa.Column(
29 "resource_id",
30 sa.Integer,
31 sa.ForeignKey(
32 "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE"
33 ),
34 primary_key=True,
35 ),
28 36 )
29 37
30 38
31 39 def downgrade():
32 op.drop_table('channels_resources')
40 op.drop_table("channels_resources")
@@ -29,11 +29,11 b' log = logging.getLogger(__name__)'
29 29 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
30 30
31 31 NAMING_CONVENTION = {
32 "ix": 'ix_%(column_0_label)s',
32 "ix": "ix_%(column_0_label)s",
33 33 "uq": "uq_%(table_name)s_%(column_0_name)s",
34 34 "ck": "ck_%(table_name)s_%(constraint_name)s",
35 35 "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
36 "pk": "pk_%(table_name)s"
36 "pk": "pk_%(table_name)s",
37 37 }
38 38
39 39 metadata = MetaData(naming_convention=NAMING_CONVENTION)
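
These templates drive every auto-generated constraint and index name; %(table_name)s, %(column_0_name)s and %(column_0_label)s are expanded by SQLAlchemy when DDL is emitted. For example:

    # "ix": "ix_%(column_0_label)s"                -> ix_applications_api_key
    # "uq": "uq_%(table_name)s_%(column_0_name)s"  -> uq_alert_channels_owner_id
    # "pk": "pk_%(table_name)s"                    -> pk_users
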
@@ -59,23 +59,24 b' class SliceableESQuery(object):'
59 59 self.query = query
60 60 self.sort_query = sort_query
61 61 self.aggregations = aggregations
62 self.items_per_page = kwconfig.pop('items_per_page', 10)
63 self.page = kwconfig.pop('page', 1)
62 self.items_per_page = kwconfig.pop("items_per_page", 10)
63 self.page = kwconfig.pop("page", 1)
64 64 self.kwconfig = kwconfig
65 65 self.result = None
66 66
67 67 def __getitem__(self, index):
68 68 config = self.kwconfig.copy()
69 config['from_'] = index.start
69 config["from_"] = index.start
70 70 query = self.query.copy()
71 71 if self.sort_query:
72 72 query.update(self.sort_query)
73 self.result = Datastores.es.search(body=query, size=self.items_per_page,
74 **config)
73 self.result = Datastores.es.search(
74 body=query, size=self.items_per_page, **config
75 )
75 76 if self.aggregations:
76 self.items = self.result.get('aggregations')
77 self.items = self.result.get("aggregations")
77 78 else:
78 self.items = self.result['hits']['hits']
79 self.items = self.result["hits"]["hits"]
79 80
80 81 return self.items
81 82
@@ -85,14 +86,15 b' class SliceableESQuery(object):'
85 86 def __len__(self):
86 87 config = self.kwconfig.copy()
87 88 query = self.query.copy()
88 self.result = Datastores.es.search(body=query, size=self.items_per_page,
89 **config)
89 self.result = Datastores.es.search(
90 body=query, size=self.items_per_page, **config
91 )
90 92 if self.aggregations:
91 self.items = self.result.get('aggregations')
93 self.items = self.result.get("aggregations")
92 94 else:
93 self.items = self.result['hits']['hits']
95 self.items = self.result["hits"]["hits"]
94 96
95 count = int(self.result['hits']['total'])
97 count = int(self.result["hits"]["total"])
96 98 return count if count < 5000 else 5000
97 99
98 100
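SliceableESQuery lets paginator-style consumers treat an Elasticsearch query like a list: slicing issues one search with from_/size, and len() runs its own search and caps the reported total at 5000. A usage sketch, assuming the constructor signature implied by the body above (query first, paging options in kwconfig):

    # Sketch: page/items_per_page are popped from kwconfig in __init__;
    # the slice below translates to from_=20, size=10 on the ES call.
    query = {"query": {"term": {"resource_id": 1}}}
    sliceable = SliceableESQuery(query, items_per_page=10, page=3)
    hits = sliceable[20:30]   # one Datastores.es.search round-trip
    total = len(sliceable)    # second search; total capped at 5000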
@@ -102,8 +104,7 b' from appenlight.models.user import User'
102 104 from appenlight.models.alert_channel import AlertChannel
103 105 from appenlight.models.alert_channel_action import AlertChannelAction
104 106 from appenlight.models.metric import Metric
105 from appenlight.models.application_postprocess_conf import \
106 ApplicationPostprocessConf
107 from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
107 108 from appenlight.models.auth_token import AuthToken
108 109 from appenlight.models.event import Event
109 110 from appenlight.models.external_identity import ExternalIdentity
@@ -124,7 +125,15 b' from appenlight.models.user_permission import UserPermission'
124 125 from appenlight.models.user_resource_permission import UserResourcePermission
125 126 from ziggurat_foundations import ziggurat_model_init
126 127
127 ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission,
128 UserResourcePermission, GroupResourcePermission,
128 ziggurat_model_init(
129 User,
130 Group,
131 UserGroup,
132 GroupPermission,
133 UserPermission,
134 UserResourcePermission,
135 GroupResourcePermission,
129 136 Resource,
130 ExternalIdentity, passwordmanager=None)
137 ExternalIdentity,
138 passwordmanager=None,
139 )
@@ -27,126 +27,125 b' log = logging.getLogger(__name__)'
27 27
28 28 #
29 29 channel_rules_m2m_table = sa.Table(
30 'channels_actions', Base.metadata,
31 sa.Column('channel_pkey', sa.Integer,
32 sa.ForeignKey('alert_channels.pkey')),
33 sa.Column('action_pkey', sa.Integer,
34 sa.ForeignKey('alert_channels_actions.pkey'))
30 "channels_actions",
31 Base.metadata,
32 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
33 sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")),
35 34 )
36 35
37 36 channel_resources_m2m_table = sa.Table(
38 'channels_resources', Base.metadata,
39 sa.Column('channel_pkey', sa.Integer,
40 sa.ForeignKey('alert_channels.pkey')),
41 sa.Column('resource_id', sa.Integer,
42 sa.ForeignKey('resources.resource_id'))
37 "channels_resources",
38 Base.metadata,
39 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
40 sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")),
43 41 )
44 42
45 DATE_FRMT = '%Y-%m-%dT%H:%M'
43 DATE_FRMT = "%Y-%m-%dT%H:%M"
46 44
47 45
48 46 class AlertChannel(Base, BaseModel):
49 47 """
50 48 Stores information about possible alerting options
51 49 """
52 __tablename__ = 'alert_channels'
53 __possible_channel_names__ = ['email']
50
51 __tablename__ = "alert_channels"
52 __possible_channel_names__ = ["email"]
54 53 __mapper_args__ = {
55 'polymorphic_on': 'channel_name',
56 'polymorphic_identity': 'integration'
54 "polymorphic_on": "channel_name",
55 "polymorphic_identity": "integration",
57 56 }
58 57
59 owner_id = sa.Column(sa.Unicode(30),
60 sa.ForeignKey('users.id', onupdate='CASCADE',
61 ondelete='CASCADE'))
58 owner_id = sa.Column(
59 sa.Unicode(30),
60 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
61 )
62 62 channel_name = sa.Column(sa.Unicode(25), nullable=False)
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default='')
64 channel_json_conf = sa.Column(JSON(), nullable=False, default='')
65 channel_validated = sa.Column(sa.Boolean, nullable=False,
66 default=False)
67 send_alerts = sa.Column(sa.Boolean, nullable=False,
68 default=True)
69 daily_digest = sa.Column(sa.Boolean, nullable=False,
70 default=True)
71 integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'),
72 nullable=True)
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default="")
64 channel_json_conf = sa.Column(JSON(), nullable=False, default="")
65 channel_validated = sa.Column(sa.Boolean, nullable=False, default=False)
66 send_alerts = sa.Column(sa.Boolean, nullable=False, default=True)
67 daily_digest = sa.Column(sa.Boolean, nullable=False, default=True)
68 integration_id = sa.Column(
69 sa.Integer, sa.ForeignKey("integrations.id"), nullable=True
70 )
73 71 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
74 72
75 channel_actions = sa.orm.relationship('AlertChannelAction',
73 channel_actions = sa.orm.relationship(
74 "AlertChannelAction",
76 75 cascade="all",
77 76 passive_deletes=True,
78 77 passive_updates=True,
79 78 secondary=channel_rules_m2m_table,
80 backref='channels')
81 resources = sa.orm.relationship('Resource',
79 backref="channels",
80 )
81 resources = sa.orm.relationship(
82 "Resource",
82 83 cascade="all",
83 84 passive_deletes=True,
84 85 passive_updates=True,
85 86 secondary=channel_resources_m2m_table,
86 backref='resources')
87 backref="resources",
88 )
87 89
88 90 @property
89 91 def channel_visible_value(self):
90 92 if self.integration:
91 return '{}: {}'.format(
92 self.channel_name,
93 self.integration.resource.resource_name
93 return "{}: {}".format(
94 self.channel_name, self.integration.resource.resource_name
94 95 )
95 96
96 return '{}: {}'.format(
97 self.channel_name,
98 self.channel_value
99 )
97 return "{}: {}".format(self.channel_name, self.channel_value)
100 98
101 def get_dict(self, exclude_keys=None, include_keys=None,
102 extended_info=True):
99 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True):
103 100 """
104 101 Returns a dictionary with the required information that will be consumed by
105 102 angular
106 103 """
107 instance_dict = super(AlertChannel, self).get_dict(exclude_keys,
108 include_keys)
104 instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys)
109 105 exclude_keys_list = exclude_keys or []
110 106 include_keys_list = include_keys or []
111 107
112 instance_dict['supports_report_alerting'] = True
113 instance_dict['channel_visible_value'] = self.channel_visible_value
108 instance_dict["supports_report_alerting"] = True
109 instance_dict["channel_visible_value"] = self.channel_visible_value
114 110
115 111 if extended_info:
116 instance_dict['actions'] = [
117 rule.get_dict(extended_info=True) for
118 rule in self.channel_actions]
112 instance_dict["actions"] = [
113 rule.get_dict(extended_info=True) for rule in self.channel_actions
114 ]
119 115
120 del instance_dict['channel_json_conf']
116 del instance_dict["channel_json_conf"]
121 117
122 118 if self.integration:
123 119 instance_dict[
124 'supports_report_alerting'] = \
125 self.integration.supports_report_alerting
120 "supports_report_alerting"
121 ] = self.integration.supports_report_alerting
126 122 d = {}
127 123 for k in instance_dict.keys():
128 if (k not in exclude_keys_list and
129 (k in include_keys_list or not include_keys)):
124 if k not in exclude_keys_list and (
125 k in include_keys_list or not include_keys
126 ):
130 127 d[k] = instance_dict[k]
131 128 return d
132 129
133 130 def __repr__(self):
134 return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name,
131 return "<AlertChannel: (%s,%s), user:%s>" % (
132 self.channel_name,
135 133 self.channel_value,
136 self.user_name,)
134 self.user_name,
135 )
137 136
138 137 def send_digest(self, **kwargs):
139 138 """
140 139 This should implement daily top error report notifications
141 140 """
142 log.warning('send_digest NOT IMPLEMENTED')
141 log.warning("send_digest NOT IMPLEMENTED")
143 142
144 143 def notify_reports(self, **kwargs):
145 144 """
146 145 This should implement notification of reports that occurred in a 1 min
147 146 interval
148 147 """
149 log.warning('notify_reports NOT IMPLEMENTED')
148 log.warning("notify_reports NOT IMPLEMENTED")
150 149
151 150 def notify_alert(self, **kwargs):
152 151 """
@@ -160,87 +159,85 b' class AlertChannel(Base, BaseModel):'
160 159 request: request object
161 160
162 161 """
163 alert_name = kwargs['event'].unified_alert_name()
164 if alert_name in ['slow_report_alert', 'error_report_alert']:
162 alert_name = kwargs["event"].unified_alert_name()
163 if alert_name in ["slow_report_alert", "error_report_alert"]:
165 164 self.notify_report_alert(**kwargs)
166 elif alert_name == 'uptime_alert':
165 elif alert_name == "uptime_alert":
167 166 self.notify_uptime_alert(**kwargs)
168 elif alert_name == 'chart_alert':
167 elif alert_name == "chart_alert":
169 168 self.notify_chart_alert(**kwargs)
170 169
171 170 def notify_chart_alert(self, **kwargs):
172 171 """
173 172 This should implement chart open/close alert notifications
174 173 """
175 log.warning('notify_chart_alert NOT IMPLEMENTED')
174 log.warning("notify_chart_alert NOT IMPLEMENTED")
176 175
177 176 def notify_report_alert(self, **kwargs):
178 177 """
179 178 This should implement report open/close alert notifications
180 179 """
181 log.warning('notify_report_alert NOT IMPLEMENTED')
180 log.warning("notify_report_alert NOT IMPLEMENTED")
182 181
183 182 def notify_uptime_alert(self, **kwargs):
184 183 """
185 184 This should implement uptime open/close alert notifications
186 185 """
187 log.warning('notify_uptime_alert NOT IMPLEMENTED')
186 log.warning("notify_uptime_alert NOT IMPLEMENTED")
188 187
189 188 def get_notification_basic_vars(self, kwargs):
190 189 """
191 190 Sets the most common variables used later for rendering notifications for
192 191 a channel
193 192 """
194 if 'event' in kwargs:
195 kwargs['since_when'] = kwargs['event'].start_date
193 if "event" in kwargs:
194 kwargs["since_when"] = kwargs["event"].start_date
196 195
197 url_start_date = kwargs.get('since_when') - timedelta(minutes=1)
198 url_end_date = kwargs.get('since_when') + timedelta(minutes=4)
196 url_start_date = kwargs.get("since_when") - timedelta(minutes=1)
197 url_end_date = kwargs.get("since_when") + timedelta(minutes=4)
199 198 tmpl_vars = {
200 "timestamp": kwargs['since_when'],
201 "user": kwargs['user'],
202 "since_when": kwargs.get('since_when'),
199 "timestamp": kwargs["since_when"],
200 "user": kwargs["user"],
201 "since_when": kwargs.get("since_when"),
203 202 "url_start_date": url_start_date,
204 "url_end_date": url_end_date
203 "url_end_date": url_end_date,
205 204 }
206 tmpl_vars["resource_name"] = kwargs['resource'].resource_name
207 tmpl_vars["resource"] = kwargs['resource']
205 tmpl_vars["resource_name"] = kwargs["resource"].resource_name
206 tmpl_vars["resource"] = kwargs["resource"]
208 207
209 if 'event' in kwargs:
210 tmpl_vars['event_values'] = kwargs['event'].values
211 tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name()
212 tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action()
208 if "event" in kwargs:
209 tmpl_vars["event_values"] = kwargs["event"].values
210 tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name()
211 tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action()
213 212 return tmpl_vars
214 213
215 214 def report_alert_notification_vars(self, kwargs):
216 215 tmpl_vars = self.get_notification_basic_vars(kwargs)
217 reports = kwargs.get('reports', [])
216 reports = kwargs.get("reports", [])
218 217 tmpl_vars["reports"] = reports
219 218 tmpl_vars["confirmed_total"] = len(reports)
220 219
221 220 tmpl_vars["report_type"] = "error reports"
222 tmpl_vars["url_report_type"] = 'report/list'
221 tmpl_vars["url_report_type"] = "report/list"
223 222
224 alert_type = tmpl_vars.get('alert_type', '')
225 if 'slow_report' in alert_type:
223 alert_type = tmpl_vars.get("alert_type", "")
224 if "slow_report" in alert_type:
226 225 tmpl_vars["report_type"] = "slow reports"
227 tmpl_vars["url_report_type"] = 'report/list_slow'
226 tmpl_vars["url_report_type"] = "report/list_slow"
228 227
229 app_url = kwargs['request'].registry.settings['_mail_url']
228 app_url = kwargs["request"].registry.settings["_mail_url"]
230 229
231 destination_url = kwargs['request'].route_url('/',
232 _app_url=app_url)
230 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
233 231 if alert_type:
234 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
232 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(
235 233 tmpl_vars["url_report_type"],
236 tmpl_vars['resource'].resource_id,
237 tmpl_vars['url_start_date'].strftime(DATE_FRMT),
238 tmpl_vars['url_end_date'].strftime(DATE_FRMT)
234 tmpl_vars["resource"].resource_id,
235 tmpl_vars["url_start_date"].strftime(DATE_FRMT),
236 tmpl_vars["url_end_date"].strftime(DATE_FRMT),
239 237 )
240 238 else:
241 destination_url += 'ui/{}?resource={}'.format(
242 tmpl_vars["url_report_type"],
243 tmpl_vars['resource'].resource_id
239 destination_url += "ui/{}?resource={}".format(
240 tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id
244 241 )
245 242 tmpl_vars["destination_url"] = destination_url
246 243
@@ -248,58 +245,54 b' class AlertChannel(Base, BaseModel):'
248 245
249 246 def uptime_alert_notification_vars(self, kwargs):
250 247 tmpl_vars = self.get_notification_basic_vars(kwargs)
251 app_url = kwargs['request'].registry.settings['_mail_url']
252 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
253 destination_url += 'ui/{}?resource={}'.format(
254 'uptime',
255 tmpl_vars['resource'].resource_id)
256 tmpl_vars['destination_url'] = destination_url
257
258 reason = ''
259 e_values = tmpl_vars.get('event_values')
260
261 if e_values and e_values.get('response_time') == 0:
262 reason += ' Response time was slower than 20 seconds.'
248 app_url = kwargs["request"].registry.settings["_mail_url"]
249 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
250 destination_url += "ui/{}?resource={}".format(
251 "uptime", tmpl_vars["resource"].resource_id
252 )
253 tmpl_vars["destination_url"] = destination_url
254
255 reason = ""
256 e_values = tmpl_vars.get("event_values")
257
258 if e_values and e_values.get("response_time") == 0:
259 reason += " Response time was slower than 20 seconds."
263 260 elif e_values:
264 code = e_values.get('status_code')
265 reason += ' Response status code: %s.' % code
261 code = e_values.get("status_code")
262 reason += " Response status code: %s." % code
266 263
267 tmpl_vars['reason'] = reason
264 tmpl_vars["reason"] = reason
268 265 return tmpl_vars
269 266
270 267 def chart_alert_notification_vars(self, kwargs):
271 268 tmpl_vars = self.get_notification_basic_vars(kwargs)
272 tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name']
273 tmpl_vars['action_name'] = tmpl_vars['event_values'].get(
274 'action_name') or ''
275 matched_values = tmpl_vars['event_values']['matched_step_values']
276 tmpl_vars['readable_values'] = []
277 for key, value in list(matched_values['values'].items()):
278 matched_label = matched_values['labels'].get(key)
269 tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"]
270 tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or ""
271 matched_values = tmpl_vars["event_values"]["matched_step_values"]
272 tmpl_vars["readable_values"] = []
273 for key, value in list(matched_values["values"].items()):
274 matched_label = matched_values["labels"].get(key)
279 275 if matched_label:
280 tmpl_vars['readable_values'].append({
281 'label': matched_label['human_label'],
282 'value': value
283 })
284 tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'],
285 key=lambda x: x['label'])
286 start_date = convert_date(tmpl_vars['event_values']['start_interval'])
276 tmpl_vars["readable_values"].append(
277 {"label": matched_label["human_label"], "value": value}
278 )
279 tmpl_vars["readable_values"] = sorted(
280 tmpl_vars["readable_values"], key=lambda x: x["label"]
281 )
282 start_date = convert_date(tmpl_vars["event_values"]["start_interval"])
287 283 end_date = None
288 if tmpl_vars['event_values'].get('end_interval'):
289 end_date = convert_date(tmpl_vars['event_values']['end_interval'])
284 if tmpl_vars["event_values"].get("end_interval"):
285 end_date = convert_date(tmpl_vars["event_values"]["end_interval"])
290 286
291 app_url = kwargs['request'].registry.settings['_mail_url']
292 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
287 app_url = kwargs["request"].registry.settings["_mail_url"]
288 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
293 289 to_encode = {
294 'resource': tmpl_vars['event_values']['resource'],
295 'start_date': start_date.strftime(DATE_FRMT),
290 "resource": tmpl_vars["event_values"]["resource"],
291 "start_date": start_date.strftime(DATE_FRMT),
296 292 }
297 293 if end_date:
298 to_encode['end_date'] = end_date.strftime(DATE_FRMT)
294 to_encode["end_date"] = end_date.strftime(DATE_FRMT)
299 295
300 destination_url += 'ui/{}?{}'.format(
301 'logs',
302 urllib.parse.urlencode(to_encode)
303 )
304 tmpl_vars['destination_url'] = destination_url
296 destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode))
297 tmpl_vars["destination_url"] = destination_url
305 298 return tmpl_vars
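These *_notification_vars helpers are the whole contract between AlertChannel and its concrete subclasses: each integration builds its message from the returned tmpl_vars and ships it over its own transport. A sketch of a hypothetical subclass following the pattern of the channels below (the "webhook" identity and post_json transport are invented for illustration, not part of this codebase):

    class WebhookAlertChannel(AlertChannel):
        __mapper_args__ = {"polymorphic_identity": "webhook"}

        def notify_reports(self, **kwargs):
            template_vars = self.report_alert_notification_vars(kwargs)
            payload = {
                "title": "%s - %s reports"
                % (template_vars["resource_name"], template_vars["confirmed_total"]),
                "link": template_vars["destination_url"],
            }
            post_json(self.channel_value, payload)  # hypothetical transport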
@@ -27,39 +27,42 b' class AlertChannelAction(Base, BaseModel):'
27 27 Stores notification conditions for a user's alert channels
28 28 This is later used for rule parsing like "alert if http_status == 500"
29 29 """
30 __tablename__ = 'alert_channels_actions'
31 30
32 types = ['report', 'chart']
31 __tablename__ = "alert_channels_actions"
33 32
34 owner_id = sa.Column(sa.Integer,
35 sa.ForeignKey('users.id', onupdate='CASCADE',
36 ondelete='CASCADE'))
33 types = ["report", "chart"]
34
35 owner_id = sa.Column(
36 sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
37 )
37 38 resource_id = sa.Column(sa.Integer())
38 action = sa.Column(sa.Unicode(10), nullable=False, default='always')
39 action = sa.Column(sa.Unicode(10), nullable=False, default="always")
39 40 type = sa.Column(sa.Unicode(10), nullable=False)
40 41 other_id = sa.Column(sa.Unicode(40))
41 42 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
42 rule = sa.Column(sa.dialects.postgresql.JSON,
43 nullable=False, default={'field': 'http_status',
44 "op": "ge", "value": "500"})
43 rule = sa.Column(
44 sa.dialects.postgresql.JSON,
45 nullable=False,
46 default={"field": "http_status", "op": "ge", "value": "500"},
47 )
45 48 config = sa.Column(sa.dialects.postgresql.JSON)
46 49 name = sa.Column(sa.Unicode(255))
47 50
48 @validates('notify_type')
51 @validates("notify_type")
49 52 def validate_email(self, key, notify_type):
50 assert notify_type in ['always', 'only_first']
53 assert notify_type in ["always", "only_first"]
51 54 return notify_type
52 55
53 56 def resource_name(self, db_session=None):
54 57 db_session = get_db_session(db_session)
55 58 if self.resource_id:
56 59 return ResourceService.by_resource_id(
57 self.resource_id, db_session=db_session).resource_name
60 self.resource_id, db_session=db_session
61 ).resource_name
58 62 else:
59 return 'any resource'
63 return "any resource"
60 64
61 def get_dict(self, exclude_keys=None, include_keys=None,
62 extended_info=False):
65 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
63 66 """
64 67 Returns dictionary with required information that will be consumed by
65 68 angular
@@ -68,12 +71,14 b' class AlertChannelAction(Base, BaseModel):'
68 71 exclude_keys_list = exclude_keys or []
69 72 include_keys_list = include_keys or []
70 73 if extended_info:
71 instance_dict['channels'] = [
72 c.get_dict(extended_info=False) for c in self.channels]
74 instance_dict["channels"] = [
75 c.get_dict(extended_info=False) for c in self.channels
76 ]
73 77
74 78 d = {}
75 79 for k in instance_dict.keys():
76 if (k not in exclude_keys_list and
77 (k in include_keys_list or not include_keys)):
80 if k not in exclude_keys_list and (
81 k in include_keys_list or not include_keys
82 ):
78 83 d[k] = instance_dict[k]
79 84 return d
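The closing filter in both get_dict implementations reduces to a single dict comprehension; a sketch of the semantics for reference:

    # Keep a key unless it is excluded; when include_keys is given, keep
    # only the keys explicitly listed there.
    d = {
        k: v
        for k, v in instance_dict.items()
        if k not in exclude_keys_list and (k in include_keys_list or not include_keys)
    }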
@@ -13,4 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
@@ -23,15 +23,13 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class CampfireAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'campfire'
28 }
26 __mapper_args__ = {"polymorphic_identity": "campfire"}
29 27
30 28 @property
31 29 def client(self):
32 30 client = CampfireIntegration.create_client(
33 self.integration.config['api_token'],
34 self.integration.config['account'])
31 self.integration.config["api_token"], self.integration.config["account"]
32 )
35 33 return client
36 34
37 35 def notify_reports(self, **kwargs):
@@ -48,37 +46,40 b' class CampfireAlertChannel(AlertChannel):'
48 46 """
49 47 template_vars = self.report_alert_notification_vars(kwargs)
50 48
51 app_url = kwargs['request'].registry.settings['_mail_url']
52 destination_url = kwargs['request'].route_url('/',
53 app_url=app_url)
54 f_args = ('report',
55 template_vars['resource'].resource_id,
56 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
57 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
58 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
59 *f_args)
60
61 if template_vars['confirmed_total'] > 1:
49 app_url = kwargs["request"].registry.settings["_mail_url"]
50 destination_url = kwargs["request"].route_url("/", app_url=app_url)
51 f_args = (
52 "report",
53 template_vars["resource"].resource_id,
54 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
55 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
56 )
57 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
58
59 if template_vars["confirmed_total"] > 1:
62 60 template_vars["title"] = "%s - %s reports" % (
63 template_vars['resource_name'],
64 template_vars['confirmed_total'],
61 template_vars["resource_name"],
62 template_vars["confirmed_total"],
65 63 )
66 64 else:
67 error_title = truncate(template_vars['reports'][0][1].error or
68 'slow report', 90)
65 error_title = truncate(
66 template_vars["reports"][0][1].error or "slow report", 90
67 )
69 68 template_vars["title"] = "%s - '%s' report" % (
70 template_vars['resource_name'],
71 error_title)
69 template_vars["resource_name"],
70 error_title,
71 )
72 72
73 template_vars["title"] += ' ' + destination_url
73 template_vars["title"] += " " + destination_url
74 74
75 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
76 kwargs['user'].user_name,
75 log_msg = "NOTIFY : %s via %s :: %s reports" % (
76 kwargs["user"].user_name,
77 77 self.channel_visible_value,
78 template_vars['confirmed_total'])
78 template_vars["confirmed_total"],
79 )
79 80 log.warning(log_msg)
80 81
81 for room in self.integration.config['rooms'].split(','):
82 for room in self.integration.config["rooms"].split(","):
82 83 self.client.speak_to_room(room.strip(), template_vars["title"])
83 84
84 85 def notify_report_alert(self, **kwargs):
@@ -94,23 +95,23 b' class CampfireAlertChannel(AlertChannel):'
94 95 """
95 96 template_vars = self.report_alert_notification_vars(kwargs)
96 97
97 if kwargs['event'].unified_alert_action() == 'OPEN':
98 title = 'ALERT %s: %s - %s %s %s' % (
99 template_vars['alert_action'],
100 template_vars['resource_name'],
101 kwargs['event'].values['reports'],
102 template_vars['report_type'],
103 template_vars['destination_url']
98 if kwargs["event"].unified_alert_action() == "OPEN":
99 title = "ALERT %s: %s - %s %s %s" % (
100 template_vars["alert_action"],
101 template_vars["resource_name"],
102 kwargs["event"].values["reports"],
103 template_vars["report_type"],
104 template_vars["destination_url"],
104 105 )
105 106
106 107 else:
107 title = 'ALERT %s: %s type: %s' % (
108 template_vars['alert_action'],
109 template_vars['resource_name'],
110 template_vars['alert_type'].replace('_', ' '),
108 title = "ALERT %s: %s type: %s" % (
109 template_vars["alert_action"],
110 template_vars["resource_name"],
111 template_vars["alert_type"].replace("_", " "),
111 112 )
112 for room in self.integration.config['rooms'].split(','):
113 self.client.speak_to_room(room.strip(), title, sound='VUVUZELA')
113 for room in self.integration.config["rooms"].split(","):
114 self.client.speak_to_room(room.strip(), title, sound="VUVUZELA")
114 115
115 116 def notify_uptime_alert(self, **kwargs):
116 117 """
@@ -125,15 +126,15 b' class CampfireAlertChannel(AlertChannel):'
125 126 """
126 127 template_vars = self.uptime_alert_notification_vars(kwargs)
127 128
128 message = 'ALERT %s: %s has uptime issues %s\n\n' % (
129 template_vars['alert_action'],
130 template_vars['resource_name'],
131 template_vars['destination_url']
129 message = "ALERT %s: %s has uptime issues %s\n\n" % (
130 template_vars["alert_action"],
131 template_vars["resource_name"],
132 template_vars["destination_url"],
132 133 )
133 message += template_vars['reason']
134 message += template_vars["reason"]
134 135
135 for room in self.integration.config['rooms'].split(','):
136 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
136 for room in self.integration.config["rooms"].split(","):
137 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
137 138
138 139 def send_digest(self, **kwargs):
139 140 """
@@ -148,17 +149,17 b' class CampfireAlertChannel(AlertChannel):'
148 149
149 150 """
150 151 template_vars = self.report_alert_notification_vars(kwargs)
151 f_args = (template_vars['resource_name'],
152 template_vars['confirmed_total'],)
152 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
153 153 message = "Daily report digest: %s - %s reports" % f_args
154 message += '{}\n'.format(template_vars['destination_url'])
155 for room in self.integration.config['rooms'].split(','):
154 message += "{}\n".format(template_vars["destination_url"])
155 for room in self.integration.config["rooms"].split(","):
156 156 self.client.speak_to_room(room.strip(), message)
157 157
158 log_msg = 'DIGEST : %s via %s :: %s reports' % (
159 kwargs['user'].user_name,
158 log_msg = "DIGEST : %s via %s :: %s reports" % (
159 kwargs["user"].user_name,
160 160 self.channel_visible_value,
161 template_vars['confirmed_total'])
161 template_vars["confirmed_total"],
162 )
162 163 log.warning(log_msg)
163 164
164 165 def notify_chart_alert(self, **kwargs):
@@ -173,16 +174,18 b' class CampfireAlertChannel(AlertChannel):'
173 174
174 175 """
175 176 template_vars = self.chart_alert_notification_vars(kwargs)
176 message = 'ALERT {}: value in "{}" chart: ' \
177 message = (
178 'ALERT {}: value in "{}" chart: '
177 179 'met alert "{}" criteria {} \n'.format(
178 template_vars['alert_action'],
179 template_vars['chart_name'],
180 template_vars['action_name'],
181 template_vars['destination_url']
180 template_vars["alert_action"],
181 template_vars["chart_name"],
182 template_vars["action_name"],
183 template_vars["destination_url"],
184 )
182 185 )
183 186
184 for item in template_vars['readable_values']:
185 message += '{}: {}\n'.format(item['label'], item['value'])
187 for item in template_vars["readable_values"]:
188 message += "{}: {}\n".format(item["label"], item["value"])
186 189
187 for room in self.integration.config['rooms'].split(','):
188 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
190 for room in self.integration.config["rooms"].split(","):
191 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
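One black behavior visible throughout these hunks: adjacent (implicitly concatenated) string literals are re-wrapped but never merged, and quote normalization is skipped when double quotes would force escaping. That is why pairs like the chart-alert message above survive as two literals, sometimes collapsed onto one line. A sketch (argument names shortened for illustration):

    # Before: backslash continuation between two implicitly joined literals.
    message = 'ALERT {}: value in "{}" chart ' \
              'met alert "{}" criteria'.format(action, chart, name)

    # After black: still two literals; single quotes kept because the text
    # contains double quotes. Only the wrapping changed.
    message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
        action, chart, name
    )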
@@ -27,9 +27,7 b' class EmailAlertChannel(AlertChannel):'
27 27 Default email alerting channel
28 28 """
29 29
30 __mapper_args__ = {
31 'polymorphic_identity': 'email'
32 }
30 __mapper_args__ = {"polymorphic_identity": "email"}
33 31
34 32 def notify_reports(self, **kwargs):
35 33 """
@@ -45,25 +43,30 b' class EmailAlertChannel(AlertChannel):'
45 43 """
46 44 template_vars = self.report_alert_notification_vars(kwargs)
47 45
48 if template_vars['confirmed_total'] > 1:
46 if template_vars["confirmed_total"] > 1:
49 47 template_vars["title"] = "AppEnlight :: %s - %s reports" % (
50 template_vars['resource_name'],
51 template_vars['confirmed_total'],
48 template_vars["resource_name"],
49 template_vars["confirmed_total"],
52 50 )
53 51 else:
54 error_title = truncate(template_vars['reports'][0][1].error or
55 'slow report', 20)
52 error_title = truncate(
53 template_vars["reports"][0][1].error or "slow report", 20
54 )
56 55 template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
57 template_vars['resource_name'],
58 error_title)
59 UserService.send_email(kwargs['request'],
56 template_vars["resource_name"],
57 error_title,
58 )
59 UserService.send_email(
60 kwargs["request"],
60 61 [self.channel_value],
61 62 template_vars,
62 '/email_templates/notify_reports.jinja2')
63 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
64 kwargs['user'].user_name,
63 "/email_templates/notify_reports.jinja2",
64 )
65 log_msg = "NOTIFY : %s via %s :: %s reports" % (
66 kwargs["user"].user_name,
65 67 self.channel_visible_value,
66 template_vars['confirmed_total'])
68 template_vars["confirmed_total"],
69 )
67 70 log.warning(log_msg)
68 71
69 72 def send_digest(self, **kwargs):
@@ -81,20 +84,23 b' class EmailAlertChannel(AlertChannel):'
81 84 template_vars = self.report_alert_notification_vars(kwargs)
82 85 title = "AppEnlight :: Daily report digest: %s - %s reports"
83 86 template_vars["email_title"] = title % (
84 template_vars['resource_name'],
85 template_vars['confirmed_total'],
87 template_vars["resource_name"],
88 template_vars["confirmed_total"],
86 89 )
87 90
88 UserService.send_email(kwargs['request'],
91 UserService.send_email(
92 kwargs["request"],
89 93 [self.channel_value],
90 94 template_vars,
91 '/email_templates/notify_reports.jinja2',
95 "/email_templates/notify_reports.jinja2",
92 96 immediately=True,
93 silent=True)
94 log_msg = 'DIGEST : %s via %s :: %s reports' % (
95 kwargs['user'].user_name,
97 silent=True,
98 )
99 log_msg = "DIGEST : %s via %s :: %s reports" % (
100 kwargs["user"].user_name,
96 101 self.channel_visible_value,
97 template_vars['confirmed_total'])
102 template_vars["confirmed_total"],
103 )
98 104 log.warning(log_msg)
99 105
100 106 def notify_report_alert(self, **kwargs):
@@ -110,23 +116,26 b' class EmailAlertChannel(AlertChannel):'
110 116 """
111 117 template_vars = self.report_alert_notification_vars(kwargs)
112 118
113 if kwargs['event'].unified_alert_action() == 'OPEN':
114 title = 'AppEnlight :: ALERT %s: %s - %s %s' % (
115 template_vars['alert_action'],
116 template_vars['resource_name'],
117 kwargs['event'].values['reports'],
118 template_vars['report_type'],
119 if kwargs["event"].unified_alert_action() == "OPEN":
120 title = "AppEnlight :: ALERT %s: %s - %s %s" % (
121 template_vars["alert_action"],
122 template_vars["resource_name"],
123 kwargs["event"].values["reports"],
124 template_vars["report_type"],
119 125 )
120 126 else:
121 title = 'AppEnlight :: ALERT %s: %s type: %s' % (
122 template_vars['alert_action'],
123 template_vars['resource_name'],
124 template_vars['alert_type'].replace('_', ' '),
127 title = "AppEnlight :: ALERT %s: %s type: %s" % (
128 template_vars["alert_action"],
129 template_vars["resource_name"],
130 template_vars["alert_type"].replace("_", " "),
125 131 )
126 template_vars['email_title'] = title
127 UserService.send_email(kwargs['request'], [self.channel_value],
132 template_vars["email_title"] = title
133 UserService.send_email(
134 kwargs["request"],
135 [self.channel_value],
128 136 template_vars,
129 '/email_templates/alert_reports.jinja2')
137 "/email_templates/alert_reports.jinja2",
138 )
130 139
131 140 def notify_uptime_alert(self, **kwargs):
132 141 """
@@ -140,15 +149,18 b' class EmailAlertChannel(AlertChannel):'
140 149
141 150 """
142 151 template_vars = self.uptime_alert_notification_vars(kwargs)
143 title = 'AppEnlight :: ALERT %s: %s has uptime issues' % (
144 template_vars['alert_action'],
145 template_vars['resource_name'],
152 title = "AppEnlight :: ALERT %s: %s has uptime issues" % (
153 template_vars["alert_action"],
154 template_vars["resource_name"],
146 155 )
147 template_vars['email_title'] = title
156 template_vars["email_title"] = title
148 157
149 UserService.send_email(kwargs['request'], [self.channel_value],
158 UserService.send_email(
159 kwargs["request"],
160 [self.channel_value],
150 161 template_vars,
151 '/email_templates/alert_uptime.jinja2')
162 "/email_templates/alert_uptime.jinja2",
163 )
152 164
153 165 def notify_chart_alert(self, **kwargs):
154 166 """
@@ -163,13 +175,18 b' class EmailAlertChannel(AlertChannel):'
163 175 """
164 176 template_vars = self.chart_alert_notification_vars(kwargs)
165 177
166 title = 'AppEnlight :: ALERT {} value in "{}" chart' \
178 title = (
179 'AppEnlight :: ALERT {} value in "{}" chart'
167 180 ' met alert "{}" criteria'.format(
168 template_vars['alert_action'],
169 template_vars['chart_name'],
170 template_vars['action_name'],
181 template_vars["alert_action"],
182 template_vars["chart_name"],
183 template_vars["action_name"],
171 184 )
172 template_vars['email_title'] = title
173 UserService.send_email(kwargs['request'], [self.channel_value],
185 )
186 template_vars["email_title"] = title
187 UserService.send_email(
188 kwargs["request"],
189 [self.channel_value],
174 190 template_vars,
175 '/email_templates/alert_chart.jinja2')
191 "/email_templates/alert_chart.jinja2",
192 )
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class FlowdockAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'flowdock'
28 }
26 __mapper_args__ = {"polymorphic_identity": "flowdock"}
29 27
30 28 def notify_reports(self, **kwargs):
31 29 """
@@ -41,44 +39,45 b' class FlowdockAlertChannel(AlertChannel):'
41 39 """
42 40 template_vars = self.report_alert_notification_vars(kwargs)
43 41
44 app_url = kwargs['request'].registry.settings['_mail_url']
45 destination_url = kwargs['request'].route_url('/',
46 _app_url=app_url)
47 f_args = ('report',
48 template_vars['resource'].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
52 *f_args)
53
54 if template_vars['confirmed_total'] > 1:
42 app_url = kwargs["request"].registry.settings["_mail_url"]
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
44 f_args = (
45 "report",
46 template_vars["resource"].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
49 )
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
51
52 if template_vars["confirmed_total"] > 1:
55 53 template_vars["title"] = "%s - %s reports" % (
56 template_vars['resource_name'],
57 template_vars['confirmed_total'],
54 template_vars["resource_name"],
55 template_vars["confirmed_total"],
58 56 )
59 57 else:
60 error_title = truncate(template_vars['reports'][0][1].error or
61 'slow report', 90)
58 error_title = truncate(
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
62 61 template_vars["title"] = "%s - '%s' report" % (
63 template_vars['resource_name'],
64 error_title)
62 template_vars["resource_name"],
63 error_title,
64 )
65 65
66 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
67 kwargs['user'].user_name,
66 log_msg = "NOTIFY : %s via %s :: %s reports" % (
67 kwargs["user"].user_name,
68 68 self.channel_visible_value,
69 template_vars['confirmed_total'])
69 template_vars["confirmed_total"],
70 )
70 71 log.warning(log_msg)
71 72
72 client = FlowdockIntegration.create_client(
73 self.integration.config['api_token'])
73 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
74 74 payload = {
75 75 "source": "AppEnlight",
76 "from_address": kwargs['request'].registry.settings[
77 'mailing.from_email'],
76 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
78 77 "subject": template_vars["title"],
79 78 "content": "New report present",
80 79 "tags": ["appenlight"],
81 "link": destination_url
80 "link": destination_url,
82 81 }
83 82 client.send_to_inbox(payload)
84 83
@@ -95,32 +94,30 b' class FlowdockAlertChannel(AlertChannel):'
95 94 """
96 95 template_vars = self.report_alert_notification_vars(kwargs)
97 96
98 if kwargs['event'].unified_alert_action() == 'OPEN':
97 if kwargs["event"].unified_alert_action() == "OPEN":
99 98
100 title = 'ALERT %s: %s - %s %s' % (
101 template_vars['alert_action'],
102 template_vars['resource_name'],
103 kwargs['event'].values['reports'],
104 template_vars['report_type'],
99 title = "ALERT %s: %s - %s %s" % (
100 template_vars["alert_action"],
101 template_vars["resource_name"],
102 kwargs["event"].values["reports"],
103 template_vars["report_type"],
105 104 )
106 105
107 106 else:
108 title = 'ALERT %s: %s type: %s' % (
109 template_vars['alert_action'],
110 template_vars['resource_name'],
111 template_vars['alert_type'].replace('_', ' '),
107 title = "ALERT %s: %s type: %s" % (
108 template_vars["alert_action"],
109 template_vars["resource_name"],
110 template_vars["alert_type"].replace("_", " "),
112 111 )
113 112
114 client = FlowdockIntegration.create_client(
115 self.integration.config['api_token'])
113 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
116 114 payload = {
117 115 "source": "AppEnlight",
118 "from_address": kwargs['request'].registry.settings[
119 'mailing.from_email'],
116 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
120 117 "subject": title,
121 "content": 'Investigation required',
122 "tags": ["appenlight", "alert", template_vars['alert_type']],
123 "link": template_vars['destination_url']
118 "content": "Investigation required",
119 "tags": ["appenlight", "alert", template_vars["alert_type"]],
120 "link": template_vars["destination_url"],
124 121 }
125 122 client.send_to_inbox(payload)
126 123
@@ -137,23 +134,21 b' class FlowdockAlertChannel(AlertChannel):'
137 134 """
138 135 template_vars = self.uptime_alert_notification_vars(kwargs)
139 136
140 message = 'ALERT %s: %s has uptime issues' % (
141 template_vars['alert_action'],
142 template_vars['resource_name'],
137 message = "ALERT %s: %s has uptime issues" % (
138 template_vars["alert_action"],
139 template_vars["resource_name"],
143 140 )
144 submessage = 'Info: '
145 submessage += template_vars['reason']
141 submessage = "Info: "
142 submessage += template_vars["reason"]
146 143
147 client = FlowdockIntegration.create_client(
148 self.integration.config['api_token'])
144 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
149 145 payload = {
150 146 "source": "AppEnlight",
151 "from_address": kwargs['request'].registry.settings[
152 'mailing.from_email'],
147 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
153 148 "subject": message,
154 149 "content": submessage,
155 "tags": ["appenlight", "alert", 'uptime'],
156 "link": template_vars['destination_url']
150 "tags": ["appenlight", "alert", "uptime"],
151 "link": template_vars["destination_url"],
157 152 }
158 153 client.send_to_inbox(payload)
159 154
@@ -171,29 +166,29 b' class FlowdockAlertChannel(AlertChannel):'
171 166 """
172 167 template_vars = self.report_alert_notification_vars(kwargs)
173 168 message = "Daily report digest: %s - %s reports" % (
174 template_vars['resource_name'], template_vars['confirmed_total'])
169 template_vars["resource_name"],
170 template_vars["confirmed_total"],
171 )
175 172
176 f_args = (template_vars['confirmed_total'],
177 template_vars['timestamp'])
173 f_args = (template_vars["confirmed_total"], template_vars["timestamp"])
178 174
179 175 payload = {
180 176 "source": "AppEnlight",
181 "from_address": kwargs['request'].registry.settings[
182 'mailing.from_email'],
177 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
183 178 "subject": message,
184 "content": '%s reports in total since %s' % f_args,
179 "content": "%s reports in total since %s" % f_args,
185 180 "tags": ["appenlight", "digest"],
186 "link": template_vars['destination_url']
181 "link": template_vars["destination_url"],
187 182 }
188 183
189 client = FlowdockIntegration.create_client(
190 self.integration.config['api_token'])
184 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
191 185 client.send_to_inbox(payload)
192 186
193 log_msg = 'DIGEST : %s via %s :: %s reports' % (
194 kwargs['user'].user_name,
187 log_msg = "DIGEST : %s via %s :: %s reports" % (
188 kwargs["user"].user_name,
195 189 self.channel_visible_value,
196 template_vars['confirmed_total'])
190 template_vars["confirmed_total"],
191 )
197 192 log.warning(log_msg)
198 193
199 194 def notify_chart_alert(self, **kwargs):
@@ -209,25 +204,22 b' class FlowdockAlertChannel(AlertChannel):'
209 204 """
210 205 template_vars = self.chart_alert_notification_vars(kwargs)
211 206
212 message = 'ALERT {}: value in "{}" chart ' \
213 'met alert "{}" criteria'.format(
214 template_vars['alert_action'],
215 template_vars['chart_name'],
216 template_vars['action_name'],
207 message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
208 template_vars["alert_action"],
209 template_vars["chart_name"],
210 template_vars["action_name"],
217 211 )
218 submessage = 'Info: '
219 for item in template_vars['readable_values']:
220 submessage += '{}: {}\n'.format(item['label'], item['value'])
212 submessage = "Info: "
213 for item in template_vars["readable_values"]:
214 submessage += "{}: {}\n".format(item["label"], item["value"])
221 215
222 client = FlowdockIntegration.create_client(
223 self.integration.config['api_token'])
216 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
224 217 payload = {
225 218 "source": "AppEnlight",
226 "from_address": kwargs['request'].registry.settings[
227 'mailing.from_email'],
219 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
228 220 "subject": message,
229 221 "content": submessage,
230 "tags": ["appenlight", "alert", 'chart'],
231 "link": template_vars['destination_url']
222 "tags": ["appenlight", "alert", "chart"],
223 "link": template_vars["destination_url"],
232 224 }
233 225 client.send_to_inbox(payload)
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class HipchatAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'hipchat'
28 }
26 __mapper_args__ = {"polymorphic_identity": "hipchat"}
29 27
30 28 def notify_reports(self, **kwargs):
31 29 """
@@ -41,46 +39,50 b' class HipchatAlertChannel(AlertChannel):'
41 39 """
42 40 template_vars = self.report_alert_notification_vars(kwargs)
43 41
44 app_url = kwargs['request'].registry.settings['_mail_url']
45 destination_url = kwargs['request'].route_url('/',
46 _app_url=app_url)
47 f_args = ('report',
48 template_vars['resource'].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
52 *f_args)
53
54 if template_vars['confirmed_total'] > 1:
42 app_url = kwargs["request"].registry.settings["_mail_url"]
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
44 f_args = (
45 "report",
46 template_vars["resource"].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
49 )
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
51
52 if template_vars["confirmed_total"] > 1:
55 53 template_vars["title"] = "%s - %s reports" % (
56 template_vars['resource_name'],
57 template_vars['confirmed_total'],
54 template_vars["resource_name"],
55 template_vars["confirmed_total"],
58 56 )
59 57 else:
60 error_title = truncate(template_vars['reports'][0][1].error or
61 'slow report', 90)
58 error_title = truncate(
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
62 61 template_vars["title"] = "%s - '%s' report" % (
63 template_vars['resource_name'],
64 error_title)
62 template_vars["resource_name"],
63 error_title,
64 )
65 65
66 template_vars["title"] += ' ' + destination_url
66 template_vars["title"] += " " + destination_url
67 67
68 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
69 kwargs['user'].user_name,
68 log_msg = "NOTIFY : %s via %s :: %s reports" % (
69 kwargs["user"].user_name,
70 70 self.channel_visible_value,
71 template_vars['confirmed_total'])
71 template_vars["confirmed_total"],
72 )
72 73 log.warning(log_msg)
73 74
74 client = HipchatIntegration.create_client(
75 self.integration.config['api_token'])
76 for room in self.integration.config['rooms'].split(','):
77 client.send({
78 "message_format": 'text',
75 client = HipchatIntegration.create_client(self.integration.config["api_token"])
76 for room in self.integration.config["rooms"].split(","):
77 client.send(
78 {
79 "message_format": "text",
79 80 "message": template_vars["title"],
80 81 "from": "AppEnlight",
81 82 "room_id": room.strip(),
82 "color": "yellow"
83 })
83 "color": "yellow",
84 }
85 )
84 86
85 87 def notify_report_alert(self, **kwargs):
86 88 """
@@ -95,35 +97,37 b' class HipchatAlertChannel(AlertChannel):'
95 97 """
96 98 template_vars = self.report_alert_notification_vars(kwargs)
97 99
98 if kwargs['event'].unified_alert_action() == 'OPEN':
100 if kwargs["event"].unified_alert_action() == "OPEN":
99 101
100 title = 'ALERT %s: %s - %s %s' % (
101 template_vars['alert_action'],
102 template_vars['resource_name'],
103 kwargs['event'].values['reports'],
104 template_vars['report_type'],
102 title = "ALERT %s: %s - %s %s" % (
103 template_vars["alert_action"],
104 template_vars["resource_name"],
105 kwargs["event"].values["reports"],
106 template_vars["report_type"],
105 107 )
106 108
107 109 else:
108 title = 'ALERT %s: %s type: %s' % (
109 template_vars['alert_action'],
110 template_vars['resource_name'],
111 template_vars['alert_type'].replace('_', ' '),
110 title = "ALERT %s: %s type: %s" % (
111 template_vars["alert_action"],
112 template_vars["resource_name"],
113 template_vars["alert_type"].replace("_", " "),
112 114 )
113 115
114 title += '\n ' + template_vars['destination_url']
116 title += "\n " + template_vars["destination_url"]
115 117
116 api_token = self.integration.config['api_token']
118 api_token = self.integration.config["api_token"]
117 119 client = HipchatIntegration.create_client(api_token)
118 for room in self.integration.config['rooms'].split(','):
119 client.send({
120 "message_format": 'text',
120 for room in self.integration.config["rooms"].split(","):
121 client.send(
122 {
123 "message_format": "text",
121 124 "message": title,
122 125 "from": "AppEnlight",
123 126 "room_id": room.strip(),
124 127 "color": "red",
125 "notify": '1'
126 })
128 "notify": "1",
129 }
130 )
127 131
128 132 def notify_uptime_alert(self, **kwargs):
129 133 """
@@ -138,24 +142,26 b' class HipchatAlertChannel(AlertChannel):'
138 142 """
139 143 template_vars = self.uptime_alert_notification_vars(kwargs)
140 144
141 message = 'ALERT %s: %s has uptime issues\n' % (
142 template_vars['alert_action'],
143 template_vars['resource_name'],
145 message = "ALERT %s: %s has uptime issues\n" % (
146 template_vars["alert_action"],
147 template_vars["resource_name"],
144 148 )
145 message += template_vars['reason']
146 message += '\n{}'.format(template_vars['destination_url'])
149 message += template_vars["reason"]
150 message += "\n{}".format(template_vars["destination_url"])
147 151
148 api_token = self.integration.config['api_token']
152 api_token = self.integration.config["api_token"]
149 153 client = HipchatIntegration.create_client(api_token)
150 for room in self.integration.config['rooms'].split(','):
151 client.send({
152 "message_format": 'text',
154 for room in self.integration.config["rooms"].split(","):
155 client.send(
156 {
157 "message_format": "text",
153 158 "message": message,
154 159 "from": "AppEnlight",
155 160 "room_id": room.strip(),
156 161 "color": "red",
157 "notify": '1'
158 })
162 "notify": "1",
163 }
164 )
159 165
160 166 def notify_chart_alert(self, **kwargs):
161 167 """
@@ -169,29 +175,30 b' class HipchatAlertChannel(AlertChannel):'
169 175
170 176 """
171 177 template_vars = self.chart_alert_notification_vars(kwargs)
172 message = 'ALERT {}: value in "{}" chart: ' \
173 'met alert "{}" criteria\n'.format(
174 template_vars['alert_action'],
175 template_vars['chart_name'],
176 template_vars['action_name'],
178 message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format(
179 template_vars["alert_action"],
180 template_vars["chart_name"],
181 template_vars["action_name"],
177 182 )
178 183
179 for item in template_vars['readable_values']:
180 message += '{}: {}\n'.format(item['label'], item['value'])
184 for item in template_vars["readable_values"]:
185 message += "{}: {}\n".format(item["label"], item["value"])
181 186
182 message += template_vars['destination_url']
187 message += template_vars["destination_url"]
183 188
184 api_token = self.integration.config['api_token']
189 api_token = self.integration.config["api_token"]
185 190 client = HipchatIntegration.create_client(api_token)
186 for room in self.integration.config['rooms'].split(','):
187 client.send({
188 "message_format": 'text',
191 for room in self.integration.config["rooms"].split(","):
192 client.send(
193 {
194 "message_format": "text",
189 195 "message": message,
190 196 "from": "AppEnlight",
191 197 "room_id": room.strip(),
192 198 "color": "red",
193 "notify": '1'
194 })
199 "notify": "1",
200 }
201 )
195 202
196 203 def send_digest(self, **kwargs):
197 204 """
@@ -206,24 +213,26 b' class HipchatAlertChannel(AlertChannel):'
206 213
207 214 """
208 215 template_vars = self.report_alert_notification_vars(kwargs)
209 f_args = (template_vars['resource_name'],
210 template_vars['confirmed_total'],)
216 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
211 217 message = "Daily report digest: %s - %s reports" % f_args
212 message += '\n{}'.format(template_vars['destination_url'])
213 api_token = self.integration.config['api_token']
218 message += "\n{}".format(template_vars["destination_url"])
219 api_token = self.integration.config["api_token"]
214 220 client = HipchatIntegration.create_client(api_token)
215 for room in self.integration.config['rooms'].split(','):
216 client.send({
217 "message_format": 'text',
221 for room in self.integration.config["rooms"].split(","):
222 client.send(
223 {
224 "message_format": "text",
218 225 "message": message,
219 226 "from": "AppEnlight",
220 227 "room_id": room.strip(),
221 228 "color": "green",
222 "notify": '1'
223 })
229 "notify": "1",
230 }
231 )
224 232
225 log_msg = 'DIGEST : %s via %s :: %s reports' % (
226 kwargs['user'].user_name,
233 log_msg = "DIGEST : %s via %s :: %s reports" % (
234 kwargs["user"].user_name,
227 235 self.channel_visible_value,
228 template_vars['confirmed_total'])
236 template_vars["confirmed_total"],
237 )
229 238 log.warning(log_msg)
@@ -23,9 +23,7 b' log = logging.getLogger(__name__)'
23 23
24 24
25 25 class SlackAlertChannel(AlertChannel):
26 __mapper_args__ = {
27 'polymorphic_identity': 'slack'
28 }
26 __mapper_args__ = {"polymorphic_identity": "slack"}
29 27
30 28 def notify_reports(self, **kwargs):
31 29 """
@@ -40,45 +38,40 b' class SlackAlertChannel(AlertChannel):'
40 38
41 39 """
42 40 template_vars = self.report_alert_notification_vars(kwargs)
43 template_vars["title"] = template_vars['resource_name']
41 template_vars["title"] = template_vars["resource_name"]
44 42
45 if template_vars['confirmed_total'] > 1:
46 template_vars['subtext'] = '%s reports' % template_vars[
47 'confirmed_total']
43 if template_vars["confirmed_total"] > 1:
44 template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"]
48 45 else:
49 error_title = truncate(template_vars['reports'][0][1].error or
50 'slow report', 90)
51 template_vars['subtext'] = error_title
46 error_title = truncate(
47 template_vars["reports"][0][1].error or "slow report", 90
48 )
49 template_vars["subtext"] = error_title
52 50
53 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
54 kwargs['user'].user_name,
51 log_msg = "NOTIFY : %s via %s :: %s reports" % (
52 kwargs["user"].user_name,
55 53 self.channel_visible_value,
56 template_vars['confirmed_total'])
54 template_vars["confirmed_total"],
55 )
57 56 log.warning(log_msg)
58 57
59 client = SlackIntegration.create_client(
60 self.integration.config['webhook_url'])
58 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
61 59 report_data = {
62 60 "username": "AppEnlight",
63 61 "icon_emoji": ":fire:",
64 62 "attachments": [
65 63 {
66 64 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
67 "fallback": "*%s* - <%s| Browse>" % (
68 template_vars["title"],
69 template_vars['destination_url']),
70 "pretext": "*%s* - <%s| Browse>" % (
71 template_vars["title"],
72 template_vars['destination_url']),
65 "fallback": "*%s* - <%s| Browse>"
66 % (template_vars["title"], template_vars["destination_url"]),
67 "pretext": "*%s* - <%s| Browse>"
68 % (template_vars["title"], template_vars["destination_url"]),
73 69 "color": "warning",
74 70 "fields": [
75 {
76 "value": 'Info: %s' % template_vars['subtext'],
77 "short": False
71 {"value": "Info: %s" % template_vars["subtext"], "short": False}
72 ],
78 73 }
79 ]
80 }
81 ]
74 ],
82 75 }
83 76 client.make_request(data=report_data)
84 77
@@ -95,53 +88,51 b' class SlackAlertChannel(AlertChannel):'
95 88 """
96 89 template_vars = self.report_alert_notification_vars(kwargs)
97 90
98 if kwargs['event'].unified_alert_action() == 'OPEN':
99 title = '*ALERT %s*: %s' % (
100 template_vars['alert_action'],
101 template_vars['resource_name']
91 if kwargs["event"].unified_alert_action() == "OPEN":
92 title = "*ALERT %s*: %s" % (
93 template_vars["alert_action"],
94 template_vars["resource_name"],
102 95 )
103 96
104 template_vars['subtext'] = 'Got at least %s %s' % (
105 kwargs['event'].values['reports'],
106 template_vars['report_type']
97 template_vars["subtext"] = "Got at least %s %s" % (
98 kwargs["event"].values["reports"],
99 template_vars["report_type"],
107 100 )
108 101
109 102 else:
110 title = '*ALERT %s*: %s' % (
111 template_vars['alert_action'],
112 template_vars['resource_name'],
103 title = "*ALERT %s*: %s" % (
104 template_vars["alert_action"],
105 template_vars["resource_name"],
113 106 )
114 107
115 template_vars['subtext'] = ''
108 template_vars["subtext"] = ""
116 109
117 alert_type = template_vars['alert_type'].replace('_', ' ')
118 alert_type = alert_type.replace('alert', '').capitalize()
110 alert_type = template_vars["alert_type"].replace("_", " ")
111 alert_type = alert_type.replace("alert", "").capitalize()
119 112
120 template_vars['type'] = "Type: %s" % alert_type
113 template_vars["type"] = "Type: %s" % alert_type
121 114
122 client = SlackIntegration.create_client(
123 self.integration.config['webhook_url']
124 )
115 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
125 116 report_data = {
126 117 "username": "AppEnlight",
127 118 "icon_emoji": ":rage:",
128 119 "attachments": [
129 120 {
130 121 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
131 "fallback": "%s - <%s| Browse>" % (
132 title, template_vars['destination_url']),
133 "pretext": "%s - <%s| Browse>" % (
134 title, template_vars['destination_url']),
122 "fallback": "%s - <%s| Browse>"
123 % (title, template_vars["destination_url"]),
124 "pretext": "%s - <%s| Browse>"
125 % (title, template_vars["destination_url"]),
135 126 "color": "danger",
136 127 "fields": [
137 128 {
138 "title": template_vars['type'],
139 "value": template_vars['subtext'],
140 "short": False
129 "title": template_vars["type"],
130 "value": template_vars["subtext"],
131 "short": False,
141 132 }
142 ]
133 ],
143 134 }
144 ]
135 ],
145 136 }
146 137 client.make_request(data=report_data)
147 138
@@ -158,13 +149,11 b' class SlackAlertChannel(AlertChannel):'
158 149 """
159 150 template_vars = self.uptime_alert_notification_vars(kwargs)
160 151
161 title = '*ALERT %s*: %s' % (
162 template_vars['alert_action'],
163 template_vars['resource_name'],
164 )
165 client = SlackIntegration.create_client(
166 self.integration.config['webhook_url']
152 title = "*ALERT %s*: %s" % (
153 template_vars["alert_action"],
154 template_vars["resource_name"],
167 155 )
156 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
168 157 report_data = {
169 158 "username": "AppEnlight",
170 159 "icon_emoji": ":rage:",
@@ -172,19 +161,21 b' class SlackAlertChannel(AlertChannel):'
172 161 {
173 162 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
174 163 "fallback": "{} - <{}| Browse>".format(
175 title, template_vars['destination_url']),
164 title, template_vars["destination_url"]
165 ),
176 166 "pretext": "{} - <{}| Browse>".format(
177 title, template_vars['destination_url']),
167 title, template_vars["destination_url"]
168 ),
178 169 "color": "danger",
179 170 "fields": [
180 171 {
181 172 "title": "Application has uptime issues",
182 "value": template_vars['reason'],
183 "short": False
173 "value": template_vars["reason"],
174 "short": False,
184 175 }
185 ]
176 ],
186 177 }
187 ]
178 ],
188 179 }
189 180 client.make_request(data=report_data)
190 181
@@ -201,39 +192,39 b' class SlackAlertChannel(AlertChannel):'
201 192 """
202 193 template_vars = self.chart_alert_notification_vars(kwargs)
203 194
204 title = '*ALERT {}*: value in *"{}"* chart ' \
205 'met alert *"{}"* criteria'.format(
206 template_vars['alert_action'],
207 template_vars['chart_name'],
208 template_vars['action_name'],
195 title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format(
196 template_vars["alert_action"],
197 template_vars["chart_name"],
198 template_vars["action_name"],
209 199 )
210 200
211 subtext = ''
212 for item in template_vars['readable_values']:
213 subtext += '{} - {}\n'.format(item['label'], item['value'])
201 subtext = ""
202 for item in template_vars["readable_values"]:
203 subtext += "{} - {}\n".format(item["label"], item["value"])
214 204
215 client = SlackIntegration.create_client(
216 self.integration.config['webhook_url']
217 )
205 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
218 206 report_data = {
219 207 "username": "AppEnlight",
220 208 "icon_emoji": ":rage:",
221 209 "attachments": [
222 {"mrkdwn_in": ["text", "pretext", "title", "fallback"],
210 {
211 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
223 212 "fallback": "{} - <{}| Browse>".format(
224 title, template_vars['destination_url']),
213 title, template_vars["destination_url"]
214 ),
225 215 "pretext": "{} - <{}| Browse>".format(
226 title, template_vars['destination_url']),
216 title, template_vars["destination_url"]
217 ),
227 218 "color": "danger",
228 219 "fields": [
229 220 {
230 221 "title": "Following criteria were met:",
231 222 "value": subtext,
232 "short": False
223 "short": False,
233 224 }
234 ]
225 ],
235 226 }
236 ]
227 ],
237 228 }
238 229 client.make_request(data=report_data)
239 230
@@ -250,36 +241,30 b' class SlackAlertChannel(AlertChannel):'
250 241
251 242 """
252 243 template_vars = self.report_alert_notification_vars(kwargs)
253 title = "*Daily report digest*: %s" % template_vars['resource_name']
244 title = "*Daily report digest*: %s" % template_vars["resource_name"]
254 245
255 subtext = '%s reports' % template_vars['confirmed_total']
246 subtext = "%s reports" % template_vars["confirmed_total"]
256 247
257 client = SlackIntegration.create_client(
258 self.integration.config['webhook_url']
259 )
248 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
260 249 report_data = {
261 250 "username": "AppEnlight",
262 251 "attachments": [
263 252 {
264 253 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
265 "fallback": "%s : <%s| Browse>" % (
266 title, template_vars['destination_url']),
267 "pretext": "%s: <%s| Browse>" % (
268 title, template_vars['destination_url']),
254 "fallback": "%s : <%s| Browse>"
255 % (title, template_vars["destination_url"]),
256 "pretext": "%s: <%s| Browse>"
257 % (title, template_vars["destination_url"]),
269 258 "color": "good",
270 "fields": [
271 {
272 "title": "Got at least: %s" % subtext,
273 "short": False
274 }
275 ]
259 "fields": [{"title": "Got at least: %s" % subtext, "short": False}],
276 260 }
277 ]
261 ],
278 262 }
279 263 client.make_request(data=report_data)
280 264
281 log_msg = 'DIGEST : %s via %s :: %s reports' % (
282 kwargs['user'].user_name,
265 log_msg = "DIGEST : %s via %s :: %s reports" % (
266 kwargs["user"].user_name,
283 267 self.channel_visible_value,
284 template_vars['confirmed_total'])
268 template_vars["confirmed_total"],
269 )
285 270 log.warning(log_msg)
@@ -24,7 +24,7 b' log = logging.getLogger(__name__)'
24 24
25 25
26 26 def generate_api_key():
27 uid = str(uuid.uuid4()).replace('-', '')
27 uid = str(uuid.uuid4()).replace("-", "")
28 28 return uid[0:32]
29 29
30 30
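`generate_api_key` strips the dashes from a UUID4 and keeps the first 32 hex characters, which matches the `sa.String(32)` width of the `api_key` and `public_key` columns below. A standalone sketch of the same recipe:

    import uuid

    def generate_api_key():
        # A dash-free uuid4 string is exactly 32 hex chars; the slice mirrors the original.
        uid = str(uuid.uuid4()).replace("-", "")
        return uid[0:32]

    key = generate_api_key()
    assert len(key) == 32
    assert all(c in "0123456789abcdef" for c in key)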
@@ -33,61 +33,69 b' class Application(Resource):'
33 33 Resource of application type
34 34 """
35 35
36 __tablename__ = 'applications'
37 __mapper_args__ = {'polymorphic_identity': 'application'}
36 __tablename__ = "applications"
37 __mapper_args__ = {"polymorphic_identity": "application"}
38 38
39 39 # lists configurable possible permissions for this resource type
40 __possible_permissions__ = ('view', 'update_reports')
41
42 resource_id = sa.Column(sa.Integer(),
43 sa.ForeignKey('resources.resource_id',
44 onupdate='CASCADE',
45 ondelete='CASCADE', ),
46 primary_key=True, )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default='')
48 api_key = sa.Column(sa.String(32), nullable=False, unique=True, index=True,
49 default=generate_api_key)
50 public_key = sa.Column(sa.String(32), nullable=False, unique=True,
51 index=True,
52 default=generate_api_key)
53 default_grouping = sa.Column(sa.Unicode(20), nullable=False,
54 default='url_traceback')
40 __possible_permissions__ = ("view", "update_reports")
41
42 resource_id = sa.Column(
43 sa.Integer(),
44 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
45 primary_key=True,
46 )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default="")
48 api_key = sa.Column(
49 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
50 )
51 public_key = sa.Column(
52 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
53 )
54 default_grouping = sa.Column(
55 sa.Unicode(20), nullable=False, default="url_traceback"
56 )
55 57 error_report_threshold = sa.Column(sa.Integer(), default=10)
56 58 slow_report_threshold = sa.Column(sa.Integer(), default=10)
57 allow_permanent_storage = sa.Column(sa.Boolean(), default=False,
58 nullable=False)
59 allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False)
59 60
60 @sa.orm.validates('default_grouping')
61 @sa.orm.validates("default_grouping")
61 62 def validate_default_grouping(self, key, grouping):
62 63 """ validate if resouce can have specific permission """
63 assert grouping in ['url_type', 'url_traceback', 'traceback_server']
64 assert grouping in ["url_type", "url_traceback", "traceback_server"]
64 65 return grouping
65 66
66 report_groups = sa.orm.relationship('ReportGroup',
67 report_groups = sa.orm.relationship(
68 "ReportGroup",
67 69 cascade="all, delete-orphan",
68 70 passive_deletes=True,
69 71 passive_updates=True,
70 lazy='dynamic',
71 backref=sa.orm.backref('application',
72 lazy="joined"))
72 lazy="dynamic",
73 backref=sa.orm.backref("application", lazy="joined"),
74 )
73 75
74 postprocess_conf = sa.orm.relationship('ApplicationPostprocessConf',
76 postprocess_conf = sa.orm.relationship(
77 "ApplicationPostprocessConf",
75 78 cascade="all, delete-orphan",
76 79 passive_deletes=True,
77 80 passive_updates=True,
78 backref='resource')
81 backref="resource",
82 )
79 83
80 logs = sa.orm.relationship('Log',
81 lazy='dynamic',
82 backref='application',
84 logs = sa.orm.relationship(
85 "Log",
86 lazy="dynamic",
87 backref="application",
83 88 passive_deletes=True,
84 passive_updates=True, )
89 passive_updates=True,
90 )
85 91
86 integrations = sa.orm.relationship('IntegrationBase',
87 backref='resource',
92 integrations = sa.orm.relationship(
93 "IntegrationBase",
94 backref="resource",
88 95 cascade="all, delete-orphan",
89 96 passive_deletes=True,
90 passive_updates=True, )
97 passive_updates=True,
98 )
91 99
92 100 def generate_api_key(self):
93 101 return generate_api_key()
@@ -95,10 +103,11 b' class Application(Resource):'
95 103
96 104 def after_update(mapper, connection, target):
97 105 from appenlight.models.services.application import ApplicationService
98 log.info('clearing out ApplicationService cache')
106
107 log.info("clearing out ApplicationService cache")
99 108 ApplicationService.by_id_cached().invalidate(target.resource_id)
100 109 ApplicationService.by_api_key_cached().invalidate(target.api_key)
101 110
102 111
103 sa.event.listen(Application, 'after_update', after_update)
104 sa.event.listen(Application, 'after_delete', after_update)
112 sa.event.listen(Application, "after_update", after_update)
113 sa.event.listen(Application, "after_delete", after_update)
@@ -27,18 +27,20 b' class ApplicationPostprocessConf(Base, BaseModel):'
27 27 This is later used for rule parsing like "if 10 occurrences bump priority +1"
28 28 """
29 29
30 __tablename__ = 'application_postprocess_conf'
30 __tablename__ = "application_postprocess_conf"
31 31
32 32 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
33 resource_id = sa.Column(sa.Integer(),
34 sa.ForeignKey('resources.resource_id',
35 onupdate='CASCADE',
36 ondelete='CASCADE'))
33 resource_id = sa.Column(
34 sa.Integer(),
35 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
36 )
37 37 do = sa.Column(sa.Unicode(25), nullable=False)
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default='')
39 rule = sa.Column(sa.dialects.postgresql.JSON,
40 nullable=False, default={'field': 'http_status',
41 "op": "ge", "value": "500"})
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default="")
39 rule = sa.Column(
40 sa.dialects.postgresql.JSON,
41 nullable=False,
42 default={"field": "http_status", "op": "ge", "value": "500"},
43 )
42 44
43 45 def postprocess(self, item):
44 46 new_value = int(self.new_value)
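The default `rule` above encodes "if http_status >= 500". A minimal sketch of evaluating such a rule against a report dict; this tiny evaluator is an illustration only, AppEnlight's real rule engine lives elsewhere:

    import operator

    # Hypothetical operator table; the real engine supports more comparators.
    OPS = {"ge": operator.ge, "le": operator.le, "eq": operator.eq}

    def rule_matches(rule, report):
        field_value = int(report.get(rule["field"], 0))
        return OPS[rule["op"]](field_value, int(rule["value"]))

    rule = {"field": "http_status", "op": "ge", "value": "500"}
    assert rule_matches(rule, {"http_status": 502}) is True
    assert rule_matches(rule, {"http_status": 404}) is False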
@@ -29,17 +29,22 b' class AuthToken(Base, BaseModel):'
29 29 """
30 30 Stores user authentication tokens
31 31 """
32 __tablename__ = 'auth_tokens'
32
33 __tablename__ = "auth_tokens"
33 34
34 35 id = sa.Column(sa.Integer, primary_key=True, nullable=False)
35 token = sa.Column(sa.Unicode(40), nullable=False,
36 default=lambda x: UserService.generate_random_string(40))
37 owner_id = sa.Column(sa.Unicode(30),
38 sa.ForeignKey('users.id', onupdate='CASCADE',
39 ondelete='CASCADE'))
36 token = sa.Column(
37 sa.Unicode(40),
38 nullable=False,
39 default=lambda x: UserService.generate_random_string(40),
40 )
41 owner_id = sa.Column(
42 sa.Unicode(30),
43 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
44 )
40 45 creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
41 46 expires = sa.Column(sa.DateTime)
42 description = sa.Column(sa.Unicode, default='')
47 description = sa.Column(sa.Unicode, default="")
43 48
44 49 @property
45 50 def is_expired(self):
@@ -49,4 +54,4 b' class AuthToken(Base, BaseModel):'
49 54 return False
50 55
51 56 def __str__(self):
52 return '<AuthToken u:%s t:%s...>' % (self.owner_id, self.token[0:10])
57 return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10])
@@ -22,7 +22,7 b' from . import Base'
22 22
23 23
24 24 class Config(Base, BaseModel):
25 __tablename__ = 'config'
25 __tablename__ = "config"
26 26
27 27 key = sa.Column(sa.Unicode, primary_key=True)
28 28 section = sa.Column(sa.Unicode, primary_key=True)
@@ -30,39 +30,40 b' log = logging.getLogger(__name__)'
30 30
31 31
32 32 class Event(Base, BaseModel):
33 __tablename__ = 'events'
33 __tablename__ = "events"
34 34
35 types = {'error_report_alert': 1,
36 'slow_report_alert': 3,
37 'comment': 5,
38 'assignment': 6,
39 'uptime_alert': 7,
40 'chart_alert': 9}
35 types = {
36 "error_report_alert": 1,
37 "slow_report_alert": 3,
38 "comment": 5,
39 "assignment": 6,
40 "uptime_alert": 7,
41 "chart_alert": 9,
42 }
41 43
42 statuses = {'active': 1,
43 'closed': 0}
44 statuses = {"active": 1, "closed": 0}
44 45
45 46 id = sa.Column(sa.Integer, primary_key=True)
46 47 start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
47 48 end_date = sa.Column(sa.DateTime)
48 49 status = sa.Column(sa.Integer, default=1)
49 50 event_type = sa.Column(sa.Integer, default=1)
50 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
51 nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
53 nullable=True)
54 resource_id = sa.Column(sa.Integer(),
55 sa.ForeignKey('resources.resource_id'),
56 nullable=True)
51 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
53 resource_id = sa.Column(
54 sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True
55 )
57 56 target_id = sa.Column(sa.Integer)
58 57 target_uuid = sa.Column(sa.Unicode(40))
59 58 text = sa.Column(sa.UnicodeText())
60 59 values = sa.Column(JSON(), nullable=False, default=None)
61 60
62 61 def __repr__(self):
63 return '<Event %s, app:%s, %s>' % (self.unified_alert_name(),
62 return "<Event %s, app:%s, %s>" % (
63 self.unified_alert_name(),
64 64 self.resource_id,
65 self.unified_alert_action())
65 self.unified_alert_action(),
66 )
66 67
67 68 @property
68 69 def reverse_types(self):
@@ -73,9 +74,9 b' class Event(Base, BaseModel):'
73 74
74 75 def unified_alert_action(self):
75 76 event_name = self.reverse_types[self.event_type]
76 if self.status == Event.statuses['closed']:
77 if self.status == Event.statuses["closed"]:
77 78 return "CLOSE"
78 if self.status != Event.statuses['closed']:
79 if self.status != Event.statuses["closed"]:
79 80 return "OPEN"
80 81 return event_name
81 82
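`unified_alert_action()` reduces an event to OPEN/CLOSE purely from `status`, while `reverse_types` (the property right above) inverts the `types` dict to recover an event name from its stored integer. Both lookups in miniature:

    types = {
        "error_report_alert": 1,
        "slow_report_alert": 3,
        "comment": 5,
        "assignment": 6,
        "uptime_alert": 7,
        "chart_alert": 9,
    }
    statuses = {"active": 1, "closed": 0}

    # Inverted mapping, as the reverse_types property builds it.
    reverse_types = {v: k for k, v in types.items()}

    def unified_alert_action(status):
        return "CLOSE" if status == statuses["closed"] else "OPEN"

    assert reverse_types[7] == "uptime_alert"
    assert unified_alert_action(1) == "OPEN"
    assert unified_alert_action(0) == "CLOSE"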
@@ -89,30 +90,33 b' class Event(Base, BaseModel):'
89 90 request = get_current_request()
90 91 if not resource:
91 92 return
92 users = set([p.user for p in ResourceService.users_for_perm(resource, 'view')])
93 users = set([p.user for p in ResourceService.users_for_perm(resource, "view")])
93 94 for user in users:
94 95 for channel in user.alert_channels:
95 matches_resource = not channel.resources or resource in [r.resource_id for r in channel.resources]
96 matches_resource = not channel.resources or resource in [
97 r.resource_id for r in channel.resources
98 ]
96 99 if (
97 not channel.channel_validated or
98 not channel.send_alerts or
99 not matches_resource
100 not channel.channel_validated
101 or not channel.send_alerts
102 or not matches_resource
100 103 ):
101 104 continue
102 105 else:
103 106 try:
104 channel.notify_alert(resource=resource,
105 event=self,
106 user=user,
107 request=request)
107 channel.notify_alert(
108 resource=resource, event=self, user=user, request=request
109 )
108 110 except IntegrationException as e:
109 log.warning('%s' % e)
111 log.warning("%s" % e)
110 112
111 113 def validate_or_close(self, since_when, db_session=None):
112 114 """ Checks if alerts should stay open or it's time to close them.
113 115 Generates close alert event if alerts get closed """
114 event_types = [Event.types['error_report_alert'],
115 Event.types['slow_report_alert']]
116 event_types = [
117 Event.types["error_report_alert"],
118 Event.types["slow_report_alert"],
119 ]
116 120 app = ResourceService.by_resource_id(self.resource_id)
117 121 # if app was deleted close instantly
118 122 if not app:
@@ -121,10 +125,11 b' class Event(Base, BaseModel):'
121 125
122 126 if self.event_type in event_types:
123 127 total = ReportStatService.count_by_type(
124 self.event_type, self.resource_id, since_when)
125 if Event.types['error_report_alert'] == self.event_type:
128 self.event_type, self.resource_id, since_when
129 )
130 if Event.types["error_report_alert"] == self.event_type:
126 131 threshold = app.error_report_threshold
127 if Event.types['slow_report_alert'] == self.event_type:
132 if Event.types["slow_report_alert"] == self.event_type:
128 133 threshold = app.slow_report_threshold
129 134
130 135 if total < threshold:
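A worked example of the branch above, with a plain dict standing in for the Application row: an open error alert whose app logged only 7 matching reports since `since_when` satisfies `total < threshold`, so the event gets closed (the `close()` call sits in lines the diff elides).

    def should_close(event_name, total_since, app):
        # Pick the threshold matching the alert type, as the code above does.
        thresholds = {
            "error_report_alert": app["error_report_threshold"],
            "slow_report_alert": app["slow_report_threshold"],
        }
        return total_since < thresholds[event_name]

    app = {"error_report_threshold": 10, "slow_report_threshold": 10}
    assert should_close("error_report_alert", 7, app) is True
    assert should_close("slow_report_alert", 25, app) is False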
@@ -135,31 +140,31 b' class Event(Base, BaseModel):'
135 140 Closes an event and sends notification to affected users
136 141 """
137 142 self.end_date = datetime.utcnow()
138 self.status = Event.statuses['closed']
139 log.warning('ALERT: CLOSE: %s' % self)
143 self.status = Event.statuses["closed"]
144 log.warning("ALERT: CLOSE: %s" % self)
140 145 self.send_alerts()
141 146
142 147 def text_representation(self):
143 148 alert_type = self.unified_alert_name()
144 text = ''
145 if 'slow_report' in alert_type:
146 text += 'Slow report alert'
147 if 'error_report' in alert_type:
148 text += 'Exception report alert'
149 if 'uptime_alert' in alert_type:
150 text += 'Uptime alert'
151 if 'chart_alert' in alert_type:
152 text += 'Metrics value alert'
149 text = ""
150 if "slow_report" in alert_type:
151 text += "Slow report alert"
152 if "error_report" in alert_type:
153 text += "Exception report alert"
154 if "uptime_alert" in alert_type:
155 text += "Uptime alert"
156 if "chart_alert" in alert_type:
157 text += "Metrics value alert"
153 158
154 159 alert_action = self.unified_alert_action()
155 if alert_action == 'OPEN':
156 text += ' got opened.'
157 if alert_action == 'CLOSE':
158 text += ' got closed.'
160 if alert_action == "OPEN":
161 text += " got opened."
162 if alert_action == "CLOSE":
163 text += " got closed."
159 164 return text
160 165
161 166 def get_dict(self, request=None):
162 167 dict_data = super(Event, self).get_dict()
163 dict_data['text'] = self.text_representation()
164 dict_data['resource_name'] = self.resource.resource_name
168 dict_data["text"] = self.text_representation()
169 dict_data["resource_name"] = self.resource.resource_name
165 170 return dict_data
@@ -25,12 +25,12 b' from appenlight.lib.sqlalchemy_fields import EncryptedUnicode'
25 25 class ExternalIdentity(ExternalIdentityMixin, Base):
26 26 @declared_attr
27 27 def access_token(self):
28 return sa.Column(EncryptedUnicode(255), default='')
28 return sa.Column(EncryptedUnicode(255), default="")
29 29
30 30 @declared_attr
31 31 def alt_token(self):
32 return sa.Column(EncryptedUnicode(255), default='')
32 return sa.Column(EncryptedUnicode(255), default="")
33 33
34 34 @declared_attr
35 35 def token_secret(self):
36 return sa.Column(EncryptedUnicode(255), default='')
36 return sa.Column(EncryptedUnicode(255), default="")
@@ -19,27 +19,28 b' from appenlight.models import Base'
19 19
20 20
21 21 class Group(GroupMixin, Base):
22 __possible_permissions__ = ('root_administration',
23 'test_features',
24 'admin_panel',
25 'admin_users',
26 'manage_partitions',)
22 __possible_permissions__ = (
23 "root_administration",
24 "test_features",
25 "admin_panel",
26 "admin_users",
27 "manage_partitions",
28 )
27 29
28 def get_dict(self, exclude_keys=None, include_keys=None,
29 include_perms=False):
30 def get_dict(self, exclude_keys=None, include_keys=None, include_perms=False):
30 31 result = super(Group, self).get_dict(exclude_keys, include_keys)
31 32 if include_perms:
32 result['possible_permissions'] = self.__possible_permissions__
33 result['current_permissions'] = [p.perm_name for p in
34 self.permissions]
33 result["possible_permissions"] = self.__possible_permissions__
34 result["current_permissions"] = [p.perm_name for p in self.permissions]
35 35 else:
36 result['possible_permissions'] = []
37 result['current_permissions'] = []
36 result["possible_permissions"] = []
37 result["current_permissions"] = []
38 38 exclude_keys_list = exclude_keys or []
39 39 include_keys_list = include_keys or []
40 40 d = {}
41 41 for k in result.keys():
42 if (k not in exclude_keys_list and
43 (k in include_keys_list or not include_keys)):
42 if k not in exclude_keys_list and (
43 k in include_keys_list or not include_keys
44 ):
44 45 d[k] = result[k]
45 46 return d
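The closing loop is a straightforward include/exclude filter: a key survives when it is not excluded and either no include list was supplied or the key appears in it. The same logic as a dict comprehension:

    def filter_keys(result, exclude_keys=None, include_keys=None):
        exclude = exclude_keys or []
        # The include test falls through when include_keys is None or empty.
        return {
            k: v
            for k, v in result.items()
            if k not in exclude and (k in (include_keys or []) or not include_keys)
        }

    row = {"id": 1, "group_name": "admins", "secret": "x"}
    assert filter_keys(row, exclude_keys=["secret"]) == {"id": 1, "group_name": "admins"}
    assert filter_keys(row, include_keys=["id"]) == {"id": 1}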
@@ -14,8 +14,9 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 from ziggurat_foundations.models.group_resource_permission import \
18 GroupResourcePermissionMixin
17 from ziggurat_foundations.models.group_resource_permission import (
18 GroupResourcePermissionMixin,
19 )
19 20 from appenlight.models import Base
20 21
21 22
@@ -32,34 +32,37 b' class IntegrationBase(Base, BaseModel):'
32 32 """
33 33 Model from which all integrations inherit using polymorphic approach
34 34 """
35 __tablename__ = 'integrations'
35
36 __tablename__ = "integrations"
36 37
37 38 front_visible = False
38 39 as_alert_channel = False
39 40 supports_report_alerting = False
40 41
41 42 id = sa.Column(sa.Integer, primary_key=True)
42 resource_id = sa.Column(sa.Integer,
43 sa.ForeignKey('applications.resource_id'))
43 resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id"))
44 44 integration_name = sa.Column(sa.Unicode(64))
45 _config = sa.Column('config', JSON(), nullable=False, default='')
45 _config = sa.Column("config", JSON(), nullable=False, default="")
46 46 modified_date = sa.Column(sa.DateTime)
47 47
48 channel = sa.orm.relationship('AlertChannel',
48 channel = sa.orm.relationship(
49 "AlertChannel",
49 50 cascade="all,delete-orphan",
50 51 passive_deletes=True,
51 52 passive_updates=True,
52 53 uselist=False,
53 backref='integration')
54 backref="integration",
55 )
54 56
55 57 __mapper_args__ = {
56 'polymorphic_on': 'integration_name',
57 'polymorphic_identity': 'integration'
58 "polymorphic_on": "integration_name",
59 "polymorphic_identity": "integration",
58 60 }
59 61
60 62 @classmethod
61 def by_app_id_and_integration_name(cls, resource_id, integration_name,
62 db_session=None):
63 def by_app_id_and_integration_name(
64 cls, resource_id, integration_name, db_session=None
65 ):
63 66 db_session = get_db_session(db_session)
64 67 query = db_session.query(cls)
65 68 query = query.filter(cls.integration_name == integration_name)
@@ -72,7 +75,6 b' class IntegrationBase(Base, BaseModel):'
72 75
73 76 @config.setter
74 77 def config(self, value):
75 if not hasattr(value, 'items'):
76 raise Exception('IntegrationBase.config only accepts '
77 'flat dictionaries')
78 if not hasattr(value, "items"):
79 raise Exception("IntegrationBase.config only accepts " "flat dictionaries")
78 80 self._config = encrypt_dictionary_keys(value)
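The setter accepts anything mapping-like (duck-typed on `.items()`) and persists it encrypted. A sketch of the guard, with an identity function standing in for AppEnlight's `encrypt_dictionary_keys` helper:

    def set_config(value):
        # Same duck-typed mapping check as the setter above.
        if not hasattr(value, "items"):
            raise Exception("IntegrationBase.config only accepts flat dictionaries")
        # encrypt_dictionary_keys would encrypt each value; identity stand-in here.
        return dict(value.items())

    assert set_config({"webhook_url": "https://example.com/hook"})
    try:
        set_config(["not", "a", "dict"])
    except Exception as exc:
        assert "flat dictionaries" in str(exc)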
@@ -16,8 +16,7 b''
16 16
17 17 import requests
18 18 from requests_oauthlib import OAuth1
19 from appenlight.models.integrations import (IntegrationBase,
20 IntegrationException)
19 from appenlight.models.integrations import IntegrationBase, IntegrationException
21 20
22 21 _ = str
23 22
@@ -27,14 +26,12 b' class NotFoundException(Exception):'
27 26
28 27
29 28 class BitbucketIntegration(IntegrationBase):
30 __mapper_args__ = {
31 'polymorphic_identity': 'bitbucket'
32 }
29 __mapper_args__ = {"polymorphic_identity": "bitbucket"}
33 30 front_visible = True
34 31 as_alert_channel = False
35 32 supports_report_alerting = False
36 33 action_notification = True
37 integration_action = 'Add issue to Bitbucket'
34 integration_action = "Add issue to Bitbucket"
38 35
39 36 @classmethod
40 37 def create_client(cls, request, user_name=None, repo_name=None):
@@ -46,27 +43,36 b' class BitbucketIntegration(IntegrationBase):'
46 43 token = None
47 44 secret = None
48 45 for identity in request.user.external_identities:
49 if identity.provider_name == 'bitbucket':
46 if identity.provider_name == "bitbucket":
50 47 token = identity.access_token
51 48 secret = identity.token_secret
52 49 break
53 50 if not token:
54 raise IntegrationException(
55 'No valid auth token present for this service')
56 client = BitbucketClient(token, secret,
51 raise IntegrationException("No valid auth token present for this service")
52 client = BitbucketClient(
53 token,
54 secret,
57 55 user_name,
58 56 repo_name,
59 config['authomatic.pr.bitbucket.key'],
60 config['authomatic.pr.bitbucket.secret'])
57 config["authomatic.pr.bitbucket.key"],
58 config["authomatic.pr.bitbucket.secret"],
59 )
61 60 return client
62 61
63 62
64 63 class BitbucketClient(object):
65 api_url = 'https://bitbucket.org/api/1.0'
66 repo_type = 'bitbucket'
67
68 def __init__(self, token, secret, owner, repo_name, bitbucket_consumer_key,
69 bitbucket_consumer_secret):
64 api_url = "https://bitbucket.org/api/1.0"
65 repo_type = "bitbucket"
66
67 def __init__(
68 self,
69 token,
70 secret,
71 owner,
72 repo_name,
73 bitbucket_consumer_key,
74 bitbucket_consumer_secret,
75 ):
70 76 self.access_token = token
71 77 self.token_secret = secret
72 78 self.owner = owner
@@ -75,89 +81,108 b' class BitbucketClient(object):'
75 81 self.bitbucket_consumer_secret = bitbucket_consumer_secret
76 82
77 83 possible_keys = {
78 'status': ['new', 'open', 'resolved', 'on hold', 'invalid',
79 'duplicate', 'wontfix'],
80 'priority': ['trivial', 'minor', 'major', 'critical', 'blocker'],
81 'kind': ['bug', 'enhancement', 'proposal', 'task']
84 "status": [
85 "new",
86 "open",
87 "resolved",
88 "on hold",
89 "invalid",
90 "duplicate",
91 "wontfix",
92 ],
93 "priority": ["trivial", "minor", "major", "critical", "blocker"],
94 "kind": ["bug", "enhancement", "proposal", "task"],
82 95 }
83 96
84 97 def get_statuses(self):
85 98 """Gets list of possible item statuses"""
86 return self.possible_keys['status']
99 return self.possible_keys["status"]
87 100
88 101 def get_priorities(self):
89 102 """Gets list of possible item statuses"""
90 return self.possible_keys['priority']
103 return self.possible_keys["priority"]
91 104
92 def make_request(self, url, method='get', data=None, headers=None):
105 def make_request(self, url, method="get", data=None, headers=None):
93 106 """
94 107 Performs HTTP request to bitbucket
95 108 """
96 auth = OAuth1(self.bitbucket_consumer_key,
109 auth = OAuth1(
110 self.bitbucket_consumer_key,
97 111 self.bitbucket_consumer_secret,
98 self.access_token, self.token_secret)
112 self.access_token,
113 self.token_secret,
114 )
99 115 try:
100 resp = getattr(requests, method)(url, data=data, auth=auth,
101 timeout=10)
116 resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10)
102 117 except Exception as e:
103 118 raise IntegrationException(
104 _('Error communicating with Bitbucket: %s') % (e,))
119 _("Error communicating with Bitbucket: %s") % (e,)
120 )
105 121 if resp.status_code == 401:
106 raise IntegrationException(
107 _('You are not authorized to access this repo'))
122 raise IntegrationException(_("You are not authorized to access this repo"))
108 123 elif resp.status_code == 404:
109 raise IntegrationException(_('User or repo name are incorrect'))
124 raise IntegrationException(_("User or repo name are incorrect"))
110 125 elif resp.status_code not in [200, 201]:
111 126 raise IntegrationException(
112 _('Bitbucket response_code: %s') % resp.status_code)
127 _("Bitbucket response_code: %s") % resp.status_code
128 )
113 129 try:
114 130 return resp.json()
115 131 except Exception as e:
116 132 raise IntegrationException(
117 _('Error decoding response from Bitbucket: %s') % (e,))
133 _("Error decoding response from Bitbucket: %s") % (e,)
134 )
118 135
119 136 def get_assignees(self):
120 137 """Gets list of possible assignees"""
121 url = '%(api_url)s/privileges/%(owner)s/%(repo_name)s' % {
122 'api_url': self.api_url,
123 'owner': self.owner,
124 'repo_name': self.repo_name}
138 url = "%(api_url)s/privileges/%(owner)s/%(repo_name)s" % {
139 "api_url": self.api_url,
140 "owner": self.owner,
141 "repo_name": self.repo_name,
142 }
125 143
126 144 data = self.make_request(url)
127 results = [{'user': self.owner, 'name': '(Repo owner)'}]
145 results = [{"user": self.owner, "name": "(Repo owner)"}]
128 146 if data:
129 147 for entry in data:
130 results.append({"user": entry['user']['username'],
131 "name": entry['user'].get('display_name')})
148 results.append(
149 {
150 "user": entry["user"]["username"],
151 "name": entry["user"].get("display_name"),
152 }
153 )
132 154 return results
133 155
134 156 def create_issue(self, form_data):
135 157 """
136 158 Creates a new issue in the tracker using a REST call
137 159 """
138 url = '%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/' % {
139 'api_url': self.api_url,
140 'owner': self.owner,
141 'repo_name': self.repo_name}
160 url = "%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/" % {
161 "api_url": self.api_url,
162 "owner": self.owner,
163 "repo_name": self.repo_name,
164 }
142 165
143 166 payload = {
144 "title": form_data['title'],
145 "content": form_data['content'],
146 "kind": form_data['kind'],
147 "priority": form_data['priority'],
148 "responsible": form_data['responsible']
167 "title": form_data["title"],
168 "content": form_data["content"],
169 "kind": form_data["kind"],
170 "priority": form_data["priority"],
171 "responsible": form_data["responsible"],
149 172 }
150 data = self.make_request(url, 'post', payload)
173 data = self.make_request(url, "post", payload)
151 174 f_args = {
152 175 "owner": self.owner,
153 176 "repo_name": self.repo_name,
154 "issue_id": data['local_id']
177 "issue_id": data["local_id"],
155 178 }
156 web_url = 'https://bitbucket.org/%(owner)s/%(repo_name)s' \
157 '/issue/%(issue_id)s/issue-title' % f_args
179 web_url = (
180 "https://bitbucket.org/%(owner)s/%(repo_name)s"
181 "/issue/%(issue_id)s/issue-title" % f_args
182 )
158 183 to_return = {
159 'id': data['local_id'],
160 'resource_url': data['resource_uri'],
161 'web_url': web_url
184 "id": data["local_id"],
185 "resource_url": data["resource_uri"],
186 "web_url": web_url,
162 187 }
163 188 return to_return
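Hypothetical end-to-end usage; every credential below is a placeholder, and invalid `kind`/`priority` values would be rejected by Bitbucket itself rather than by this client:

    from appenlight.models.integrations.bitbucket import BitbucketClient

    client = BitbucketClient(
        token="...",                      # OAuth1 user token (placeholder)
        secret="...",                     # OAuth1 token secret (placeholder)
        owner="some_user",
        repo_name="some_repo",
        bitbucket_consumer_key="...",
        bitbucket_consumer_secret="...",
    )
    issue = client.create_issue(
        {
            "title": "Unhandled IndexError on /reports",
            "content": "Traceback attached from the AppEnlight report group.",
            "kind": "bug",                # one of possible_keys["kind"]
            "priority": "major",          # one of possible_keys["priority"]
            "responsible": "some_user",
        }
    )
    print(issue["web_url"])  # https://bitbucket.org/some_user/some_repo/issue/<id>/issue-title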
@@ -20,8 +20,7 b' from requests.exceptions import HTTPError, ConnectionError'
20 20 from camplight import Request, Campfire
21 21 from camplight.exceptions import CamplightException
22 22
23 from appenlight.models.integrations import (IntegrationBase,
24 IntegrationException)
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
25 24
26 25 _ = str
27 26
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33 32
34 33
35 34 class CampfireIntegration(IntegrationBase):
36 __mapper_args__ = {
37 'polymorphic_identity': 'campfire'
38 }
35 __mapper_args__ = {"polymorphic_identity": "campfire"}
39 36 front_visible = False
40 37 as_alert_channel = True
41 38 supports_report_alerting = True
42 39 action_notification = True
43 integration_action = 'Message via Campfire'
40 integration_action = "Message via Campfire"
44 41
45 42 @classmethod
46 43 def create_client(cls, api_token, account):
@@ -50,7 +47,7 b' class CampfireIntegration(IntegrationBase):'
50 47
51 48 class CampfireClient(object):
52 49 def __init__(self, api_token, account):
53 request = Request('https://%s.campfirenow.com' % account, api_token)
50 request = Request("https://%s.campfirenow.com" % account, api_token)
54 51 self.campfire = Campfire(request)
55 52
56 53 def get_account(self):
@@ -65,10 +62,10 b' class CampfireClient(object):'
65 62 except (HTTPError, CamplightException) as e:
66 63 raise IntegrationException(str(e))
67 64
68 def speak_to_room(self, room, message, sound='RIMSHOT'):
65 def speak_to_room(self, room, message, sound="RIMSHOT"):
69 66 try:
70 67 room = self.campfire.room(room)
71 68 room.join()
72 room.speak(message, type_='TextMessage')
69 room.speak(message, type_="TextMessage")
73 70 except (HTTPError, CamplightException, ConnectionError) as e:
74 71 raise IntegrationException(str(e))
@@ -20,8 +20,7 b' import requests'
20 20 from requests.auth import HTTPBasicAuth
21 21 import simplejson as json
22 22
23 from appenlight.models.integrations import (IntegrationBase,
24 IntegrationException)
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
25 24
26 25 _ = str
27 26
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33 32
34 33
35 34 class FlowdockIntegration(IntegrationBase):
36 __mapper_args__ = {
37 'polymorphic_identity': 'flowdock'
38 }
35 __mapper_args__ = {"polymorphic_identity": "flowdock"}
39 36 front_visible = False
40 37 as_alert_channel = True
41 38 supports_report_alerting = True
42 39 action_notification = True
43 integration_action = 'Message via Flowdock'
40 integration_action = "Message via Flowdock"
44 41
45 42 @classmethod
46 43 def create_client(cls, api_token):
@@ -50,33 +47,37 b' class FlowdockIntegration(IntegrationBase):'
50 47
51 48 class FlowdockClient(object):
52 49 def __init__(self, api_token):
53 self.auth = HTTPBasicAuth(api_token, '')
50 self.auth = HTTPBasicAuth(api_token, "")
54 51 self.api_token = api_token
55 self.api_url = 'https://api.flowdock.com/v1/messages'
52 self.api_url = "https://api.flowdock.com/v1/messages"
56 53
57 def make_request(self, url, method='get', data=None):
54 def make_request(self, url, method="get", data=None):
58 55 headers = {
59 'Content-Type': 'application/json',
60 'User-Agent': 'appenlight-flowdock',
56 "Content-Type": "application/json",
57 "User-Agent": "appenlight-flowdock",
61 58 }
62 59 try:
63 60 if data:
64 61 data = json.dumps(data)
65 resp = getattr(requests, method)(url, data=data, headers=headers,
66 timeout=10)
62 resp = getattr(requests, method)(
63 url, data=data, headers=headers, timeout=10
64 )
67 65 except Exception as e:
68 66 raise IntegrationException(
69 _('Error communicating with Flowdock: %s') % (e,))
67 _("Error communicating with Flowdock: %s") % (e,)
68 )
70 69 if resp.status_code > 299:
71 70 raise IntegrationException(resp.text)
72 71 return resp
73 72
74 73 def send_to_chat(self, payload):
75 url = '%(api_url)s/chat/%(api_token)s' % {'api_url': self.api_url,
76 'api_token': self.api_token}
77 return self.make_request(url, method='post', data=payload).json()
74 url = "%(api_url)s/chat/%(api_token)s" % {
75 "api_url": self.api_url,
76 "api_token": self.api_token,
77 }
78 return self.make_request(url, method="post", data=payload).json()
78 79
79 80 def send_to_inbox(self, payload):
80 f_args = {'api_url': self.api_url, 'api_token': self.api_token}
81 url = '%(api_url)s/team_inbox/%(api_token)s' % f_args
82 return self.make_request(url, method='post', data=payload).json()
81 f_args = {"api_url": self.api_url, "api_token": self.api_token}
82 url = "%(api_url)s/team_inbox/%(api_token)s" % f_args
83 return self.make_request(url, method="post", data=payload).json()
@@ -27,14 +27,12 b' class GithubAuthException(Exception):'
27 27
28 28
29 29 class GithubIntegration(IntegrationBase):
30 __mapper_args__ = {
31 'polymorphic_identity': 'github'
32 }
30 __mapper_args__ = {"polymorphic_identity": "github"}
33 31 front_visible = True
34 32 as_alert_channel = False
35 33 supports_report_alerting = False
36 34 action_notification = True
37 integration_action = 'Add issue to Github'
35 integration_action = "Add issue to Github"
38 36
39 37 @classmethod
40 38 def create_client(cls, request, user_name=None, repo_name=None):
@@ -45,112 +43,116 b' class GithubIntegration(IntegrationBase):'
45 43 token = None
46 44 secret = None
47 45 for identity in request.user.external_identities:
48 if identity.provider_name == 'github':
46 if identity.provider_name == "github":
49 47 token = identity.access_token
50 48 secret = identity.token_secret
51 49 break
52 50 if not token:
53 raise IntegrationException(
54 'No valid auth token present for this service')
51 raise IntegrationException("No valid auth token present for this service")
55 52 client = GithubClient(token=token, owner=user_name, name=repo_name)
56 53 return client
57 54
58 55
59 56 class GithubClient(object):
60 api_url = 'https://api.github.com'
61 repo_type = 'github'
57 api_url = "https://api.github.com"
58 repo_type = "github"
62 59
63 60 def __init__(self, token, owner, name):
64 61 self.access_token = token
65 62 self.owner = owner
66 63 self.name = name
67 64
68 def make_request(self, url, method='get', data=None, headers=None):
69 req_headers = {'User-Agent': 'appenlight',
70 'Content-Type': 'application/json',
71 'Authorization': 'token %s' % self.access_token}
65 def make_request(self, url, method="get", data=None, headers=None):
66 req_headers = {
67 "User-Agent": "appenlight",
68 "Content-Type": "application/json",
69 "Authorization": "token %s" % self.access_token,
70 }
72 71 try:
73 72 if data:
74 73 data = json.dumps(data)
75 resp = getattr(requests, method)(url, data=data,
76 headers=req_headers,
77 timeout=10)
74 resp = getattr(requests, method)(
75 url, data=data, headers=req_headers, timeout=10
76 )
78 77 except Exception as e:
79 msg = 'Error communicating with Github: %s'
78 msg = "Error communicating with Github: %s"
80 79 raise IntegrationException(_(msg) % (e,))
81 80
82 81 if resp.status_code == 404:
83 msg = 'User or repo name are incorrect'
82 msg = "User or repo name are incorrect"
84 83 raise IntegrationException(_(msg))
85 84 if resp.status_code == 401:
86 msg = 'You are not authorized to access this repo'
85 msg = "You are not authorized to access this repo"
87 86 raise IntegrationException(_(msg))
88 87 elif resp.status_code not in [200, 201]:
89 msg = 'Github response_code: %s'
88 msg = "Github response_code: %s"
90 89 raise IntegrationException(_(msg) % resp.status_code)
91 90 try:
92 91 return resp.json()
93 92 except Exception as e:
94 msg = 'Error decoding response from Github: %s'
93 msg = "Error decoding response from Github: %s"
95 94 raise IntegrationException(_(msg) % (e,))
96 95
97 96 def get_statuses(self):
98 97 """Gets list of possible item statuses"""
99 url = '%(api_url)s/repos/%(owner)s/%(name)s/labels' % {
100 'api_url': self.api_url,
101 'owner': self.owner,
102 'name': self.name}
98 url = "%(api_url)s/repos/%(owner)s/%(name)s/labels" % {
99 "api_url": self.api_url,
100 "owner": self.owner,
101 "name": self.name,
102 }
103 103
104 104 data = self.make_request(url)
105 105
106 106 statuses = []
107 107 for status in data:
108 statuses.append(status['name'])
108 statuses.append(status["name"])
109 109 return statuses
110 110
111 111 def get_repo(self):
112 112 """Gets list of possible item statuses"""
113 url = '%(api_url)s/repos/%(owner)s/%(name)s' % {
114 'api_url': self.api_url,
115 'owner': self.owner,
116 'name': self.name}
113 url = "%(api_url)s/repos/%(owner)s/%(name)s" % {
114 "api_url": self.api_url,
115 "owner": self.owner,
116 "name": self.name,
117 }
117 118
118 119 data = self.make_request(url)
119 120 return data
120 121
121 122 def get_assignees(self):
122 123 """Gets list of possible assignees"""
123 url = '%(api_url)s/repos/%(owner)s/%(name)s/collaborators' % {
124 'api_url': self.api_url,
125 'owner': self.owner,
126 'name': self.name}
124 url = "%(api_url)s/repos/%(owner)s/%(name)s/collaborators" % {
125 "api_url": self.api_url,
126 "owner": self.owner,
127 "name": self.name,
128 }
127 129 data = self.make_request(url)
128 130 results = []
129 131 for entry in data:
130 results.append({"user": entry['login'],
131 "name": entry.get('name')})
132 results.append({"user": entry["login"], "name": entry.get("name")})
132 133 return results
133 134
134 135 def create_issue(self, form_data):
135 136 """
136 137 Make a REST call to create issue in Github's issue tracker
137 138 """
138 url = '%(api_url)s/repos/%(owner)s/%(name)s/issues' % {
139 'api_url': self.api_url,
140 'owner': self.owner,
141 'name': self.name}
139 url = "%(api_url)s/repos/%(owner)s/%(name)s/issues" % {
140 "api_url": self.api_url,
141 "owner": self.owner,
142 "name": self.name,
143 }
142 144
143 145 payload = {
144 "title": form_data['title'],
145 "body": form_data['content'],
146 "title": form_data["title"],
147 "body": form_data["content"],
146 148 "labels": [],
147 "assignee": form_data['responsible']
149 "assignee": form_data["responsible"],
148 150 }
149 payload['labels'].extend(form_data['kind'])
150 data = self.make_request(url, 'post', data=payload)
151 payload["labels"].extend(form_data["kind"])
152 data = self.make_request(url, "post", data=payload)
151 153 to_return = {
152 'id': data['number'],
153 'resource_url': data['url'],
154 'web_url': data['html_url']
154 "id": data["number"],
155 "resource_url": data["url"],
156 "web_url": data["html_url"],
155 157 }
156 158 return to_return
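Hypothetical usage mirroring the Bitbucket client; the token is a placeholder and needs repo scope. Note that `form_data["kind"]` must be a list, since it is extended into the `labels` payload:

    from appenlight.models.integrations.github import GithubClient

    client = GithubClient(token="...", owner="some_user", name="some_repo")
    issue = client.create_issue(
        {
            "title": "Unhandled IndexError on /reports",
            "content": "Traceback attached from the AppEnlight report group.",
            "kind": ["bug"],              # merged into the "labels" list
            "responsible": "some_user",
        }
    )
    print(issue["web_url"])  # https://github.com/some_user/some_repo/issues/<number>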
@@ -30,14 +30,12 b' class NotFoundException(Exception):'
30 30
31 31
32 32 class HipchatIntegration(IntegrationBase):
33 __mapper_args__ = {
34 'polymorphic_identity': 'hipchat'
35 }
33 __mapper_args__ = {"polymorphic_identity": "hipchat"}
36 34 front_visible = False
37 35 as_alert_channel = True
38 36 supports_report_alerting = True
39 37 action_notification = True
40 integration_action = 'Message via Hipchat'
38 integration_action = "Message via Hipchat"
41 39
42 40 @classmethod
43 41 def create_client(cls, api_token):
@@ -48,36 +46,30 b' class HipchatIntegration(IntegrationBase):'
48 46 class HipchatClient(object):
49 47 def __init__(self, api_token):
50 48 self.api_token = api_token
51 self.api_url = 'https://api.hipchat.com/v1'
52
53 def make_request(self, endpoint, method='get', data=None):
54 headers = {
55 'User-Agent': 'appenlight-hipchat',
56 }
57 url = '%s%s' % (self.api_url, endpoint)
58 params = {
59 'format': 'json',
60 'auth_token': self.api_token
61 }
49 self.api_url = "https://api.hipchat.com/v1"
50
51 def make_request(self, endpoint, method="get", data=None):
52 headers = {"User-Agent": "appenlight-hipchat"}
53 url = "%s%s" % (self.api_url, endpoint)
54 params = {"format": "json", "auth_token": self.api_token}
62 55 try:
63 resp = getattr(requests, method)(url, data=data, headers=headers,
64 params=params,
65 timeout=3)
56 resp = getattr(requests, method)(
57 url, data=data, headers=headers, params=params, timeout=3
58 )
66 59 except Exception as e:
67 msg = 'Error communicating with Hipchat: %s'
60 msg = "Error communicating with Hipchat: %s"
68 61 raise IntegrationException(_(msg) % (e,))
69 62 if resp.status_code == 404:
70 msg = 'Error communicating with Hipchat - Room not found'
63 msg = "Error communicating with Hipchat - Room not found"
71 64 raise IntegrationException(msg)
72 65 elif resp.status_code != requests.codes.ok:
73 msg = 'Error communicating with Hipchat - status code: %s'
66 msg = "Error communicating with Hipchat - status code: %s"
74 67 raise IntegrationException(msg % resp.status_code)
75 68 return resp
76 69
77 70 def get_rooms(self):
78 71 # not used with notification api token
79 return self.make_request('/rooms/list')
72 return self.make_request("/rooms/list")
80 73
81 74 def send(self, payload):
82 return self.make_request('/rooms/message', method='post',
83 data=payload).json()
75 return self.make_request("/rooms/message", method="post", data=payload).json()
@@ -15,8 +15,7 b''
15 15 # limitations under the License.
16 16
17 17 import jira
18 from appenlight.models.integrations import (IntegrationBase,
19 IntegrationException)
18 from appenlight.models.integrations import IntegrationBase, IntegrationException
20 19
21 20 _ = str
22 21
@@ -26,14 +25,12 b' class NotFoundException(Exception):'
26 25
27 26
28 27 class JiraIntegration(IntegrationBase):
29 __mapper_args__ = {
30 'polymorphic_identity': 'jira'
31 }
28 __mapper_args__ = {"polymorphic_identity": "jira"}
32 29 front_visible = True
33 30 as_alert_channel = False
34 31 supports_report_alerting = False
35 32 action_notification = True
36 integration_action = 'Add issue to Jira'
33 integration_action = "Add issue to Jira"
37 34
38 35
39 36 class JiraClient(object):
@@ -44,12 +41,14 b' class JiraClient(object):'
44 41 self.project = project
45 42 self.request = request
46 43 try:
47 self.client = jira.client.JIRA(options={'server': host_name},
48 basic_auth=(user_name, password))
44 self.client = jira.client.JIRA(
45 options={"server": host_name}, basic_auth=(user_name, password)
46 )
49 47 except jira.JIRAError as e:
50 48 raise IntegrationException(
51 'Communication problem: HTTP_STATUS:%s, URL:%s ' % (
52 e.status_code, e.url))
49 "Communication problem: HTTP_STATUS:%s, URL:%s "
50 % (e.status_code, e.url)
51 )
53 52
54 53 def get_projects(self):
55 54 projects = self.client.projects()
@@ -58,42 +57,42 b' class JiraClient(object):'
58 57 def get_assignees(self, request):
59 58 """Gets list of possible assignees"""
60 59 cache_region = request.registry.cache_regions.redis_sec_30
61 @cache_region.cache_on_arguments('JiraClient.get_assignees')
60
61 @cache_region.cache_on_arguments("JiraClient.get_assignees")
62 62 def cached(project_name):
63 63 users = self.client.search_assignable_users_for_issues(
64 None, project=project_name)
64 None, project=project_name
65 )
65 66 results = []
66 67 for user in users:
67 68 results.append({"id": user.name, "name": user.displayName})
68 69 return results
70
69 71 return cached(self.project)
70 72
71 73 def get_issue_types(self, request):
72 74 metadata = self.get_metadata(request)
73 75 assignees = self.get_assignees(request)
74 76 parsed_metadata = []
75 for entry in metadata['projects'][0]['issuetypes']:
76 issue = {"name": entry['name'],
77 "id": entry['id'],
78 "fields": []}
79 for i_id, field_i in entry['fields'].items():
77 for entry in metadata["projects"][0]["issuetypes"]:
78 issue = {"name": entry["name"], "id": entry["id"], "fields": []}
79 for i_id, field_i in entry["fields"].items():
80 80 field = {
81 "name": field_i['name'],
81 "name": field_i["name"],
82 82 "id": i_id,
83 "required": field_i['required'],
83 "required": field_i["required"],
84 84 "values": [],
85 "type": field_i['schema'].get('type')
85 "type": field_i["schema"].get("type"),
86 86 }
87 if field_i.get('allowedValues'):
88 field['values'] = []
89 for i in field_i['allowedValues']:
90 field['values'].append(
91 {'id': i['id'],
92 'name': i.get('name', i.get('value', ''))
93 })
94 if field['id'] == 'assignee':
95 field['values'] = assignees
96 issue['fields'].append(field)
87 if field_i.get("allowedValues"):
88 field["values"] = []
89 for i in field_i["allowedValues"]:
90 field["values"].append(
91 {"id": i["id"], "name": i.get("name", i.get("value", ""))}
92 )
93 if field["id"] == "assignee":
94 field["values"] = assignees
95 issue["fields"].append(field)
97 96 parsed_metadata.append(issue)
98 97 return parsed_metadata
99 98
@@ -102,35 +101,37 b' class JiraClient(object):'
102 101 # @cache_region.cache_on_arguments('JiraClient.get_metadata')
103 102 def cached(project_name):
104 103 return self.client.createmeta(
105 projectKeys=project_name, expand='projects.issuetypes.fields')
104 projectKeys=project_name, expand="projects.issuetypes.fields"
105 )
106
106 107 return cached(self.project)
107 108
108 109 def create_issue(self, form_data, request):
109 110 issue_types = self.get_issue_types(request)
110 111 payload = {
111 'project': {'key': form_data['project']},
112 'summary': form_data['title'],
113 'description': form_data['content'],
114 'issuetype': {'id': form_data['issue_type']},
115 "priority": {'id': form_data['priority']},
116 "assignee": {'name': form_data['responsible']},
112 "project": {"key": form_data["project"]},
113 "summary": form_data["title"],
114 "description": form_data["content"],
115 "issuetype": {"id": form_data["issue_type"]},
116 "priority": {"id": form_data["priority"]},
117 "assignee": {"name": form_data["responsible"]},
117 118 }
118 119 for issue_type in issue_types:
119 if issue_type['id'] == form_data['issue_type']:
120 for field in issue_type['fields']:
120 if issue_type["id"] == form_data["issue_type"]:
121 for field in issue_type["fields"]:
121 122 # set some defaults for other required fields
122 if field == 'reporter':
123 payload["reporter"] = {'id': self.user_name}
124 if field['required'] and field['id'] not in payload:
125 if field['type'] == 'array':
126 payload[field['id']] = [field['values'][0], ]
127 elif field['type'] == 'string':
128 payload[field['id']] = ''
123 if field == "reporter":
124 payload["reporter"] = {"id": self.user_name}
125 if field["required"] and field["id"] not in payload:
126 if field["type"] == "array":
127 payload[field["id"]] = [field["values"][0]]
128 elif field["type"] == "string":
129 payload[field["id"]] = ""
129 130 new_issue = self.client.create_issue(fields=payload)
130 web_url = self.host_name + '/browse/' + new_issue.key
131 web_url = self.host_name + "/browse/" + new_issue.key
131 132 to_return = {
132 'id': new_issue.id,
133 'resource_url': new_issue.self,
134 'web_url': web_url
133 "id": new_issue.id,
134 "resource_url": new_issue.self,
135 "web_url": web_url,
135 136 }
136 137 return to_return
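The subtle part of `create_issue` is the defaulting loop: any required field the form did not supply gets its first allowed value (arrays) or an empty string (strings). The same loop on fabricated metadata:

    def apply_required_defaults(payload, fields):
        # Mirrors the required-field loop in create_issue above.
        for field in fields:
            if field["required"] and field["id"] not in payload:
                if field["type"] == "array":
                    payload[field["id"]] = [field["values"][0]]
                elif field["type"] == "string":
                    payload[field["id"]] = ""
        return payload

    fields = [
        {"id": "components", "required": True, "type": "array",
         "values": [{"id": "10000", "name": "backend"}]},
        {"id": "environment", "required": True, "type": "string", "values": []},
    ]
    payload = apply_required_defaults({"summary": "crash"}, fields)
    assert payload["components"] == [{"id": "10000", "name": "backend"}]
    assert payload["environment"] == ""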
@@ -18,8 +18,7 b' import logging'
18 18
19 19 import requests
20 20
21 from appenlight.models.integrations import (IntegrationBase,
22 IntegrationException)
21 from appenlight.models.integrations import IntegrationBase, IntegrationException
23 22 from appenlight.lib.ext_json import json
24 23
25 24 _ = str
@@ -32,14 +31,12 b' class NotFoundException(Exception):'
32 31
33 32
34 33 class SlackIntegration(IntegrationBase):
35 __mapper_args__ = {
36 'polymorphic_identity': 'slack'
37 }
34 __mapper_args__ = {"polymorphic_identity": "slack"}
38 35 front_visible = False
39 36 as_alert_channel = True
40 37 supports_report_alerting = True
41 38 action_notification = True
42 integration_action = 'Message via Slack'
39 integration_action = "Message via Slack"
43 40
44 41 @classmethod
45 42 def create_client(cls, api_token):
@@ -52,23 +49,17 b' class SlackClient(object):'
52 49 self.api_url = api_url
53 50
54 51 def make_request(self, data=None):
55 headers = {
56 'User-Agent': 'appenlight-slack',
57 'Content-Type': 'application/json'
58 }
52 headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"}
59 53 try:
60 resp = getattr(requests, 'post')(self.api_url,
61 data=json.dumps(data),
62 headers=headers,
63 timeout=3)
54 resp = getattr(requests, "post")(
55 self.api_url, data=json.dumps(data), headers=headers, timeout=3
56 )
64 57 except Exception as e:
65 raise IntegrationException(
66 _('Error communicating with Slack: %s') % (e,))
58 raise IntegrationException(_("Error communicating with Slack: %s") % (e,))
67 59 if resp.status_code != requests.codes.ok:
68 msg = 'Error communicating with Slack - status code: %s'
60 msg = "Error communicating with Slack - status code: %s"
69 61 raise IntegrationException(msg % resp.status_code)
70 62 return resp
71 63
72 64 def send(self, payload):
73 return self.make_request('/rooms/message', method='post',
74 data=payload).json()
65 return self.make_request("/rooms/message", method="post", data=payload).json()
@@ -18,8 +18,7 b' import logging'
18 18
19 19 import requests
20 20
21 from appenlight.models.integrations import (IntegrationBase,
22 IntegrationException)
21 from appenlight.models.integrations import IntegrationBase, IntegrationException
23 22 from appenlight.models.alert_channel import AlertChannel
24 23 from appenlight.lib.ext_json import json
25 24
@@ -33,14 +32,12 b' class NotFoundException(Exception):'
33 32
34 33
35 34 class WebhooksIntegration(IntegrationBase):
36 __mapper_args__ = {
37 'polymorphic_identity': 'webhooks'
38 }
35 __mapper_args__ = {"polymorphic_identity": "webhooks"}
39 36 front_visible = False
40 37 as_alert_channel = True
41 38 supports_report_alerting = True
42 39 action_notification = True
43 integration_action = 'Message via Webhooks'
40 integration_action = "Message via Webhooks"
44 41
45 42 @classmethod
46 43 def create_client(cls, url):
@@ -52,34 +49,33 b' class WebhooksClient(object):'
52 49 def __init__(self, url):
53 50 self.api_url = url
54 51
55 def make_request(self, url, method='get', data=None):
52 def make_request(self, url, method="get", data=None):
56 53 headers = {
57 'Content-Type': 'application/json',
58 'User-Agent': 'appenlight-webhooks',
54 "Content-Type": "application/json",
55 "User-Agent": "appenlight-webhooks",
59 56 }
60 57 try:
61 58 if data:
62 59 data = json.dumps(data)
63 resp = getattr(requests, method)(url, data=data, headers=headers,
64 timeout=3)
60 resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3)
65 61 except Exception as e:
66 62 raise IntegrationException(
67 _('Error communicating with Webhooks: {}').format(e))
63 _("Error communicating with Webhooks: {}").format(e)
64 )
68 65 if resp.status_code > 299:
69 66 raise IntegrationException(
70 'Error communicating with Webhooks - status code: {}'.format(
71 resp.status_code))
67 "Error communicating with Webhooks - status code: {}".format(
68 resp.status_code
69 )
70 )
72 71 return resp
73 72
74 73 def send_to_hook(self, payload):
75 return self.make_request(self.api_url, method='post',
76 data=payload).json()
74 return self.make_request(self.api_url, method="post", data=payload).json()
77 75
78 76
79 77 class WebhooksAlertChannel(AlertChannel):
80 __mapper_args__ = {
81 'polymorphic_identity': 'webhooks'
82 }
78 __mapper_args__ = {"polymorphic_identity": "webhooks"}
83 79
84 80 def notify_reports(self, **kwargs):
85 81 """
@@ -95,17 +91,28 b' class WebhooksAlertChannel(AlertChannel):'
95 91 """
96 92 template_vars = self.get_notification_basic_vars(kwargs)
97 93 payload = []
98 include_keys = ('id', 'http_status', 'report_type', 'resource_name',
99 'front_url', 'resource_id', 'error', 'url_path',
100 'tags', 'duration')
101
102 for occurences, report in kwargs['reports']:
103 r_dict = report.last_report_ref.get_dict(kwargs['request'],
104 include_keys=include_keys)
105 r_dict['group']['occurences'] = occurences
94 include_keys = (
95 "id",
96 "http_status",
97 "report_type",
98 "resource_name",
99 "front_url",
100 "resource_id",
101 "error",
102 "url_path",
103 "tags",
104 "duration",
105 )
106
107 for occurences, report in kwargs["reports"]:
108 r_dict = report.last_report_ref.get_dict(
109 kwargs["request"], include_keys=include_keys
110 )
111 r_dict["group"]["occurences"] = occurences
106 112 payload.append(r_dict)
107 113 client = WebhooksIntegration.create_client(
108 self.integration.config['reports_webhook'])
114 self.integration.config["reports_webhook"]
115 )
109 116 client.send_to_hook(payload)
110 117
111 118 def notify_alert(self, **kwargs):
@@ -120,19 +127,19 b' class WebhooksAlertChannel(AlertChannel):'
120 127
121 128 """
122 129 payload = {
123 'alert_action': kwargs['event'].unified_alert_action(),
124 'alert_name': kwargs['event'].unified_alert_name(),
125 'event_time': kwargs['event'].end_date or kwargs[
126 'event'].start_date,
127 'resource_name': None,
128 'resource_id': None
130 "alert_action": kwargs["event"].unified_alert_action(),
131 "alert_name": kwargs["event"].unified_alert_name(),
132 "event_time": kwargs["event"].end_date or kwargs["event"].start_date,
133 "resource_name": None,
134 "resource_id": None,
129 135 }
130 if kwargs['event'].values and kwargs['event'].values.get('reports'):
131 payload['reports'] = kwargs['event'].values.get('reports', [])
132 if 'application' in kwargs:
133 payload['resource_name'] = kwargs['application'].resource_name
134 payload['resource_id'] = kwargs['application'].resource_id
136 if kwargs["event"].values and kwargs["event"].values.get("reports"):
137 payload["reports"] = kwargs["event"].values.get("reports", [])
138 if "application" in kwargs:
139 payload["resource_name"] = kwargs["application"].resource_name
140 payload["resource_id"] = kwargs["application"].resource_id
135 141
136 142 client = WebhooksIntegration.create_client(
137 self.integration.config['alerts_webhook'])
143 self.integration.config["alerts_webhook"]
144 )
138 145 client.send_to_hook(payload)
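For receivers, the alert payload is plain JSON shaped like the dict below (all values are illustrative); `reports` is attached only when the event carried report ids:

    # Illustrative shape of the POSTed alert payload.
    example_alert_payload = {
        "alert_action": "OPEN",
        "alert_name": "error_report_alert",
        "event_time": "2018-01-01T12:00:00",  # end_date or start_date
        "resource_name": "my-app",            # filled when "application" is passed
        "resource_id": 1,
        "reports": [],                        # only for report-based events
    }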
@@ -29,21 +29,23 b' log = logging.getLogger(__name__)'
29 29
30 30
31 31 class Log(Base, BaseModel):
32 __tablename__ = 'logs'
33 __table_args__ = {'implicit_returning': False}
32 __tablename__ = "logs"
33 __table_args__ = {"implicit_returning": False}
34 34
35 35 log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
36 resource_id = sa.Column(sa.Integer(),
37 sa.ForeignKey('applications.resource_id',
38 onupdate='CASCADE',
39 ondelete='CASCADE'),
36 resource_id = sa.Column(
37 sa.Integer(),
38 sa.ForeignKey(
39 "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
40 ),
40 41 nullable=False,
41 index=True)
42 log_level = sa.Column(sa.Unicode, nullable=False, index=True,
43 default='INFO')
44 message = sa.Column(sa.UnicodeText(), default='')
45 timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
46 server_default=sa.func.now())
42 index=True,
43 )
44 log_level = sa.Column(sa.Unicode, nullable=False, index=True, default="INFO")
45 message = sa.Column(sa.UnicodeText(), default="")
46 timestamp = sa.Column(
47 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
48 )
47 49 request_id = sa.Column(sa.Unicode())
48 50 namespace = sa.Column(sa.Unicode())
49 51 primary_key = sa.Column(sa.Unicode())
@@ -52,39 +54,40 b' class Log(Base, BaseModel):'
52 54 permanent = sa.Column(sa.Boolean(), nullable=False, default=False)
53 55
54 56 def __str__(self):
55 return self.__unicode__().encode('utf8')
57 return self.__unicode__().encode("utf8")
56 58
57 59 def __unicode__(self):
58 return '<Log id:%s, lv:%s, ns:%s >' % (
59 self.log_id, self.log_level, self.namespace)
60 return "<Log id:%s, lv:%s, ns:%s >" % (
61 self.log_id,
62 self.log_level,
63 self.namespace,
64 )
60 65
61 66 def set_data(self, data, resource):
62 level = data.get('log_level').upper()
67 level = data.get("log_level").upper()
63 68 self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN)
64 self.message = data.get('message', '')
65 server_name = data.get('server', '').lower() or 'unknown'
66 self.tags = {
67 'server_name': server_name
68 }
69 if data.get('tags'):
70 for tag_tuple in data['tags']:
69 self.message = data.get("message", "")
70 server_name = data.get("server", "").lower() or "unknown"
71 self.tags = {"server_name": server_name}
72 if data.get("tags"):
73 for tag_tuple in data["tags"]:
71 74 self.tags[tag_tuple[0]] = tag_tuple[1]
72 self.timestamp = data['date']
73 r_id = data.get('request_id', '')
75 self.timestamp = data["date"]
76 r_id = data.get("request_id", "")
74 77 if not r_id:
75 r_id = ''
76 self.request_id = r_id.replace('-', '')
78 r_id = ""
79 self.request_id = r_id.replace("-", "")
77 80 self.resource_id = resource.resource_id
78 self.namespace = data.get('namespace') or ''
79 self.permanent = data.get('permanent')
80 self.primary_key = data.get('primary_key')
81 self.namespace = data.get("namespace") or ""
82 self.permanent = data.get("permanent")
83 self.primary_key = data.get("primary_key")
81 84 if self.primary_key is not None:
82 self.tags['appenlight_primary_key'] = self.primary_key
85 self.tags["appenlight_primary_key"] = self.primary_key
83 86
84 87 def get_dict(self):
85 88 instance_dict = super(Log, self).get_dict()
86 instance_dict['log_level'] = LogLevel.key_from_value(self.log_level)
87 instance_dict['resource_name'] = self.application.resource_name
89 instance_dict["log_level"] = LogLevel.key_from_value(self.log_level)
90 instance_dict["resource_name"] = self.application.resource_name
88 91 return instance_dict
89 92
90 93 @property
@@ -92,39 +95,38 b' class Log(Base, BaseModel):'
92 95 if not self.primary_key:
93 96 return None
94 97
95 to_hash = '{}_{}_{}'.format(self.resource_id, self.primary_key,
96 self.namespace)
97 return hashlib.sha1(to_hash.encode('utf8')).hexdigest()
98 to_hash = "{}_{}_{}".format(self.resource_id, self.primary_key, self.namespace)
99 return hashlib.sha1(to_hash.encode("utf8")).hexdigest()
98 100
99 101 def es_doc(self):
100 102 tags = {}
101 103 tag_list = []
102 104 for name, value in self.tags.items():
103 105 # replace dot in indexed tag name
104 name = name.replace('.', '_')
106 name = name.replace(".", "_")
105 107 tag_list.append(name)
106 108 tags[name] = {
107 109 "values": convert_es_type(value),
108 "numeric_values": value if (
109 isinstance(value, (int, float)) and
110 not isinstance(value, bool)) else None
110 "numeric_values": value
111 if (isinstance(value, (int, float)) and not isinstance(value, bool))
112 else None,
111 113 }
112 114 return {
113 'pg_id': str(self.log_id),
114 'delete_hash': self.delete_hash,
115 'resource_id': self.resource_id,
116 'request_id': self.request_id,
117 'log_level': LogLevel.key_from_value(self.log_level),
118 'timestamp': self.timestamp,
119 'message': self.message if self.message else '',
120 'namespace': self.namespace if self.namespace else '',
121 'tags': tags,
122 'tag_list': tag_list
115 "pg_id": str(self.log_id),
116 "delete_hash": self.delete_hash,
117 "resource_id": self.resource_id,
118 "request_id": self.request_id,
119 "log_level": LogLevel.key_from_value(self.log_level),
120 "timestamp": self.timestamp,
121 "message": self.message if self.message else "",
122 "namespace": self.namespace if self.namespace else "",
123 "tags": tags,
124 "tag_list": tag_list,
123 125 }
124 126
125 127 @property
126 128 def partition_id(self):
127 129 if self.permanent:
128 return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m')
130 return "rcae_l_%s" % self.timestamp.strftime("%Y_%m")
129 131 else:
130 return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m_%d')
132 return "rcae_l_%s" % self.timestamp.strftime("%Y_%m_%d")
@@ -25,40 +25,44 b' from appenlight.models import Base'
25 25
26 26
27 27 class Metric(Base, BaseModel):
28 __tablename__ = 'metrics'
29 __table_args__ = {'implicit_returning': False}
28 __tablename__ = "metrics"
29 __table_args__ = {"implicit_returning": False}
30 30
31 31 pkey = sa.Column(sa.BigInteger(), primary_key=True)
32 resource_id = sa.Column(sa.Integer(),
33 sa.ForeignKey('applications.resource_id'),
34 nullable=False, primary_key=True)
35 timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
36 server_default=sa.func.now())
32 resource_id = sa.Column(
33 sa.Integer(),
34 sa.ForeignKey("applications.resource_id"),
35 nullable=False,
36 primary_key=True,
37 )
38 timestamp = sa.Column(
39 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
40 )
37 41 tags = sa.Column(JSON(), default={})
38 42 namespace = sa.Column(sa.Unicode(255))
39 43
40 44 @property
41 45 def partition_id(self):
42 return 'rcae_m_%s' % self.timestamp.strftime('%Y_%m_%d')
46 return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d")
43 47
44 48 def es_doc(self):
45 49 tags = {}
46 50 tag_list = []
47 51 for name, value in self.tags.items():
48 52 # replace dot in indexed tag name
49 name = name.replace('.', '_')
53 name = name.replace(".", "_")
50 54 tag_list.append(name)
51 55 tags[name] = {
52 56 "values": convert_es_type(value),
53 "numeric_values": value if (
54 isinstance(value, (int, float)) and
55 not isinstance(value, bool)) else None
57 "numeric_values": value
58 if (isinstance(value, (int, float)) and not isinstance(value, bool))
59 else None,
56 60 }
57 61
58 62 return {
59 'resource_id': self.resource_id,
60 'timestamp': self.timestamp,
61 'namespace': self.namespace,
62 'tags': tags,
63 'tag_list': tag_list
63 "resource_id": self.resource_id,
64 "timestamp": self.timestamp,
65 "namespace": self.namespace,
66 "tags": tags,
67 "tag_list": tag_list,
64 68 }
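
Log.es_doc and Metric.es_doc share the tag-normalization logic reformatted above: dots in tag names are replaced with underscores before indexing, and numeric_values is filled only for genuine numbers. A simplified sketch of that gating, leaving out the convert_es_type call:

    def tag_entry(value):
        # bool subclasses int in Python, so it must be excluded explicitly.
        is_numeric = isinstance(value, (int, float)) and not isinstance(value, bool)
        return {"values": value, "numeric_values": value if is_numeric else None}

    tag_entry(3.5)     # {'values': 3.5, 'numeric_values': 3.5}
    tag_entry(True)    # {'values': True, 'numeric_values': None}
    tag_entry("web1")  # {'values': 'web1', 'numeric_values': None}
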
@@ -22,19 +22,19 b' from . import Base'
22 22
23 23
24 24 class PluginConfig(Base, BaseModel):
25 __tablename__ = 'plugin_configs'
25 __tablename__ = "plugin_configs"
26 26
27 27 id = sa.Column(sa.Integer, primary_key=True)
28 28 plugin_name = sa.Column(sa.Unicode)
29 29 section = sa.Column(sa.Unicode)
30 30 config = sa.Column(JSON, nullable=False)
31 resource_id = sa.Column(sa.Integer(),
32 sa.ForeignKey('resources.resource_id',
33 onupdate='cascade',
34 ondelete='cascade'))
35 owner_id = sa.Column(sa.Integer(),
36 sa.ForeignKey('users.id', onupdate='cascade',
37 ondelete='cascade'))
31 resource_id = sa.Column(
32 sa.Integer(),
33 sa.ForeignKey("resources.resource_id", onupdate="cascade", ondelete="cascade"),
34 )
35 owner_id = sa.Column(
36 sa.Integer(), sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade")
37 )
38 38
39 39 def __json__(self, request):
40 40 return self.get_dict()
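
PluginConfig.__json__ above relies on a Pyramid convention: the default JSON renderer calls __json__(request) on any object it cannot serialize natively. A minimal sketch of an object honoring the same protocol (the class and fields are hypothetical):

    class Example(object):
        # Pyramid's JSON renderer calls this when rendering the object.
        def __json__(self, request):
            return {"id": 1, "name": "example"}
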
@@ -36,51 +36,55 b' from ziggurat_foundations.models.base import BaseModel'
36 36 log = logging.getLogger(__name__)
37 37
38 38 REPORT_TYPE_MATRIX = {
39 'http_status': {"type": 'int',
40 "ops": ('eq', 'ne', 'ge', 'le',)},
41 'group:priority': {"type": 'int',
42 "ops": ('eq', 'ne', 'ge', 'le',)},
43 'duration': {"type": 'float',
44 "ops": ('ge', 'le',)},
45 'url_domain': {"type": 'unicode',
46 "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
47 'url_path': {"type": 'unicode',
48 "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
49 'error': {"type": 'unicode',
50 "ops": ('eq', 'ne', 'startswith', 'endswith', 'contains',)},
51 'tags:server_name': {"type": 'unicode',
52 "ops": ('eq', 'ne', 'startswith', 'endswith',
53 'contains',)},
54 'traceback': {"type": 'unicode',
55 "ops": ('contains',)},
56 'group:occurences': {"type": 'int',
57 "ops": ('eq', 'ne', 'ge', 'le',)}
39 "http_status": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
40 "group:priority": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
41 "duration": {"type": "float", "ops": ("ge", "le")},
42 "url_domain": {
43 "type": "unicode",
44 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
45 },
46 "url_path": {
47 "type": "unicode",
48 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
49 },
50 "error": {
51 "type": "unicode",
52 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
53 },
54 "tags:server_name": {
55 "type": "unicode",
56 "ops": ("eq", "ne", "startswith", "endswith", "contains"),
57 },
58 "traceback": {"type": "unicode", "ops": ("contains",)},
59 "group:occurences": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
58 60 }
59 61
60 62
61 63 class Report(Base, BaseModel):
62 __tablename__ = 'reports'
63 __table_args__ = {'implicit_returning': False}
64 __tablename__ = "reports"
65 __table_args__ = {"implicit_returning": False}
64 66
65 67 id = sa.Column(sa.Integer, nullable=False, primary_key=True)
66 group_id = sa.Column(sa.BigInteger,
67 sa.ForeignKey('reports_groups.id', ondelete='cascade',
68 onupdate='cascade'))
68 group_id = sa.Column(
69 sa.BigInteger,
70 sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
71 )
69 72 resource_id = sa.Column(sa.Integer(), nullable=False, index=True)
70 73 report_type = sa.Column(sa.Integer(), nullable=False, index=True)
71 74 error = sa.Column(sa.UnicodeText(), index=True)
72 75 extra = sa.Column(JSON(), default={})
73 76 request = sa.Column(JSON(), nullable=False, default={})
74 ip = sa.Column(sa.String(39), index=True, default='')
75 username = sa.Column(sa.Unicode(255), default='')
76 user_agent = sa.Column(sa.Unicode(255), default='')
77 ip = sa.Column(sa.String(39), index=True, default="")
78 username = sa.Column(sa.Unicode(255), default="")
79 user_agent = sa.Column(sa.Unicode(255), default="")
77 80 url = sa.Column(sa.UnicodeText(), index=True)
78 81 request_id = sa.Column(sa.Text())
79 82 request_stats = sa.Column(JSON(), nullable=False, default={})
80 83 traceback = sa.Column(JSON(), nullable=False, default=None)
81 84 traceback_hash = sa.Column(sa.Text())
82 start_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
83 server_default=sa.func.now())
85 start_time = sa.Column(
86 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
87 )
84 88 end_time = sa.Column(sa.DateTime())
85 89 duration = sa.Column(sa.Float, default=0)
86 90 http_status = sa.Column(sa.Integer, index=True)
@@ -89,99 +93,104 b' class Report(Base, BaseModel):'
89 93 tags = sa.Column(JSON(), nullable=False, default={})
90 94 language = sa.Column(sa.Integer(), default=0)
91 95 # this is used to determine partition for the report
92 report_group_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
93 server_default=sa.func.now())
96 report_group_time = sa.Column(
97 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
98 )
94 99
95 100 logs = sa.orm.relationship(
96 'Log',
97 lazy='dynamic',
101 "Log",
102 lazy="dynamic",
98 103 passive_deletes=True,
99 104 passive_updates=True,
100 105 primaryjoin="and_(Report.request_id==Log.request_id, "
101 106 "Log.request_id != None, Log.request_id != '')",
102 foreign_keys='[Log.request_id]')
107 foreign_keys="[Log.request_id]",
108 )
103 109
104 slow_calls = sa.orm.relationship('SlowCall',
105 backref='detail',
110 slow_calls = sa.orm.relationship(
111 "SlowCall",
112 backref="detail",
106 113 cascade="all, delete-orphan",
107 114 passive_deletes=True,
108 115 passive_updates=True,
109 order_by='SlowCall.timestamp')
116 order_by="SlowCall.timestamp",
117 )
110 118
111 119 def set_data(self, data, resource, protocol_version=None):
112 self.http_status = data['http_status']
113 self.priority = data['priority']
114 self.error = data['error']
115 report_language = data.get('language', '').lower()
120 self.http_status = data["http_status"]
121 self.priority = data["priority"]
122 self.error = data["error"]
123 report_language = data.get("language", "").lower()
116 124 self.language = getattr(Language, report_language, Language.unknown)
117 125 # we need temp holder here to decide later
118 126 # if we want to commit the tags if report is marked for creation
119 self.tags = {
120 'server_name': data['server'],
121 'view_name': data['view_name']
122 }
123 if data.get('tags'):
124 for tag_tuple in data['tags']:
127 self.tags = {"server_name": data["server"], "view_name": data["view_name"]}
128 if data.get("tags"):
129 for tag_tuple in data["tags"]:
125 130 self.tags[tag_tuple[0]] = tag_tuple[1]
126 self.traceback = data['traceback']
131 self.traceback = data["traceback"]
127 132 stripped_traceback = self.stripped_traceback()
128 tb_repr = repr(stripped_traceback).encode('utf8')
133 tb_repr = repr(stripped_traceback).encode("utf8")
129 134 self.traceback_hash = hashlib.sha1(tb_repr).hexdigest()
130 url_info = urllib.parse.urlsplit(
131 data.get('url', ''), allow_fragments=False)
135 url_info = urllib.parse.urlsplit(data.get("url", ""), allow_fragments=False)
132 136 self.url_domain = url_info.netloc[:128]
133 137 self.url_path = url_info.path[:2048]
134 self.occurences = data['occurences']
138 self.occurences = data["occurences"]
135 139 if self.error:
136 140 self.report_type = ReportType.error
137 141 else:
138 142 self.report_type = ReportType.slow
139 143
140 144 # but if it's status 404, it's a 404 type
141 if self.http_status in [404, '404'] or self.error == '404 Not Found':
145 if self.http_status in [404, "404"] or self.error == "404 Not Found":
142 146 self.report_type = ReportType.not_found
143 self.error = ''
147 self.error = ""
144 148
145 self.generate_grouping_hash(data.get('appenlight.group_string',
146 data.get('group_string')),
149 self.generate_grouping_hash(
150 data.get("appenlight.group_string", data.get("group_string")),
147 151 resource.default_grouping,
148 protocol_version)
152 protocol_version,
153 )
149 154
150 155 # details
151 if data['http_status'] in [404, '404']:
152 data = {"username": data["username"],
156 if data["http_status"] in [404, "404"]:
157 data = {
158 "username": data["username"],
153 159 "ip": data["ip"],
154 160 "url": data["url"],
155 "user_agent": data["user_agent"]}
156 if data.get('HTTP_REFERER') or data.get('http_referer'):
157 data['HTTP_REFERER'] = data.get(
158 'HTTP_REFERER', '') or data.get('http_referer', '')
161 "user_agent": data["user_agent"],
162 }
163 if data.get("HTTP_REFERER") or data.get("http_referer"):
164 data["HTTP_REFERER"] = data.get("HTTP_REFERER", "") or data.get(
165 "http_referer", ""
166 )
159 167
160 168 self.resource_id = resource.resource_id
161 self.username = data['username']
162 self.user_agent = data['user_agent']
163 self.ip = data['ip']
169 self.username = data["username"]
170 self.user_agent = data["user_agent"]
171 self.ip = data["ip"]
164 172 self.extra = {}
165 if data.get('extra'):
166 for extra_tuple in data['extra']:
173 if data.get("extra"):
174 for extra_tuple in data["extra"]:
167 175 self.extra[extra_tuple[0]] = extra_tuple[1]
168 176
169 self.url = data['url']
170 self.request_id = data.get('request_id', '').replace('-', '') or str(
171 uuid.uuid4())
172 request_data = data.get('request', {})
177 self.url = data["url"]
178 self.request_id = data.get("request_id", "").replace("-", "") or str(
179 uuid.uuid4()
180 )
181 request_data = data.get("request", {})
173 182
174 183 self.request = request_data
175 self.request_stats = data.get('request_stats', {})
176 traceback = data.get('traceback')
184 self.request_stats = data.get("request_stats", {})
185 traceback = data.get("traceback")
177 186 if not traceback:
178 traceback = data.get('frameinfo')
187 traceback = data.get("frameinfo")
179 188 self.traceback = traceback
180 start_date = convert_date(data.get('start_time'))
189 start_date = convert_date(data.get("start_time"))
181 190 if not self.start_time or self.start_time < start_date:
182 191 self.start_time = start_date
183 192
184 self.end_time = convert_date(data.get('end_time'), False)
193 self.end_time = convert_date(data.get("end_time"), False)
185 194 self.duration = 0
186 195
187 196 if self.start_time and self.end_time:
@@ -190,81 +199,85 b' class Report(Base, BaseModel):'
190 199
191 200 # update tags with other vars
192 201 if self.username:
193 self.tags['user_name'] = self.username
194 self.tags['report_language'] = Language.key_from_value(self.language)
202 self.tags["user_name"] = self.username
203 self.tags["report_language"] = Language.key_from_value(self.language)
195 204
196 205 def add_slow_calls(self, data, report_group):
197 206 slow_calls = []
198 for call in data.get('slow_calls', []):
207 for call in data.get("slow_calls", []):
199 208 sc_inst = SlowCall()
200 sc_inst.set_data(call, resource_id=self.resource_id,
201 report_group=report_group)
209 sc_inst.set_data(
210 call, resource_id=self.resource_id, report_group=report_group
211 )
202 212 slow_calls.append(sc_inst)
203 213 self.slow_calls.extend(slow_calls)
204 214 return slow_calls
205 215
206 def get_dict(self, request, details=False, exclude_keys=None,
207 include_keys=None):
216 def get_dict(self, request, details=False, exclude_keys=None, include_keys=None):
208 217 from appenlight.models.services.report_group import ReportGroupService
209 instance_dict = super(Report, self).get_dict()
210 instance_dict['req_stats'] = self.req_stats()
211 instance_dict['group'] = {}
212 instance_dict['group']['id'] = self.report_group.id
213 instance_dict['group'][
214 'total_reports'] = self.report_group.total_reports
215 instance_dict['group']['last_report'] = self.report_group.last_report
216 instance_dict['group']['priority'] = self.report_group.priority
217 instance_dict['group']['occurences'] = self.report_group.occurences
218 instance_dict['group'][
219 'last_timestamp'] = self.report_group.last_timestamp
220 instance_dict['group'][
221 'first_timestamp'] = self.report_group.first_timestamp
222 instance_dict['group']['public'] = self.report_group.public
223 instance_dict['group']['fixed'] = self.report_group.fixed
224 instance_dict['group']['read'] = self.report_group.read
225 instance_dict['group'][
226 'average_duration'] = self.report_group.average_duration
227 218
228 instance_dict[
229 'resource_name'] = self.report_group.application.resource_name
230 instance_dict['report_type'] = self.report_type
231
232 if instance_dict['http_status'] == 404 and not instance_dict['error']:
233 instance_dict['error'] = '404 Not Found'
219 instance_dict = super(Report, self).get_dict()
220 instance_dict["req_stats"] = self.req_stats()
221 instance_dict["group"] = {}
222 instance_dict["group"]["id"] = self.report_group.id
223 instance_dict["group"]["total_reports"] = self.report_group.total_reports
224 instance_dict["group"]["last_report"] = self.report_group.last_report
225 instance_dict["group"]["priority"] = self.report_group.priority
226 instance_dict["group"]["occurences"] = self.report_group.occurences
227 instance_dict["group"]["last_timestamp"] = self.report_group.last_timestamp
228 instance_dict["group"]["first_timestamp"] = self.report_group.first_timestamp
229 instance_dict["group"]["public"] = self.report_group.public
230 instance_dict["group"]["fixed"] = self.report_group.fixed
231 instance_dict["group"]["read"] = self.report_group.read
232 instance_dict["group"]["average_duration"] = self.report_group.average_duration
233
234 instance_dict["resource_name"] = self.report_group.application.resource_name
235 instance_dict["report_type"] = self.report_type
236
237 if instance_dict["http_status"] == 404 and not instance_dict["error"]:
238 instance_dict["error"] = "404 Not Found"
234 239
235 240 if details:
236 instance_dict['affected_users_count'] = \
237 ReportGroupService.affected_users_count(self.report_group)
238 instance_dict['top_affected_users'] = [
239 {'username': u.username, 'count': u.count} for u in
240 ReportGroupService.top_affected_users(self.report_group)]
241 instance_dict['application'] = {'integrations': []}
241 instance_dict[
242 "affected_users_count"
243 ] = ReportGroupService.affected_users_count(self.report_group)
244 instance_dict["top_affected_users"] = [
245 {"username": u.username, "count": u.count}
246 for u in ReportGroupService.top_affected_users(self.report_group)
247 ]
248 instance_dict["application"] = {"integrations": []}
242 249 for integration in self.report_group.application.integrations:
243 250 if integration.front_visible:
244 instance_dict['application']['integrations'].append(
245 {'name': integration.integration_name,
246 'action': integration.integration_action})
247 instance_dict['comments'] = [c.get_dict() for c in
248 self.report_group.comments]
249
250 instance_dict['group']['next_report'] = None
251 instance_dict['group']['previous_report'] = None
251 instance_dict["application"]["integrations"].append(
252 {
253 "name": integration.integration_name,
254 "action": integration.integration_action,
255 }
256 )
257 instance_dict["comments"] = [
258 c.get_dict() for c in self.report_group.comments
259 ]
260
261 instance_dict["group"]["next_report"] = None
262 instance_dict["group"]["previous_report"] = None
252 263 next_in_group = self.get_next_in_group(request)
253 264 previous_in_group = self.get_previous_in_group(request)
254 265 if next_in_group:
255 instance_dict['group']['next_report'] = next_in_group
266 instance_dict["group"]["next_report"] = next_in_group
256 267 if previous_in_group:
257 instance_dict['group']['previous_report'] = previous_in_group
268 instance_dict["group"]["previous_report"] = previous_in_group
258 269
259 270 # slow call ordering
260 271 def find_parent(row, data):
261 272 for r in reversed(data):
262 273 try:
263 if (row['timestamp'] > r['timestamp'] and
264 row['end_time'] < r['end_time']):
274 if (
275 row["timestamp"] > r["timestamp"]
276 and row["end_time"] < r["end_time"]
277 ):
265 278 return r
266 279 except TypeError as e:
267 log.warning('reports_view.find_parent: %s' % e)
280 log.warning("reports_view.find_parent: %s" % e)
268 281 return None
269 282
270 283 new_calls = []
@@ -274,24 +287,23 b' class Report(Base, BaseModel):'
274 287 for x in range(len(calls) - 1, -1, -1):
275 288 parent = find_parent(calls[x], calls)
276 289 if parent:
277 parent['children'].append(calls[x])
290 parent["children"].append(calls[x])
278 291 else:
279 292 # no parent at all? append to new calls anyways
280 293 new_calls.append(calls[x])
281 294 # print 'append', calls[x]
282 295 del calls[x]
283 296 break
284 instance_dict['slow_calls'] = new_calls
297 instance_dict["slow_calls"] = new_calls
285 298
286 instance_dict['front_url'] = self.get_public_url(request)
299 instance_dict["front_url"] = self.get_public_url(request)
287 300
288 301 exclude_keys_list = exclude_keys or []
289 302 include_keys_list = include_keys or []
290 303 for k in list(instance_dict.keys()):
291 if k == 'group':
304 if k == "group":
292 305 continue
293 if (k in exclude_keys_list or
294 (k not in include_keys_list and include_keys)):
306 if k in exclude_keys_list or (k not in include_keys_list and include_keys):
295 307 del instance_dict[k]
296 308 return instance_dict
297 309
@@ -301,19 +313,20 b' class Report(Base, BaseModel):'
301 313 "query": {
302 314 "filtered": {
303 315 "filter": {
304 "and": [{"term": {"group_id": self.group_id}},
305 {"range": {"pg_id": {"lt": self.id}}}]
316 "and": [
317 {"term": {"group_id": self.group_id}},
318 {"range": {"pg_id": {"lt": self.id}}},
319 ]
306 320 }
307 321 }
308 322 },
309 "sort": [
310 {"_doc": {"order": "desc"}},
311 ],
323 "sort": [{"_doc": {"order": "desc"}}],
312 324 }
313 result = request.es_conn.search(body=query, index=self.partition_id,
314 doc_type='report')
315 if result['hits']['total']:
316 return result['hits']['hits'][0]['_source']['pg_id']
325 result = request.es_conn.search(
326 body=query, index=self.partition_id, doc_type="report"
327 )
328 if result["hits"]["total"]:
329 return result["hits"]["hits"][0]["_source"]["pg_id"]
317 330
318 331 def get_next_in_group(self, request):
319 332 query = {
@@ -321,19 +334,20 b' class Report(Base, BaseModel):'
321 334 "query": {
322 335 "filtered": {
323 336 "filter": {
324 "and": [{"term": {"group_id": self.group_id}},
325 {"range": {"pg_id": {"gt": self.id}}}]
337 "and": [
338 {"term": {"group_id": self.group_id}},
339 {"range": {"pg_id": {"gt": self.id}}},
340 ]
326 341 }
327 342 }
328 343 },
329 "sort": [
330 {"_doc": {"order": "asc"}},
331 ],
344 "sort": [{"_doc": {"order": "asc"}}],
332 345 }
333 result = request.es_conn.search(body=query, index=self.partition_id,
334 doc_type='report')
335 if result['hits']['total']:
336 return result['hits']['hits'][0]['_source']['pg_id']
346 result = request.es_conn.search(
347 body=query, index=self.partition_id, doc_type="report"
348 )
349 if result["hits"]["total"]:
350 return result["hits"]["hits"][0]["_source"]["pg_id"]
337 351
338 352 def get_public_url(self, request=None, report_group=None, _app_url=None):
339 353 """
@@ -341,53 +355,51 b' class Report(Base, BaseModel):'
341 355 """
342 356 if not request:
343 357 request = get_current_request()
344 url = request.route_url('/', _app_url=_app_url)
358 url = request.route_url("/", _app_url=_app_url)
345 359 if report_group:
346 return (url + 'ui/report/%s/%s') % (report_group.id, self.id)
347 return (url + 'ui/report/%s/%s') % (self.group_id, self.id)
360 return (url + "ui/report/%s/%s") % (report_group.id, self.id)
361 return (url + "ui/report/%s/%s") % (self.group_id, self.id)
348 362
349 363 def req_stats(self):
350 364 stats = self.request_stats.copy()
351 stats['percentages'] = {}
352 stats['percentages']['main'] = 100.0
353 main = stats.get('main', 0.0)
365 stats["percentages"] = {}
366 stats["percentages"]["main"] = 100.0
367 main = stats.get("main", 0.0)
354 368 if not main:
355 369 return None
356 370 for name, call_time in stats.items():
357 if ('calls' not in name and 'main' not in name and
358 'percentages' not in name):
359 stats['main'] -= call_time
360 stats['percentages'][name] = math.floor(
361 (call_time / main * 100.0))
362 stats['percentages']['main'] -= stats['percentages'][name]
363 if stats['percentages']['main'] < 0.0:
364 stats['percentages']['main'] = 0.0
365 stats['main'] = 0.0
371 if "calls" not in name and "main" not in name and "percentages" not in name:
372 stats["main"] -= call_time
373 stats["percentages"][name] = math.floor((call_time / main * 100.0))
374 stats["percentages"]["main"] -= stats["percentages"][name]
375 if stats["percentages"]["main"] < 0.0:
376 stats["percentages"]["main"] = 0.0
377 stats["main"] = 0.0
366 378 return stats
367 379
368 def generate_grouping_hash(self, hash_string=None, default_grouping=None,
369 protocol_version=None):
380 def generate_grouping_hash(
381 self, hash_string=None, default_grouping=None, protocol_version=None
382 ):
370 383 """
371 384 Generates SHA1 hash that will be used to group reports together
372 385 """
373 386 if not hash_string:
374 location = self.tags.get('view_name') or self.url_path;
375 server_name = self.tags.get('server_name') or ''
376 if default_grouping == 'url_traceback':
377 hash_string = '%s_%s_%s' % (self.traceback_hash, location,
378 self.error)
387 location = self.tags.get("view_name") or self.url_path
388 server_name = self.tags.get("server_name") or ""
389 if default_grouping == "url_traceback":
390 hash_string = "%s_%s_%s" % (self.traceback_hash, location, self.error)
379 391 if self.language == Language.javascript:
380 hash_string = '%s_%s' % (self.traceback_hash, self.error)
392 hash_string = "%s_%s" % (self.traceback_hash, self.error)
381 393
382 elif default_grouping == 'traceback_server':
383 hash_string = '%s_%s' % (self.traceback_hash, server_name)
394 elif default_grouping == "traceback_server":
395 hash_string = "%s_%s" % (self.traceback_hash, server_name)
384 396 if self.language == Language.javascript:
385 hash_string = '%s_%s' % (self.traceback_hash, server_name)
397 hash_string = "%s_%s" % (self.traceback_hash, server_name)
386 398 else:
387 hash_string = '%s_%s' % (self.error, location)
399 hash_string = "%s_%s" % (self.error, location)
388 400 month = datetime.utcnow().date().replace(day=1)
389 hash_string = '{}_{}'.format(month, hash_string)
390 binary_string = hash_string.encode('utf8')
401 hash_string = "{}_{}".format(month, hash_string)
402 binary_string = hash_string.encode("utf8")
391 403 self.grouping_hash = hashlib.sha1(binary_string).hexdigest()
392 404 return self.grouping_hash
393 405
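
The grouping hash reformatted above deserves a gloss: the string is prefixed with the first day of the current month before hashing, so otherwise-identical reports start a fresh group every calendar month. A reduced sketch, assuming the default error-and-location grouping:

    import hashlib
    from datetime import datetime

    def grouping_hash(error, location):
        # The month prefix makes groups roll over monthly, as in the code above.
        month = datetime.utcnow().date().replace(day=1)
        hash_string = "{}_{}_{}".format(month, error, location)
        return hashlib.sha1(hash_string.encode("utf8")).hexdigest()
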
@@ -399,7 +411,7 b' class Report(Base, BaseModel):'
399 411
400 412 if isinstance(stripped_traceback, list):
401 413 for row in stripped_traceback:
402 row.pop('vars', None)
414 row.pop("vars", None)
403 415 return stripped_traceback
404 416
405 417 def notify_channel(self, report_group):
@@ -407,78 +419,81 b' class Report(Base, BaseModel):'
407 419 Sends notification to websocket channel
408 420 """
409 421 settings = get_current_registry().settings
410 log.info('notify channelstream')
422 log.info("notify channelstream")
411 423 if self.report_type != ReportType.error:
412 424 return
413 425 payload = {
414 'type': 'message',
415 "user": '__system__',
416 "channel": 'app_%s' % self.resource_id,
417 'message': {
418 'topic': 'front_dashboard.new_topic',
419 'report': {
420 'group': {
421 'priority': report_group.priority,
422 'first_timestamp': report_group.first_timestamp,
423 'last_timestamp': report_group.last_timestamp,
424 'average_duration': report_group.average_duration,
425 'occurences': report_group.occurences
426 "type": "message",
427 "user": "__system__",
428 "channel": "app_%s" % self.resource_id,
429 "message": {
430 "topic": "front_dashboard.new_topic",
431 "report": {
432 "group": {
433 "priority": report_group.priority,
434 "first_timestamp": report_group.first_timestamp,
435 "last_timestamp": report_group.last_timestamp,
436 "average_duration": report_group.average_duration,
437 "occurences": report_group.occurences,
438 },
439 "report_id": self.id,
440 "group_id": self.group_id,
441 "resource_id": self.resource_id,
442 "http_status": self.http_status,
443 "url_domain": self.url_domain,
444 "url_path": self.url_path,
445 "error": self.error or "",
446 "server": self.tags.get("server_name"),
447 "view_name": self.tags.get("view_name"),
448 "front_url": self.get_public_url(),
449 },
426 450 },
427 'report_id': self.id,
428 'group_id': self.group_id,
429 'resource_id': self.resource_id,
430 'http_status': self.http_status,
431 'url_domain': self.url_domain,
432 'url_path': self.url_path,
433 'error': self.error or '',
434 'server': self.tags.get('server_name'),
435 'view_name': self.tags.get('view_name'),
436 'front_url': self.get_public_url(),
437 }
438 }
439
440 451 }
441 channelstream_request(settings['cometd.secret'], '/message', [payload],
442 servers=[settings['cometd_servers']])
452 channelstream_request(
453 settings["cometd.secret"],
454 "/message",
455 [payload],
456 servers=[settings["cometd_servers"]],
457 )
443 458
444 459 def es_doc(self):
445 460 tags = {}
446 461 tag_list = []
447 462 for name, value in self.tags.items():
448 name = name.replace('.', '_')
463 name = name.replace(".", "_")
449 464 tag_list.append(name)
450 465 tags[name] = {
451 466 "values": convert_es_type(value),
452 "numeric_values": value if (
453 isinstance(value, (int, float)) and
454 not isinstance(value, bool)) else None}
467 "numeric_values": value
468 if (isinstance(value, (int, float)) and not isinstance(value, bool))
469 else None,
470 }
455 471
456 if 'user_name' not in self.tags and self.username:
457 tags["user_name"] = {"value": [self.username],
458 "numeric_value": None}
472 if "user_name" not in self.tags and self.username:
473 tags["user_name"] = {"value": [self.username], "numeric_value": None}
459 474 return {
460 '_id': str(self.id),
461 'pg_id': str(self.id),
462 'resource_id': self.resource_id,
463 'http_status': self.http_status or '',
464 'start_time': self.start_time,
465 'end_time': self.end_time,
466 'url_domain': self.url_domain if self.url_domain else '',
467 'url_path': self.url_path if self.url_path else '',
468 'duration': self.duration,
469 'error': self.error if self.error else '',
470 'report_type': self.report_type,
471 'request_id': self.request_id,
472 'ip': self.ip,
473 'group_id': str(self.group_id),
474 '_parent': str(self.group_id),
475 'tags': tags,
476 'tag_list': tag_list
475 "_id": str(self.id),
476 "pg_id": str(self.id),
477 "resource_id": self.resource_id,
478 "http_status": self.http_status or "",
479 "start_time": self.start_time,
480 "end_time": self.end_time,
481 "url_domain": self.url_domain if self.url_domain else "",
482 "url_path": self.url_path if self.url_path else "",
483 "duration": self.duration,
484 "error": self.error if self.error else "",
485 "report_type": self.report_type,
486 "request_id": self.request_id,
487 "ip": self.ip,
488 "group_id": str(self.group_id),
489 "_parent": str(self.group_id),
490 "tags": tags,
491 "tag_list": tag_list,
477 492 }
478 493
479 494 @property
480 495 def partition_id(self):
481 return 'rcae_r_%s' % self.report_group_time.strftime('%Y_%m')
496 return "rcae_r_%s" % self.report_group_time.strftime("%Y_%m")
482 497
483 498 def partition_range(self):
484 499 start_date = self.report_group_time.date().replace(day=1)
@@ -488,27 +503,31 b' class Report(Base, BaseModel):'
488 503
489 504
490 505 def after_insert(mapper, connection, target):
491 if not hasattr(target, '_skip_ft_index'):
506 if not hasattr(target, "_skip_ft_index"):
492 507 data = target.es_doc()
493 data.pop('_id', None)
494 Datastores.es.index(target.partition_id, 'report', data,
495 parent=target.group_id, id=target.id)
508 data.pop("_id", None)
509 Datastores.es.index(
510 target.partition_id, "report", data, parent=target.group_id, id=target.id
511 )
496 512
497 513
498 514 def after_update(mapper, connection, target):
499 if not hasattr(target, '_skip_ft_index'):
515 if not hasattr(target, "_skip_ft_index"):
500 516 data = target.es_doc()
501 data.pop('_id', None)
502 Datastores.es.index(target.partition_id, 'report', data,
503 parent=target.group_id, id=target.id)
517 data.pop("_id", None)
518 Datastores.es.index(
519 target.partition_id, "report", data, parent=target.group_id, id=target.id
520 )
504 521
505 522
506 523 def after_delete(mapper, connection, target):
507 if not hasattr(target, '_skip_ft_index'):
508 query = {"query":{'term': {'pg_id': target.id}}}
509 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report'), body=query)
524 if not hasattr(target, "_skip_ft_index"):
525 query = {"query": {"term": {"pg_id": target.id}}}
526 Datastores.es.transport.perform_request(
527 "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
528 )
510 529
511 530
512 sa.event.listen(Report, 'after_insert', after_insert)
513 sa.event.listen(Report, 'after_update', after_update)
514 sa.event.listen(Report, 'after_delete', after_delete)
531 sa.event.listen(Report, "after_insert", after_insert)
532 sa.event.listen(Report, "after_update", after_update)
533 sa.event.listen(Report, "after_delete", after_delete)
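
The three listeners registered above keep Elasticsearch in lockstep with the ORM: every insert and update re-indexes the row, and every delete issues a delete-by-query, unless the instance carries the _skip_ft_index marker. The same pattern in miniature, with a stand-in model and a print instead of a real indexing call:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Note(Base):  # stand-in model, not part of appenlight
        __tablename__ = "notes"
        id = sa.Column(sa.Integer, primary_key=True)

    def after_insert(mapper, connection, target):
        # Skip indexing when the instance opts out via the marker attribute.
        if not hasattr(target, "_skip_ft_index"):
            print("would index note", target.id)  # appenlight calls Datastores.es.index here

    sa.event.listen(Note, "after_insert", after_insert)
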
@@ -20,13 +20,16 b' import sqlalchemy as sa'
20 20
21 21
22 22 class ReportAssignment(Base, BaseModel):
23 __tablename__ = 'reports_assignments'
23 __tablename__ = "reports_assignments"
24 24
25 group_id = sa.Column(sa.BigInteger,
26 sa.ForeignKey('reports_groups.id', ondelete='cascade',
27 onupdate='cascade'),
28 primary_key=True)
29 owner_id = sa.Column(sa.Integer,
30 sa.ForeignKey('users.id', onupdate='CASCADE',
31 ondelete='CASCADE'), primary_key=True)
25 group_id = sa.Column(
26 sa.BigInteger,
27 sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
28 primary_key=True,
29 )
30 owner_id = sa.Column(
31 sa.Integer,
32 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
33 primary_key=True,
34 )
32 35 report_time = sa.Column(sa.DateTime(), nullable=False)
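
ReportAssignment above uses a composite primary key: both foreign-key columns are flagged primary_key=True, so a given (group, owner) pair can be assigned at most once. The shape of that, reduced to a stand-in model:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Assignment(Base):  # stand-in for ReportAssignment
        __tablename__ = "assignments"
        # Two columns marked primary_key=True form one composite key.
        group_id = sa.Column(sa.BigInteger, primary_key=True)
        owner_id = sa.Column(sa.Integer, primary_key=True)
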
@@ -22,23 +22,23 b' from ziggurat_foundations.models.base import BaseModel'
22 22
23 23
24 24 class ReportComment(Base, BaseModel):
25 __tablename__ = 'reports_comments'
25 __tablename__ = "reports_comments"
26 26
27 27 comment_id = sa.Column(sa.Integer, nullable=False, primary_key=True)
28 group_id = sa.Column(sa.BigInteger,
29 sa.ForeignKey('reports_groups.id', ondelete='cascade',
30 onupdate='cascade'))
31 body = sa.Column(sa.UnicodeText(), default='')
32 owner_id = sa.Column(sa.Integer,
33 sa.ForeignKey('users.id', onupdate='CASCADE',
34 ondelete='CASCADE'))
35 created_timestamp = sa.Column(sa.DateTime(),
36 default=datetime.utcnow,
37 server_default=sa.func.now())
28 group_id = sa.Column(
29 sa.BigInteger,
30 sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
31 )
32 body = sa.Column(sa.UnicodeText(), default="")
33 owner_id = sa.Column(
34 sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
35 )
36 created_timestamp = sa.Column(
37 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
38 )
38 39 report_time = sa.Column(sa.DateTime(), nullable=False)
39 40
40 owner = sa.orm.relationship('User',
41 lazy='joined')
41 owner = sa.orm.relationship("User", lazy="joined")
42 42
43 43 @property
44 44 def processed_body(self):
@@ -46,5 +46,5 b' class ReportComment(Base, BaseModel):'
46 46
47 47 def get_dict(self):
48 48 instance_dict = super(ReportComment, self).get_dict()
49 instance_dict['user_name'] = self.owner.user_name
49 instance_dict["user_name"] = self.owner.user_name
50 50 return instance_dict
@@ -33,26 +33,30 b' log = logging.getLogger(__name__)'
33 33
34 34
35 35 class ReportGroup(Base, BaseModel):
36 __tablename__ = 'reports_groups'
37 __table_args__ = {'implicit_returning': False}
36 __tablename__ = "reports_groups"
37 __table_args__ = {"implicit_returning": False}
38 38
39 39 id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
40 resource_id = sa.Column(sa.Integer(),
41 sa.ForeignKey('applications.resource_id',
42 onupdate='CASCADE',
43 ondelete='CASCADE'),
40 resource_id = sa.Column(
41 sa.Integer(),
42 sa.ForeignKey(
43 "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
44 ),
44 45 nullable=False,
45 index=True)
46 priority = sa.Column(sa.Integer, nullable=False, index=True, default=5,
47 server_default='5')
48 first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
49 server_default=sa.func.now())
50 last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
51 server_default=sa.func.now())
46 index=True,
47 )
48 priority = sa.Column(
49 sa.Integer, nullable=False, index=True, default=5, server_default="5"
50 )
51 first_timestamp = sa.Column(
52 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
53 )
54 last_timestamp = sa.Column(
55 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
56 )
52 57 error = sa.Column(sa.UnicodeText(), index=True)
53 grouping_hash = sa.Column(sa.String(40), default='')
54 triggered_postprocesses_ids = sa.Column(JSON(), nullable=False,
55 default=list)
58 grouping_hash = sa.Column(sa.String(40), default="")
59 triggered_postprocesses_ids = sa.Column(JSON(), nullable=False, default=list)
56 60 report_type = sa.Column(sa.Integer, default=1)
57 61 total_reports = sa.Column(sa.Integer, default=1)
58 62 last_report = sa.Column(sa.Integer)
@@ -64,50 +68,58 b' class ReportGroup(Base, BaseModel):'
64 68 notified = sa.Column(sa.Boolean(), index=True, default=False)
65 69 public = sa.Column(sa.Boolean(), index=True, default=False)
66 70
67 reports = sa.orm.relationship('Report',
68 lazy='dynamic',
69 backref='report_group',
71 reports = sa.orm.relationship(
72 "Report",
73 lazy="dynamic",
74 backref="report_group",
70 75 cascade="all, delete-orphan",
71 76 passive_deletes=True,
72 passive_updates=True, )
77 passive_updates=True,
78 )
73 79
74 comments = sa.orm.relationship('ReportComment',
75 lazy='dynamic',
76 backref='report',
80 comments = sa.orm.relationship(
81 "ReportComment",
82 lazy="dynamic",
83 backref="report",
77 84 cascade="all, delete-orphan",
78 85 passive_deletes=True,
79 86 passive_updates=True,
80 order_by="ReportComment.comment_id")
87 order_by="ReportComment.comment_id",
88 )
81 89
82 assigned_users = sa.orm.relationship('User',
90 assigned_users = sa.orm.relationship(
91 "User",
83 92 backref=sa.orm.backref(
84 'assigned_reports_relation',
85 lazy='dynamic',
86 order_by=sa.desc(
87 sa.text("reports_groups.id"))
93 "assigned_reports_relation",
94 lazy="dynamic",
95 order_by=sa.desc(sa.text("reports_groups.id")),
88 96 ),
89 97 passive_deletes=True,
90 98 passive_updates=True,
91 secondary='reports_assignments',
92 order_by="User.user_name")
93
94 stats = sa.orm.relationship('ReportStat',
95 lazy='dynamic',
96 backref='report',
99 secondary="reports_assignments",
100 order_by="User.user_name",
101 )
102
103 stats = sa.orm.relationship(
104 "ReportStat",
105 lazy="dynamic",
106 backref="report",
97 107 passive_deletes=True,
98 passive_updates=True, )
108 passive_updates=True,
109 )
99 110
100 last_report_ref = sa.orm.relationship('Report',
111 last_report_ref = sa.orm.relationship(
112 "Report",
101 113 uselist=False,
102 primaryjoin="ReportGroup.last_report "
103 "== Report.id",
114 primaryjoin="ReportGroup.last_report " "== Report.id",
104 115 foreign_keys="Report.id",
105 116 cascade="all, delete-orphan",
106 117 passive_deletes=True,
107 passive_updates=True, )
118 passive_updates=True,
119 )
108 120
109 121 def __repr__(self):
110 return '<ReportGroup id:{}>'.format(self.id)
122 return "<ReportGroup id:{}>".format(self.id)
111 123
112 124 def get_report(self, report_id=None, public=False):
113 125 """
@@ -121,8 +133,8 b' class ReportGroup(Base, BaseModel):'
121 133 return self.reports.filter(Report.id == report_id).first()
122 134
123 135 def get_public_url(self, request, _app_url=None):
124 url = request.route_url('/', _app_url=_app_url)
125 return (url + 'ui/report/%s') % self.id
136 url = request.route_url("/", _app_url=_app_url)
137 return (url + "ui/report/%s") % self.id
126 138
127 139 def run_postprocessing(self, report):
128 140 """
@@ -135,12 +147,15 b' class ReportGroup(Base, BaseModel):'
135 147 rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
136 148 report_dict = report.get_dict(request)
137 149 # if was not processed yet
138 if (rule_obj.match(report_dict) and
139 action.pkey not in self.triggered_postprocesses_ids):
150 if (
151 rule_obj.match(report_dict)
152 and action.pkey not in self.triggered_postprocesses_ids
153 ):
140 154 action.postprocess(self)
141 155 # this way sqla can track mutation of list
142 self.triggered_postprocesses_ids = \
143 self.triggered_postprocesses_ids + [action.pkey]
156 self.triggered_postprocesses_ids = self.triggered_postprocesses_ids + [
157 action.pkey
158 ]
144 159
145 160 get_db_session(None, self).flush()
146 161 # do not go out of bounds
@@ -151,31 +166,30 b' class ReportGroup(Base, BaseModel):'
151 166
152 167 def get_dict(self, request):
153 168 instance_dict = super(ReportGroup, self).get_dict()
154 instance_dict['server_name'] = self.get_report().tags.get(
155 'server_name')
156 instance_dict['view_name'] = self.get_report().tags.get('view_name')
157 instance_dict['resource_name'] = self.application.resource_name
158 instance_dict['report_type'] = self.get_report().report_type
159 instance_dict['url_path'] = self.get_report().url_path
160 instance_dict['front_url'] = self.get_report().get_public_url(request)
161 del instance_dict['triggered_postprocesses_ids']
169 instance_dict["server_name"] = self.get_report().tags.get("server_name")
170 instance_dict["view_name"] = self.get_report().tags.get("view_name")
171 instance_dict["resource_name"] = self.application.resource_name
172 instance_dict["report_type"] = self.get_report().report_type
173 instance_dict["url_path"] = self.get_report().url_path
174 instance_dict["front_url"] = self.get_report().get_public_url(request)
175 del instance_dict["triggered_postprocesses_ids"]
162 176 return instance_dict
163 177
164 178 def es_doc(self):
165 179 return {
166 '_id': str(self.id),
167 'pg_id': str(self.id),
168 'resource_id': self.resource_id,
169 'error': self.error,
170 'fixed': self.fixed,
171 'public': self.public,
172 'read': self.read,
173 'priority': self.priority,
174 'occurences': self.occurences,
175 'average_duration': self.average_duration,
176 'summed_duration': self.summed_duration,
177 'first_timestamp': self.first_timestamp,
178 'last_timestamp': self.last_timestamp
180 "_id": str(self.id),
181 "pg_id": str(self.id),
182 "resource_id": self.resource_id,
183 "error": self.error,
184 "fixed": self.fixed,
185 "public": self.public,
186 "read": self.read,
187 "priority": self.priority,
188 "occurences": self.occurences,
189 "average_duration": self.average_duration,
190 "summed_duration": self.summed_duration,
191 "first_timestamp": self.first_timestamp,
192 "last_timestamp": self.last_timestamp,
179 193 }
180 194
181 195 def set_notification_info(self, notify_10=False, notify_100=False):
@@ -184,53 +198,54 b' class ReportGroup(Base, BaseModel):'
184 198 """
185 199 current_time = datetime.utcnow().replace(second=0, microsecond=0)
186 200 # global app counter
187 key = REDIS_KEYS['counters']['reports_per_type'].format(
188 self.report_type, current_time)
201 key = REDIS_KEYS["counters"]["reports_per_type"].format(
202 self.report_type, current_time
203 )
189 204 redis_pipeline = Datastores.redis.pipeline()
190 205 redis_pipeline.incr(key)
191 206 redis_pipeline.expire(key, 3600 * 24)
192 207 # detailed app notification for alerts and notifications
208 redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
193 209 redis_pipeline.sadd(
194 REDIS_KEYS['apps_that_had_reports'], self.resource_id)
195 redis_pipeline.sadd(
196 REDIS_KEYS['apps_that_had_reports_alerting'], self.resource_id)
210 REDIS_KEYS["apps_that_had_reports_alerting"], self.resource_id
211 )
197 212 # only notify for exceptions here
198 213 if self.report_type == ReportType.error:
214 redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
199 215 redis_pipeline.sadd(
200 REDIS_KEYS['apps_that_had_reports'], self.resource_id)
201 redis_pipeline.sadd(
202 REDIS_KEYS['apps_that_had_error_reports_alerting'],
203 self.resource_id)
204 key = REDIS_KEYS['counters']['report_group_occurences'].format(self.id)
216 REDIS_KEYS["apps_that_had_error_reports_alerting"], self.resource_id
217 )
218 key = REDIS_KEYS["counters"]["report_group_occurences"].format(self.id)
205 219 redis_pipeline.incr(key)
206 220 redis_pipeline.expire(key, 3600 * 24)
207 key = REDIS_KEYS['counters']['report_group_occurences_alerting'].format(
208 self.id)
221 key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(self.id)
209 222 redis_pipeline.incr(key)
210 223 redis_pipeline.expire(key, 3600 * 24)
211 224
212 225 if notify_10:
213 key = REDIS_KEYS['counters'][
214 'report_group_occurences_10th'].format(self.id)
226 key = REDIS_KEYS["counters"]["report_group_occurences_10th"].format(self.id)
215 227 redis_pipeline.setex(key, 3600 * 24, 1)
216 228 if notify_100:
217 key = REDIS_KEYS['counters'][
218 'report_group_occurences_100th'].format(self.id)
229 key = REDIS_KEYS["counters"]["report_group_occurences_100th"].format(
230 self.id
231 )
219 232 redis_pipeline.setex(key, 3600 * 24, 1)
220 233
221 key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
222 self.report_type, self.resource_id)
234 key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
235 self.report_type, self.resource_id
236 )
223 237 redis_pipeline.sadd(key, self.id)
224 238 redis_pipeline.expire(key, 3600 * 24)
225 key = REDIS_KEYS['reports_to_notify_per_type_per_app_alerting'].format(
226 self.report_type, self.resource_id)
239 key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
240 self.report_type, self.resource_id
241 )
227 242 redis_pipeline.sadd(key, self.id)
228 243 redis_pipeline.expire(key, 3600 * 24)
229 244 redis_pipeline.execute()
230 245
231 246 @property
232 247 def partition_id(self):
233 return 'rcae_r_%s' % self.first_timestamp.strftime('%Y_%m')
248 return "rcae_r_%s" % self.first_timestamp.strftime("%Y_%m")
234 249
235 250 def partition_range(self):
236 251 start_date = self.first_timestamp.date().replace(day=1)
@@ -240,29 +255,33 b' class ReportGroup(Base, BaseModel):'
240 255
241 256
242 257 def after_insert(mapper, connection, target):
243 if not hasattr(target, '_skip_ft_index'):
258 if not hasattr(target, "_skip_ft_index"):
244 259 data = target.es_doc()
245 data.pop('_id', None)
246 Datastores.es.index(target.partition_id, 'report_group',
247 data, id=target.id)
260 data.pop("_id", None)
261 Datastores.es.index(target.partition_id, "report_group", data, id=target.id)
248 262
249 263
250 264 def after_update(mapper, connection, target):
251 if not hasattr(target, '_skip_ft_index'):
265 if not hasattr(target, "_skip_ft_index"):
252 266 data = target.es_doc()
253 data.pop('_id', None)
254 Datastores.es.index(target.partition_id, 'report_group',
255 data, id=target.id)
267 data.pop("_id", None)
268 Datastores.es.index(target.partition_id, "report_group", data, id=target.id)
256 269
257 270
258 271 def after_delete(mapper, connection, target):
259 query = {"query": {'term': {'group_id': target.id}}}
272 query = {"query": {"term": {"group_id": target.id}}}
260 273 # delete by query
261 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report'), body=query)
262 query = {"query": {'term': {'pg_id': target.id}}}
263 Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report_group'), body=query)
264
265
266 sa.event.listen(ReportGroup, 'after_insert', after_insert)
267 sa.event.listen(ReportGroup, 'after_update', after_update)
268 sa.event.listen(ReportGroup, 'after_delete', after_delete)
274 Datastores.es.transport.perform_request(
275 "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
276 )
277 query = {"query": {"term": {"pg_id": target.id}}}
278 Datastores.es.transport.perform_request(
279 "DELETE",
280 "/{}/{}/_query".format(target.partition_id, "report_group"),
281 body=query,
282 )
283
284
285 sa.event.listen(ReportGroup, "after_insert", after_insert)
286 sa.event.listen(ReportGroup, "after_update", after_update)
287 sa.event.listen(ReportGroup, "after_delete", after_delete)
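
set_notification_info above batches all counter updates into a single Redis pipeline, and every counter key gets a 24-hour TTL so stale counters expire on their own. The shape of that pattern, assuming a redis-py client and an illustrative key name rather than the real REDIS_KEYS values:

    import redis

    r = redis.StrictRedis()
    pipe = r.pipeline()
    key = "counters:report_group_occurences:%s" % 123  # illustrative key
    pipe.incr(key)
    pipe.expire(key, 3600 * 24)  # counters roll off after a day
    pipe.execute()  # one round trip for the whole batch
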
@@ -22,53 +22,58 b' from ziggurat_foundations.models.base import BaseModel'
22 22
23 23
24 24 class ReportStat(Base, BaseModel):
25 __tablename__ = 'reports_stats'
26 __table_args__ = {'implicit_returning': False}
25 __tablename__ = "reports_stats"
26 __table_args__ = {"implicit_returning": False}
27 27
28 group_id = sa.Column(sa.BigInteger(),
29 sa.ForeignKey('reports_groups.id'),
30 nullable=False)
31 resource_id = sa.Column(sa.Integer(),
32 sa.ForeignKey('applications.resource_id'),
33 nullable=False)
28 group_id = sa.Column(
29 sa.BigInteger(), sa.ForeignKey("reports_groups.id"), nullable=False
30 )
31 resource_id = sa.Column(
32 sa.Integer(), sa.ForeignKey("applications.resource_id"), nullable=False
33 )
34 34 start_interval = sa.Column(sa.DateTime(), nullable=False)
35 35 occurences = sa.Column(sa.Integer, nullable=True, default=0)
36 owner_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
37 nullable=True)
36 owner_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
38 37 type = sa.Column(sa.Integer, nullable=True, default=0)
39 38 duration = sa.Column(sa.Float, nullable=True, default=0)
40 39 id = sa.Column(sa.BigInteger, nullable=False, primary_key=True)
41 server_name = sa.Column(sa.Unicode(128), nullable=False, default='')
42 view_name = sa.Column(sa.Unicode(128), nullable=False, default='')
40 server_name = sa.Column(sa.Unicode(128), nullable=False, default="")
41 view_name = sa.Column(sa.Unicode(128), nullable=False, default="")
43 42
44 43 @property
45 44 def partition_id(self):
46 return 'rcae_r_%s' % self.start_interval.strftime('%Y_%m')
45 return "rcae_r_%s" % self.start_interval.strftime("%Y_%m")
47 46
48 47 def es_doc(self):
49 48 return {
50 'resource_id': self.resource_id,
51 'timestamp': self.start_interval,
52 'pg_id': str(self.id),
53 'permanent': True,
54 'request_id': None,
55 'log_level': 'ERROR',
56 'message': None,
57 'namespace': 'appenlight.error',
58 'tags': {
59 'duration': {'values': self.duration,
60 'numeric_values': self.duration},
61 'occurences': {'values': self.occurences,
62 'numeric_values': self.occurences},
63 'group_id': {'values': self.group_id,
64 'numeric_values': self.group_id},
65 'type': {'values': ReportType.key_from_value(self.type),
66 'numeric_values': self.type},
67 'server_name': {'values': self.server_name,
68 'numeric_values': None},
69 'view_name': {'values': self.view_name,
70 'numeric_values': None},
49 "resource_id": self.resource_id,
50 "timestamp": self.start_interval,
51 "pg_id": str(self.id),
52 "permanent": True,
53 "request_id": None,
54 "log_level": "ERROR",
55 "message": None,
56 "namespace": "appenlight.error",
57 "tags": {
58 "duration": {"values": self.duration, "numeric_values": self.duration},
59 "occurences": {
60 "values": self.occurences,
61 "numeric_values": self.occurences,
71 62 },
72 'tag_list': ['duration', 'occurences', 'group_id', 'type',
73 'server_name', 'view_name']
63 "group_id": {"values": self.group_id, "numeric_values": self.group_id},
64 "type": {
65 "values": ReportType.key_from_value(self.type),
66 "numeric_values": self.type,
67 },
68 "server_name": {"values": self.server_name, "numeric_values": None},
69 "view_name": {"values": self.view_name, "numeric_values": None},
70 },
71 "tag_list": [
72 "duration",
73 "occurences",
74 "group_id",
75 "type",
76 "server_name",
77 "view_name",
78 ],
74 79 }
@@ -23,11 +23,13 b' from ziggurat_foundations.models.services.resource import ResourceService'
23 23
24 24
25 25 class Resource(ResourceMixin, Base):
26 events = sa.orm.relationship('Event',
27 lazy='dynamic',
28 backref='resource',
26 events = sa.orm.relationship(
27 "Event",
28 lazy="dynamic",
29 backref="resource",
29 30 passive_deletes=True,
30 passive_updates=True)
31 passive_updates=True,
32 )
31 33
32 34 @property
33 35 def owner_user_name(self):
@@ -39,46 +41,56 b' class Resource(ResourceMixin, Base):'
39 41 if self.owner_group:
40 42 return self.owner_group.group_name
41 43
42 def get_dict(self, exclude_keys=None, include_keys=None,
43 include_perms=False, include_processing_rules=False):
44 def get_dict(
45 self,
46 exclude_keys=None,
47 include_keys=None,
48 include_perms=False,
49 include_processing_rules=False,
50 ):
44 51 result = super(Resource, self).get_dict(exclude_keys, include_keys)
45 result['possible_permissions'] = self.__possible_permissions__
52 result["possible_permissions"] = self.__possible_permissions__
46 53 if include_perms:
47 result['current_permissions'] = self.user_permissions_list
54 result["current_permissions"] = self.user_permissions_list
48 55 else:
49 result['current_permissions'] = []
56 result["current_permissions"] = []
50 57 if include_processing_rules:
51 result["postprocessing_rules"] = [rule.get_dict() for rule
52 in self.postprocess_conf]
58 result["postprocessing_rules"] = [
59 rule.get_dict() for rule in self.postprocess_conf
60 ]
53 61 else:
54 62 result["postprocessing_rules"] = []
55 63 exclude_keys_list = exclude_keys or []
56 64 include_keys_list = include_keys or []
57 65 d = {}
58 66 for k in result.keys():
59 if (k not in exclude_keys_list and
60 (k in include_keys_list or not include_keys)):
67 if k not in exclude_keys_list and (
68 k in include_keys_list or not include_keys
69 ):
61 70 d[k] = result[k]
62 for k in ['owner_user_name', 'owner_group_name']:
63 if (k not in exclude_keys_list and
64 (k in include_keys_list or not include_keys)):
71 for k in ["owner_user_name", "owner_group_name"]:
72 if k not in exclude_keys_list and (
73 k in include_keys_list or not include_keys
74 ):
65 75 d[k] = getattr(self, k)
66 76 return d
67 77
68 78 @property
69 79 def user_permissions_list(self):
70 return [permission_tuple_to_dict(perm) for perm in
71 ResourceService.users_for_perm(
72 self, '__any_permission__', limit_group_permissions=True)]
80 return [
81 permission_tuple_to_dict(perm)
82 for perm in ResourceService.users_for_perm(
83 self, "__any_permission__", limit_group_permissions=True
84 )
85 ]
73 86
74 87 @property
75 88 def __acl__(self):
76 89 acls = []
77 90
78 91 if self.owner_user_id:
79 acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS,), ])
92 acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS)])
80 93
81 94 if self.owner_group_id:
82 acls.extend([(Allow, "group:%s" % self.owner_group_id,
83 ALL_PERMISSIONS,), ])
95 acls.extend([(Allow, "group:%s" % self.owner_group_id, ALL_PERMISSIONS)])
84 96 return acls
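
Note on the __acl__ hunk above: Pyramid treats the returned list as ordered ACEs of the form (action, principal, permission), and ALL_PERMISSIONS matches any permission name. A minimal, self-contained sketch of that contract (the OwnedResource class and the principal values are illustrative, not part of this diff):

    from pyramid.authorization import ACLAuthorizationPolicy
    from pyramid.security import Allow, ALL_PERMISSIONS

    class OwnedResource:
        # stand-in carrying the same ACL shape as Resource.__acl__ above
        def __init__(self, owner_user_id, owner_group_id=None):
            self.owner_user_id = owner_user_id
            self.owner_group_id = owner_group_id

        @property
        def __acl__(self):
            acls = [(Allow, self.owner_user_id, ALL_PERMISSIONS)]
            if self.owner_group_id:
                # group principals use the same "group:<id>" convention as above
                acls.append((Allow, "group:%s" % self.owner_group_id, ALL_PERMISSIONS))
            return acls

    policy = ACLAuthorizationPolicy()
    resource = OwnedResource(owner_user_id=1, owner_group_id=2)
    assert policy.permits(resource, [1], "view")
    assert policy.permits(resource, ["group:2"], "edit")
    assert not policy.permits(resource, ["group:3"], "edit")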
@@ -13,4 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
@@ -33,7 +33,6 b' log = logging.getLogger(__name__)'
33 33
34 34
35 35 class ApplicationService(BaseService):
36
37 36 @classmethod
38 37 def all(cls, db_session=None):
39 38 db_session = get_db_session(db_session)
@@ -51,9 +50,9 b' class ApplicationService(BaseService):'
51 50 @classmethod
52 51 def by_api_key_cached(cls, db_session=None):
53 52 db_session = get_db_session(db_session)
54 cache_region = get_region('redis_min_1')
53 cache_region = get_region("redis_min_1")
55 54
56 @cache_region.cache_on_arguments('ApplicationService.by_api_key')
55 @cache_region.cache_on_arguments("ApplicationService.by_api_key")
57 56 def cached(*args, **kwargs):
58 57 app = cls.by_api_key(*args, db_session=db_session, **kwargs)
59 58 if app:
@@ -63,10 +62,11 b' class ApplicationService(BaseService):'
63 62 return cached
64 63
65 64 @classmethod
66 def by_public_api_key(cls, api_key, db_session=None, from_cache=False,
67 request=None):
65 def by_public_api_key(
66 cls, api_key, db_session=None, from_cache=False, request=None
67 ):
68 68 db_session = get_db_session(db_session)
69 cache_region = get_region('redis_min_1')
69 cache_region = get_region("redis_min_1")
70 70
71 71 def uncached(api_key):
72 72 q = db_session.query(Application)
@@ -75,8 +75,8 b' class ApplicationService(BaseService):'
75 75 return q.first()
76 76
77 77 if from_cache:
78 @cache_region.cache_on_arguments(
79 'ApplicationService.by_public_api_key')
78
79 @cache_region.cache_on_arguments("ApplicationService.by_public_api_key")
80 80 def cached(api_key):
81 81 app = uncached(api_key)
82 82 if app:
@@ -98,9 +98,9 b' class ApplicationService(BaseService):'
98 98 @classmethod
99 99 def by_id_cached(cls, db_session=None):
100 100 db_session = get_db_session(db_session)
101 cache_region = get_region('redis_min_1')
101 cache_region = get_region("redis_min_1")
102 102
103 @cache_region.cache_on_arguments('ApplicationService.by_id')
103 @cache_region.cache_on_arguments("ApplicationService.by_id")
104 104 def cached(*args, **kwargs):
105 105 app = cls.by_id(*args, db_session=db_session, **kwargs)
106 106 if app:
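
The *_cached classmethods in these hunks share one shape: build a closure bound to the current db_session, memoize it with cache_on_arguments under a namespace, and hand the closure back so callers invoke by_api_key_cached(session)(key). A standalone sketch of that shape using dogpile.cache (the region setup and loader are illustrative assumptions, not this project's wiring):

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=60)

    def by_key_cached(loader, db_session):
        # returns a memoized closure, mirroring by_api_key_cached above
        @region.cache_on_arguments("ApplicationService.by_api_key")
        def cached(key):
            return loader(key, db_session)

        return cached

    calls = []
    lookup = by_key_cached(lambda k, s: calls.append(k) or k.upper(), None)
    assert lookup("abc") == "ABC" and lookup("abc") == "ABC"
    assert calls == ["abc"]  # the second call was served from the cache region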
@@ -119,10 +119,9 b' class ApplicationService(BaseService):'
119 119 @classmethod
120 120 def by_http_referer(cls, referer_string, db_session=None):
121 121 db_session = get_db_session(db_session)
122 domain = urllib.parse.urlsplit(
123 referer_string, allow_fragments=False).netloc
122 domain = urllib.parse.urlsplit(referer_string, allow_fragments=False).netloc
124 123 if domain:
125 if domain.startswith('www.'):
124 if domain.startswith("www."):
126 125 domain = domain[4:]
127 126 q = db_session.query(Application).filter(Application.domain == domain)
128 127 return q.first()
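
by_http_referer above reduces the referer to a bare domain before matching it against Application.domain; the parsing step in isolation:

    import urllib.parse

    def referer_domain(referer_string):
        # netloc of the referer, with a leading "www." stripped, as above
        domain = urllib.parse.urlsplit(referer_string, allow_fragments=False).netloc
        if domain.startswith("www."):
            domain = domain[4:]
        return domain

    assert referer_domain("https://www.example.com/page?x=1") == "example.com"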
@@ -132,7 +131,8 b' class ApplicationService(BaseService):'
132 131 db_session = get_db_session(db_session)
133 132 q = db_session.query(Application)
134 133 q2 = ReportGroup.last_updated(
135 since_when, exclude_status=exclude_status, db_session=db_session)
134 since_when, exclude_status=exclude_status, db_session=db_session
135 )
136 136 q2 = q2.from_self(ReportGroup.resource_id)
137 137 q2 = q2.group_by(ReportGroup.resource_id)
138 138 q = q.filter(Application.resource_id.in_(q2))
@@ -142,10 +142,10 b' class ApplicationService(BaseService):'
142 142 def check_for_groups_alert(cls, resource, event_type, *args, **kwargs):
143 143 """ Check for open alerts depending on group type.
144 144 Create a new one if nothing is found and send alerts """
145 db_session = get_db_session(kwargs.get('db_session'))
145 db_session = get_db_session(kwargs.get("db_session"))
146 146 request = get_current_request()
147 report_groups = kwargs['report_groups']
148 occurence_dict = kwargs['occurence_dict']
147 report_groups = kwargs["report_groups"]
148 occurence_dict = kwargs["occurence_dict"]
149 149
150 150 error_reports = 0
151 151 slow_reports = 0
@@ -156,38 +156,45 b' class ApplicationService(BaseService):'
156 156 elif group.get_report().report_type == ReportType.slow:
157 157 slow_reports += occurences
158 158
159 log_msg = 'LIMIT INFO: %s : %s error reports. %s slow_reports' % (
159 log_msg = "LIMIT INFO: %s : %s error reports. %s slow_reports" % (
160 160 resource,
161 161 error_reports,
162 slow_reports)
162 slow_reports,
163 )
163 164 logging.warning(log_msg)
164 165 threshold = 10
165 for event_type in ['error_report_alert', 'slow_report_alert']:
166 if (error_reports < resource.error_report_threshold and
167 event_type == 'error_report_alert'):
166 for event_type in ["error_report_alert", "slow_report_alert"]:
167 if (
168 error_reports < resource.error_report_threshold
169 and event_type == "error_report_alert"
170 ):
168 171 continue
169 elif (slow_reports <= resource.slow_report_threshold and
170 event_type == 'slow_report_alert'):
172 elif (
173 slow_reports <= resource.slow_report_threshold
174 and event_type == "slow_report_alert"
175 ):
171 176 continue
172 if event_type == 'error_report_alert':
177 if event_type == "error_report_alert":
173 178 amount = error_reports
174 179 threshold = resource.error_report_threshold
175 elif event_type == 'slow_report_alert':
180 elif event_type == "slow_report_alert":
176 181 amount = slow_reports
177 182 threshold = resource.slow_report_threshold
178 183
179 event = EventService.for_resource([resource.resource_id],
180 event_type=Event.types[
181 event_type],
182 status=Event.statuses['active'])
184 event = EventService.for_resource(
185 [resource.resource_id],
186 event_type=Event.types[event_type],
187 status=Event.statuses["active"],
188 )
183 189 if event.first():
184 log.info('ALERT: PROGRESS: %s %s' % (event_type, resource))
190 log.info("ALERT: PROGRESS: %s %s" % (event_type, resource))
185 191 else:
186 log.warning('ALERT: OPEN: %s %s' % (event_type, resource))
187 new_event = Event(resource_id=resource.resource_id,
192 log.warning("ALERT: OPEN: %s %s" % (event_type, resource))
193 new_event = Event(
194 resource_id=resource.resource_id,
188 195 event_type=Event.types[event_type],
189 status=Event.statuses['active'],
190 values={'reports': amount,
191 'threshold': threshold})
196 status=Event.statuses["active"],
197 values={"reports": amount, "threshold": threshold},
198 )
192 199 db_session.add(new_event)
193 200 new_event.send_alerts(request=request, resource=resource)
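
The guard clauses above encode the alert-opening rule: error alerts fire at or above error_report_threshold, while slow alerts fire only strictly above slow_report_threshold (note the asymmetric < versus <=). Distilled into a standalone predicate (names are illustrative):

    from types import SimpleNamespace

    def should_open_alert(event_type, error_reports, slow_reports, resource):
        # mirrors the two "continue" guards in the hunk above
        if event_type == "error_report_alert":
            return error_reports >= resource.error_report_threshold
        if event_type == "slow_report_alert":
            return slow_reports > resource.slow_report_threshold
        return False

    resource = SimpleNamespace(error_report_threshold=10, slow_report_threshold=10)
    assert should_open_alert("error_report_alert", 10, 0, resource)
    assert not should_open_alert("slow_report_alert", 0, 10, resource)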
@@ -21,7 +21,6 b' from appenlight.models.services.base import BaseService'
21 21
22 22
23 23 class ApplicationPostprocessConfService(BaseService):
24
25 24 @classmethod
26 25 def by_pkey(cls, pkey, db_session=None):
27 26 db_session = get_db_session(db_session)
@@ -40,9 +40,10 b' class ConfigService(BaseService):'
40 40 if pairs:
41 41 conditions = []
42 42 for pair in pairs:
43 conditions.append(sa.and_(
44 Config.key == pair['key'],
45 Config.section == pair['section'])
43 conditions.append(
44 sa.and_(
45 Config.key == pair["key"], Config.section == pair["section"]
46 )
46 47 )
47 48
48 49 query = query.filter(sa.or_(*conditions))
@@ -57,13 +58,15 b' class ConfigService(BaseService):'
57 58 return config
58 59
59 60 @classmethod
60 def by_key_and_section(cls, key, section, auto_create=False,
61 default_value=None, db_session=None):
61 def by_key_and_section(
62 cls, key, section, auto_create=False, default_value=None, db_session=None
63 ):
62 64 db_session = get_db_session(db_session)
63 65 registry = get_current_registry()
64 66
65 67 @registry.cache_regions.memory_min_1.cache_on_arguments(
66 namespace='ConfigService.by_key_and_section')
68 namespace="ConfigService.by_key_and_section"
69 )
67 70 def cached(key, section):
68 71 query = db_session.query(Config).filter(Config.key == key)
69 72 query = query.filter(Config.section == section)
@@ -76,8 +79,7 b' class ConfigService(BaseService):'
76 79 if config:
77 80 config = db_session.merge(config, load=False)
78 81 if config is None and auto_create:
79 config = ConfigService.create_config(key, section,
80 value=default_value)
82 config = ConfigService.create_config(key, section, value=default_value)
81 83 cached.invalidate(key, section)
82 84 return config
83 85
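
The auto_create branch above pairs a memoized reader with an explicit invalidate, so a freshly created row is not shadowed by the cached miss; cached ORM objects are also re-attached via db_session.merge(config, load=False), as the hunk shows. The read-through/invalidate shape in isolation (the in-memory store stands in for the database):

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=60)
    _store = {}  # pretend database: {(key, section): value}

    @region.cache_on_arguments(namespace="ConfigService.by_key_and_section")
    def cached(key, section):
        return _store.get((key, section))

    def by_key_and_section(key, section, auto_create=False, default_value=None):
        config = cached(key, section)
        if config is None and auto_create:
            _store[(key, section)] = default_value
            cached.invalidate(key, section)  # drop the memoized miss
            config = default_value
        return config

    assert by_key_and_section("template_footer_html", "global",
                              auto_create=True, default_value="") == ""
    assert cached("template_footer_html", "global") == ""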
@@ -87,14 +89,28 b' class ConfigService(BaseService):'
87 89 Will add fresh default config values to database if no keys are found
88 90 :return:
89 91 """
90 log.info('Checking/setting default values')
91 self.by_key_and_section('template_footer_html', 'global',
92 default_value='', auto_create=True)
93 self.by_key_and_section('list_groups_to_non_admins', 'global',
94 default_value=True, auto_create=True)
95 self.by_key_and_section('per_application_reports_rate_limit', 'global',
96 default_value=2000, auto_create=True)
97 self.by_key_and_section('per_application_logs_rate_limit', 'global',
98 default_value=100000, auto_create=True)
99 self.by_key_and_section('per_application_metrics_rate_limit', 'global',
100 default_value=100000, auto_create=True)
92 log.info("Checking/setting default values")
93 self.by_key_and_section(
94 "template_footer_html", "global", default_value="", auto_create=True
95 )
96 self.by_key_and_section(
97 "list_groups_to_non_admins", "global", default_value=True, auto_create=True
98 )
99 self.by_key_and_section(
100 "per_application_reports_rate_limit",
101 "global",
102 default_value=2000,
103 auto_create=True,
104 )
105 self.by_key_and_section(
106 "per_application_logs_rate_limit",
107 "global",
108 default_value=100000,
109 auto_create=True,
110 )
111 self.by_key_and_section(
112 "per_application_metrics_rate_limit",
113 "global",
114 default_value=100000,
115 auto_create=True,
116 )
@@ -26,10 +26,19 b' from appenlight.models.services.base import BaseService'
26 26
27 27 class EventService(BaseService):
28 28 @classmethod
29 def for_resource(cls, resource_ids, event_type=None, status=None,
30 since_when=None, limit=20, event_id=None,
31 target_uuid=None, order_by=None, or_target_user_id=None,
32 db_session=None):
29 def for_resource(
30 cls,
31 resource_ids,
32 event_type=None,
33 status=None,
34 since_when=None,
35 limit=20,
36 event_id=None,
37 target_uuid=None,
38 order_by=None,
39 or_target_user_id=None,
40 db_session=None,
41 ):
33 42 """
34 43 Fetches events based on passed params OR, if target_user_id
35 44 is present, includes events that just target this user
@@ -57,8 +66,7 b' class EventService(BaseService):'
57 66 if or_target_user_id:
58 67 or_cond.append(sa.or_(Event.target_user_id == or_target_user_id))
59 68
60 query = query.filter(sa.or_(sa.and_(*and_cond),
61 *or_cond))
69 query = query.filter(sa.or_(sa.and_(*and_cond), *or_cond))
62 70 if not order_by:
63 71 query = query.order_by(sa.desc(Event.start_date))
64 72 if limit:
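
The filter at the end of this hunk composes to (all AND conditions) OR (each OR condition). A tiny illustration with bare columns (the column names are stand-ins for the Event attributes):

    import sqlalchemy as sa

    status = sa.column("status")
    resource_id = sa.column("resource_id")
    target_user_id = sa.column("target_user_id")

    and_cond = [status == 1, resource_id.in_([1, 2])]
    or_cond = [target_user_id == 5]

    clause = sa.or_(sa.and_(*and_cond), *or_cond)
    # renders roughly: (status = ... AND resource_id IN (...)) OR target_user_id = ...
    print(clause)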
@@ -67,8 +75,15 b' class EventService(BaseService):'
67 75 return query
68 76
69 77 @classmethod
70 def by_type_and_status(cls, event_types, status_types, since_when=None,
71 older_than=None, db_session=None, app_ids=None):
78 def by_type_and_status(
79 cls,
80 event_types,
81 status_types,
82 since_when=None,
83 older_than=None,
84 db_session=None,
85 app_ids=None,
86 ):
72 87 db_session = get_db_session(db_session)
73 88 query = db_session.query(Event)
74 89 query = query.filter(Event.event_type.in_(event_types))
@@ -84,26 +99,38 b' class EventService(BaseService):'
84 99 @classmethod
85 100 def latest_for_user(cls, user, db_session=None):
86 101 registry = get_current_registry()
87 resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types)
102 resources = UserService.resources_with_perms(
103 user, ["view"], resource_types=registry.resource_types
104 )
88 105 resource_ids = [r.resource_id for r in resources]
89 106 db_session = get_db_session(db_session)
90 107 return EventService.for_resource(
91 resource_ids, or_target_user_id=user.id, limit=10,
92 db_session=db_session)
108 resource_ids, or_target_user_id=user.id, limit=10, db_session=db_session
109 )
93 110
94 111 @classmethod
95 def get_paginator(cls, user, page=1, item_count=None, items_per_page=50,
96 order_by=None, filter_settings=None, db_session=None):
112 def get_paginator(
113 cls,
114 user,
115 page=1,
116 item_count=None,
117 items_per_page=50,
118 order_by=None,
119 filter_settings=None,
120 db_session=None,
121 ):
97 122 if not filter_settings:
98 123 filter_settings = {}
99 124 registry = get_current_registry()
100 resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types)
125 resources = UserService.resources_with_perms(
126 user, ["view"], resource_types=registry.resource_types
127 )
101 128 resource_ids = [r.resource_id for r in resources]
102 129 query = EventService.for_resource(
103 resource_ids, or_target_user_id=user.id, limit=100,
104 db_session=db_session)
130 resource_ids, or_target_user_id=user.id, limit=100, db_session=db_session
131 )
105 132
106 paginator = SqlalchemyOrmPage(query, page=page,
107 items_per_page=items_per_page,
108 **filter_settings)
133 paginator = SqlalchemyOrmPage(
134 query, page=page, items_per_page=items_per_page, **filter_settings
135 )
109 136 return paginator
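
get_paginator above wraps the query in SqlalchemyOrmPage; the slicing semantics are those of the plain paginate.Page, shown here with a list standing in for the SQLAlchemy query:

    import paginate

    page = paginate.Page(list(range(137)), page=2, items_per_page=50)
    assert page.items == list(range(50, 100))  # the second page of 50
    assert page.page_count == 3 and page.item_count == 137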
@@ -16,18 +16,20 b''
16 16
17 17 from appenlight.models.group_resource_permission import GroupResourcePermission
18 18 from appenlight.models import get_db_session
19 from ziggurat_foundations.models.services.group_resource_permission import GroupResourcePermissionService
19 from ziggurat_foundations.models.services.group_resource_permission import (
20 GroupResourcePermissionService,
21 )
20 22
21 23
22 24 class GroupResourcePermissionService(GroupResourcePermissionService):
23 25 @classmethod
24 def by_resource_group_and_perm(cls, group_id, perm_name, resource_id,
25 db_session=None):
26 def by_resource_group_and_perm(
27 cls, group_id, perm_name, resource_id, db_session=None
28 ):
26 29 """ return all instances by user name, perm name and resource id """
27 30 db_session = get_db_session(db_session)
28 31 query = db_session.query(GroupResourcePermission)
29 32 query = query.filter(GroupResourcePermission.group_id == group_id)
30 query = query.filter(
31 GroupResourcePermission.resource_id == resource_id)
33 query = query.filter(GroupResourcePermission.resource_id == resource_id)
32 34 query = query.filter(GroupResourcePermission.perm_name == perm_name)
33 35 return query.first()
@@ -28,8 +28,7 b' log = logging.getLogger(__name__)'
28 28
29 29 class LogService(BaseService):
30 30 @classmethod
31 def get_logs(cls, resource_ids=None, filter_settings=None,
32 db_session=None):
31 def get_logs(cls, resource_ids=None, filter_settings=None, db_session=None):
33 32 # ensure we always have id's passed
34 33 if not resource_ids:
35 34 # raise Exception('No App ID passed')
@@ -37,18 +36,17 b' class LogService(BaseService):'
37 36 db_session = get_db_session(db_session)
38 37 q = db_session.query(Log)
39 38 q = q.filter(Log.resource_id.in_(resource_ids))
40 if filter_settings.get('start_date'):
41 q = q.filter(Log.timestamp >= filter_settings.get('start_date'))
42 if filter_settings.get('end_date'):
43 q = q.filter(Log.timestamp <= filter_settings.get('end_date'))
44 if filter_settings.get('log_level'):
45 q = q.filter(
46 Log.log_level == filter_settings.get('log_level').upper())
47 if filter_settings.get('request_id'):
48 request_id = filter_settings.get('request_id', '')
49 q = q.filter(Log.request_id == request_id.replace('-', ''))
50 if filter_settings.get('namespace'):
51 q = q.filter(Log.namespace == filter_settings.get('namespace'))
39 if filter_settings.get("start_date"):
40 q = q.filter(Log.timestamp >= filter_settings.get("start_date"))
41 if filter_settings.get("end_date"):
42 q = q.filter(Log.timestamp <= filter_settings.get("end_date"))
43 if filter_settings.get("log_level"):
44 q = q.filter(Log.log_level == filter_settings.get("log_level").upper())
45 if filter_settings.get("request_id"):
46 request_id = filter_settings.get("request_id", "")
47 q = q.filter(Log.request_id == request_id.replace("-", ""))
48 if filter_settings.get("namespace"):
49 q = q.filter(Log.namespace == filter_settings.get("namespace"))
52 50 q = q.order_by(sa.desc(Log.timestamp))
53 51 return q
54 52
@@ -60,20 +58,18 b' class LogService(BaseService):'
60 58 query = {
61 59 "query": {
62 60 "filtered": {
63 "filter": {
64 "and": [{"terms": {"resource_id": list(app_ids)}}]
65 }
61 "filter": {"and": [{"terms": {"resource_id": list(app_ids)}}]}
66 62 }
67 63 }
68 64 }
69 65
70 start_date = filter_settings.get('start_date')
71 end_date = filter_settings.get('end_date')
72 filter_part = query['query']['filtered']['filter']['and']
66 start_date = filter_settings.get("start_date")
67 end_date = filter_settings.get("end_date")
68 filter_part = query["query"]["filtered"]["filter"]["and"]
73 69
74 for tag in filter_settings.get('tags', []):
75 tag_values = [v.lower() for v in tag['value']]
76 key = "tags.%s.values" % tag['name'].replace('.', '_')
70 for tag in filter_settings.get("tags", []):
71 tag_values = [v.lower() for v in tag["value"]]
72 key = "tags.%s.values" % tag["name"].replace(".", "_")
77 73 filter_part.append({"terms": {key: tag_values}})
78 74
79 75 date_range = {"range": {"timestamp": {}}}
@@ -84,26 +80,21 b' class LogService(BaseService):'
84 80 if start_date or end_date:
85 81 filter_part.append(date_range)
86 82
87 levels = filter_settings.get('level')
83 levels = filter_settings.get("level")
88 84 if levels:
89 filter_part.append({"terms": {'log_level': levels}})
90 namespaces = filter_settings.get('namespace')
85 filter_part.append({"terms": {"log_level": levels}})
86 namespaces = filter_settings.get("namespace")
91 87 if namespaces:
92 filter_part.append({"terms": {'namespace': namespaces}})
88 filter_part.append({"terms": {"namespace": namespaces}})
93 89
94 request_ids = filter_settings.get('request_id')
90 request_ids = filter_settings.get("request_id")
95 91 if request_ids:
96 filter_part.append({"terms": {'request_id': request_ids}})
92 filter_part.append({"terms": {"request_id": request_ids}})
97 93
98 messages = filter_settings.get('message')
94 messages = filter_settings.get("message")
99 95 if messages:
100 query['query']['filtered']['query'] = {
101 'match': {
102 'message': {
103 'query': ' '.join(messages),
104 'operator': 'and'
105 }
106 }
96 query["query"]["filtered"]["query"] = {
97 "match": {"message": {"query": " ".join(messages), "operator": "and"}}
107 98 }
108 99 return query
109 100
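
Assembled, es_query_builder emits the legacy Elasticsearch 1.x filtered shape. With a date range, a level filter and a message search, the result looks roughly like this (all values illustrative):

    from datetime import datetime

    start, end = datetime(2018, 1, 1), datetime(2018, 1, 2)
    es_query = {
        "query": {
            "filtered": {
                "filter": {
                    "and": [
                        {"terms": {"resource_id": [1]}},
                        {"range": {"timestamp": {"gte": start, "lte": end}}},
                        {"terms": {"log_level": ["ERROR"]}},
                    ]
                },
                "query": {
                    "match": {
                        "message": {"query": "connection timeout", "operator": "and"}
                    }
                },
            }
        }
    }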
@@ -118,76 +109,96 b' class LogService(BaseService):'
118 109 "field": "timestamp",
119 110 "interval": "1h",
120 111 "min_doc_count": 0,
121 'extended_bounds': {
122 'max': filter_settings.get('end_date'),
123 'min': filter_settings.get('start_date')}
112 "extended_bounds": {
113 "max": filter_settings.get("end_date"),
114 "min": filter_settings.get("start_date"),
115 },
124 116 }
125 117 }
126 118 }
127 119 log.debug(es_query)
128 index_names = es_index_name_limiter(filter_settings.get('start_date'),
129 filter_settings.get('end_date'),
130 ixtypes=['logs'])
120 index_names = es_index_name_limiter(
121 filter_settings.get("start_date"),
122 filter_settings.get("end_date"),
123 ixtypes=["logs"],
124 )
131 125 if index_names:
132 126 results = Datastores.es.search(
133 body=es_query, index=index_names, doc_type='log', size=0)
127 body=es_query, index=index_names, doc_type="log", size=0
128 )
134 129 else:
135 130 results = []
136 131 return results
137 132
138 133 @classmethod
139 def get_search_iterator(cls, app_ids=None, page=1, items_per_page=50,
140 order_by=None, filter_settings=None, limit=None):
134 def get_search_iterator(
135 cls,
136 app_ids=None,
137 page=1,
138 items_per_page=50,
139 order_by=None,
140 filter_settings=None,
141 limit=None,
142 ):
141 143 if not app_ids:
142 144 return {}, 0
143 145
144 146 es_query = cls.es_query_builder(app_ids, filter_settings)
145 sort_query = {
146 "sort": [
147 {"timestamp": {"order": "desc"}}
148 ]
149 }
147 sort_query = {"sort": [{"timestamp": {"order": "desc"}}]}
150 148 es_query.update(sort_query)
151 149 log.debug(es_query)
152 150 es_from = (page - 1) * items_per_page
153 index_names = es_index_name_limiter(filter_settings.get('start_date'),
154 filter_settings.get('end_date'),
155 ixtypes=['logs'])
151 index_names = es_index_name_limiter(
152 filter_settings.get("start_date"),
153 filter_settings.get("end_date"),
154 ixtypes=["logs"],
155 )
156 156 if not index_names:
157 157 return {}, 0
158 158
159 results = Datastores.es.search(body=es_query, index=index_names,
160 doc_type='log', size=items_per_page,
161 from_=es_from)
162 if results['hits']['total'] > 5000:
159 results = Datastores.es.search(
160 body=es_query,
161 index=index_names,
162 doc_type="log",
163 size=items_per_page,
164 from_=es_from,
165 )
166 if results["hits"]["total"] > 5000:
163 167 count = 5000
164 168 else:
165 count = results['hits']['total']
166 return results['hits'], count
169 count = results["hits"]["total"]
170 return results["hits"], count
167 171
168 172 @classmethod
169 def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
170 items_per_page=50, order_by=None,
173 def get_paginator_by_app_ids(
174 cls,
175 app_ids=None,
176 page=1,
177 item_count=None,
178 items_per_page=50,
179 order_by=None,
171 180 filter_settings=None,
172 exclude_columns=None, db_session=None):
181 exclude_columns=None,
182 db_session=None,
183 ):
173 184 if not filter_settings:
174 185 filter_settings = {}
175 results, item_count = cls.get_search_iterator(app_ids, page,
176 items_per_page, order_by,
177 filter_settings)
178 paginator = paginate.Page([],
179 item_count=item_count,
180 items_per_page=items_per_page,
181 **filter_settings)
182 ordered_ids = tuple(item['_source']['pg_id']
183 for item in results.get('hits', []))
186 results, item_count = cls.get_search_iterator(
187 app_ids, page, items_per_page, order_by, filter_settings
188 )
189 paginator = paginate.Page(
190 [], item_count=item_count, items_per_page=items_per_page, **filter_settings
191 )
192 ordered_ids = tuple(
193 item["_source"]["pg_id"] for item in results.get("hits", [])
194 )
184 195
185 196 sorted_instance_list = []
186 197 if ordered_ids:
187 198 db_session = get_db_session(db_session)
188 199 query = db_session.query(Log)
189 200 query = query.filter(Log.log_id.in_(ordered_ids))
190 query = query.order_by(sa.desc('timestamp'))
201 query = query.order_by(sa.desc("timestamp"))
191 202 sa_items = query.all()
192 203 # resort by score
193 204 for i_id in ordered_ids:
@@ -198,14 +209,14 b' class LogService(BaseService):'
198 209 return paginator
199 210
200 211 @classmethod
201 def query_by_primary_key_and_namespace(cls, list_of_pairs,
202 db_session=None):
212 def query_by_primary_key_and_namespace(cls, list_of_pairs, db_session=None):
203 213 db_session = get_db_session(db_session)
204 214 list_of_conditions = []
205 215 query = db_session.query(Log)
206 216 for pair in list_of_pairs:
207 list_of_conditions.append(sa.and_(
208 Log.primary_key == pair['pk'], Log.namespace == pair['ns']))
217 list_of_conditions.append(
218 sa.and_(Log.primary_key == pair["pk"], Log.namespace == pair["ns"])
219 )
209 220 query = query.filter(sa.or_(*list_of_conditions))
210 221 query = query.order_by(sa.asc(Log.timestamp), sa.asc(Log.log_id))
211 222 return query
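
The "resort by score" loop in get_paginator_by_app_ids above re-sequences the SQL rows to match the Elasticsearch ranking, since an IN (...) query returns rows in arbitrary order. The same step as a small helper (names illustrative):

    from types import SimpleNamespace

    def resort_by_es_order(ordered_ids, sa_items, key=lambda item: str(item.log_id)):
        by_id = {key(item): item for item in sa_items}
        return [by_id[i] for i in ordered_ids if i in by_id]

    rows = [SimpleNamespace(log_id=i) for i in (2, 9, 4)]
    assert [r.log_id for r in resort_by_es_order(["9", "4", "2"], rows)] == [9, 4, 2]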
@@ -38,8 +38,9 b' class PluginConfigService(BaseService):'
38 38 return query.first()
39 39
40 40 @classmethod
41 def by_query(cls, resource_id=None, plugin_name=None,
42 section=None, db_session=None):
41 def by_query(
42 cls, resource_id=None, plugin_name=None, section=None, db_session=None
43 ):
43 44 db_session = get_db_session(db_session)
44 45
45 46 query = db_session.query(PluginConfig)
@@ -37,21 +37,24 b' class ReportService(BaseService):'
37 37 return q
38 38
39 39 @classmethod
40 def generate_stat_rows(cls, report, resource, report_group, occurences=1,
41 db_session=None):
40 def generate_stat_rows(
41 cls, report, resource, report_group, occurences=1, db_session=None
42 ):
42 43 """
43 44 Generates timeseries for this report's group
44 45 """
45 46 db_session = get_db_session(db_session)
46 stats = ReportStat(resource_id=report.resource_id,
47 stats = ReportStat(
48 resource_id=report.resource_id,
47 49 group_id=report_group.id,
48 50 start_interval=report.start_time,
49 51 owner_user_id=resource.owner_user_id,
50 server_name=report.tags.get('server_name'),
51 view_name=report.tags.get('view_name'),
52 server_name=report.tags.get("server_name"),
53 view_name=report.tags.get("view_name"),
52 54 type=report.report_type,
53 55 occurences=occurences,
54 duration=report.duration)
56 duration=report.duration,
57 )
55 58 db_session.add(stats)
56 59 db_session.flush()
57 60 return stats
@@ -35,71 +35,98 b' log = logging.getLogger(__name__)'
35 35
36 36 class ReportGroupService(BaseService):
37 37 @classmethod
38 def get_trending(cls, request, filter_settings, limit=15,
39 db_session=None):
38 def get_trending(cls, request, filter_settings, limit=15, db_session=None):
40 39 """
41 40 Returns report groups trending for specific time interval
42 41 """
43 42 db_session = get_db_session(db_session)
44 43
45 44 tags = []
46 if filter_settings.get('tags'):
47 for tag in filter_settings['tags']:
45 if filter_settings.get("tags"):
46 for tag in filter_settings["tags"]:
48 47 tags.append(
49 {'terms': {
50 'tags.{}.values'.format(tag['name']): tag['value']}})
48 {"terms": {"tags.{}.values".format(tag["name"]): tag["value"]}}
49 )
51 50
52 51 index_names = es_index_name_limiter(
53 start_date=filter_settings['start_date'],
54 end_date=filter_settings['end_date'],
55 ixtypes=['reports'])
52 start_date=filter_settings["start_date"],
53 end_date=filter_settings["end_date"],
54 ixtypes=["reports"],
55 )
56 56
57 if not index_names or not filter_settings['resource']:
57 if not index_names or not filter_settings["resource"]:
58 58 return []
59 59
60 60 es_query = {
61 'aggs': {'parent_agg': {'aggs': {'groups': {'aggs': {
62 'sub_agg': {
63 'value_count': {'field': 'tags.group_id.values'}}},
64 'filter': {'exists': {'field': 'tags.group_id.values'}}}},
65 'terms': {'field': 'tags.group_id.values', 'size': limit}}},
66 'query': {'filtered': {
67 'filter': {'and': [
68 {'terms': {
69 'resource_id': [filter_settings['resource'][0]]}
61 "aggs": {
62 "parent_agg": {
63 "aggs": {
64 "groups": {
65 "aggs": {
66 "sub_agg": {
67 "value_count": {"field": "tags.group_id.values"}
68 }
69 },
70 "filter": {"exists": {"field": "tags.group_id.values"}},
71 }
72 },
73 "terms": {"field": "tags.group_id.values", "size": limit},
74 }
75 },
76 "query": {
77 "filtered": {
78 "filter": {
79 "and": [
80 {
81 "terms": {
82 "resource_id": [filter_settings["resource"][0]]
83 }
84 },
85 {
86 "range": {
87 "timestamp": {
88 "gte": filter_settings["start_date"],
89 "lte": filter_settings["end_date"],
90 }
91 }
70 92 },
71 {'range': {'timestamp': {
72 'gte': filter_settings['start_date'],
73 'lte': filter_settings['end_date']}}}]
93 ]
74 94 }
75 }}
95 }
96 },
76 97 }
77 98 if tags:
78 es_query['query']['filtered']['filter']['and'].extend(tags)
99 es_query["query"]["filtered"]["filter"]["and"].extend(tags)
79 100
80 101 result = Datastores.es.search(
81 body=es_query, index=index_names, doc_type='log', size=0)
102 body=es_query, index=index_names, doc_type="log", size=0
103 )
82 104 series = []
83 for bucket in result['aggregations']['parent_agg']['buckets']:
84 series.append({
85 'key': bucket['key'],
86 'groups': bucket['groups']['sub_agg']['value']
87 })
105 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
106 series.append(
107 {"key": bucket["key"], "groups": bucket["groups"]["sub_agg"]["value"]}
108 )
88 109
89 110 report_groups_d = {}
90 111 for g in series:
91 report_groups_d[int(g['key'])] = g['groups'] or 0
112 report_groups_d[int(g["key"])] = g["groups"] or 0
92 113
93 114 query = db_session.query(ReportGroup)
94 115 query = query.filter(ReportGroup.id.in_(list(report_groups_d.keys())))
95 query = query.options(
96 sa.orm.joinedload(ReportGroup.last_report_ref))
97 results = [(report_groups_d[group.id], group,) for group in query]
116 query = query.options(sa.orm.joinedload(ReportGroup.last_report_ref))
117 results = [(report_groups_d[group.id], group) for group in query]
98 118 return sorted(results, reverse=True, key=lambda x: x[0])
99 119
100 120 @classmethod
101 def get_search_iterator(cls, app_ids=None, page=1, items_per_page=50,
102 order_by=None, filter_settings=None, limit=None):
121 def get_search_iterator(
122 cls,
123 app_ids=None,
124 page=1,
125 items_per_page=50,
126 order_by=None,
127 filter_settings=None,
128 limit=None,
129 ):
103 130 if not app_ids:
104 131 return {}
105 132 if not filter_settings:
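
get_trending above joins the terms-aggregation buckets back to ORM rows by group id and sorts by occurrence count. Reduced to just the data handling (bucket values and rows illustrative):

    from types import SimpleNamespace

    buckets = [
        {"key": "7", "groups": {"sub_agg": {"value": 42}}},
        {"key": "3", "groups": {"sub_agg": {"value": 17}}},
    ]
    report_groups_d = {
        int(b["key"]): b["groups"]["sub_agg"]["value"] or 0 for b in buckets
    }

    groups = [SimpleNamespace(id=3), SimpleNamespace(id=7)]  # stand-ins for ORM rows
    results = sorted(
        ((report_groups_d[g.id], g) for g in groups), reverse=True, key=lambda x: x[0]
    )
    assert [count for count, _ in results] == [42, 17]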
@@ -109,38 +136,29 b' class ReportGroupService(BaseService):'
109 136 "size": 0,
110 137 "query": {
111 138 "filtered": {
112 "filter": {
113 "and": [{"terms": {"resource_id": list(app_ids)}}]
114 }
139 "filter": {"and": [{"terms": {"resource_id": list(app_ids)}}]}
115 140 }
116 141 },
117
118 142 "aggs": {
119 143 "top_groups": {
120 144 "terms": {
121 145 "size": 5000,
122 146 "field": "_parent",
123 "order": {
124 "newest": "desc"
125 }
147 "order": {"newest": "desc"},
126 148 },
127 149 "aggs": {
128 150 "top_reports_hits": {
129 "top_hits": {"size": 1,
130 "sort": {"start_time": "desc"}
131 }
151 "top_hits": {"size": 1, "sort": {"start_time": "desc"}}
152 },
153 "newest": {"max": {"field": "start_time"}},
132 154 },
133 "newest": {
134 "max": {"field": "start_time"}
135 }
136 }
137 }
138 155 }
156 },
139 157 }
140 158
141 start_date = filter_settings.get('start_date')
142 end_date = filter_settings.get('end_date')
143 filter_part = query['query']['filtered']['filter']['and']
159 start_date = filter_settings.get("start_date")
160 end_date = filter_settings.get("end_date")
161 filter_part = query["query"]["filtered"]["filter"]["and"]
144 162 date_range = {"range": {"start_time": {}}}
145 163 if start_date:
146 164 date_range["range"]["start_time"]["gte"] = start_date
@@ -149,131 +167,160 b' class ReportGroupService(BaseService):'
149 167 if start_date or end_date:
150 168 filter_part.append(date_range)
151 169
152 priorities = filter_settings.get('priority')
170 priorities = filter_settings.get("priority")
153 171
154 for tag in filter_settings.get('tags', []):
155 tag_values = [v.lower() for v in tag['value']]
156 key = "tags.%s.values" % tag['name'].replace('.', '_')
172 for tag in filter_settings.get("tags", []):
173 tag_values = [v.lower() for v in tag["value"]]
174 key = "tags.%s.values" % tag["name"].replace(".", "_")
157 175 filter_part.append({"terms": {key: tag_values}})
158 176
159 177 if priorities:
160 filter_part.append({"has_parent": {
178 filter_part.append(
179 {
180 "has_parent": {
161 181 "parent_type": "report_group",
162 "query": {
163 "terms": {'priority': priorities}
164 }}})
182 "query": {"terms": {"priority": priorities}},
183 }
184 }
185 )
165 186
166 min_occurences = filter_settings.get('min_occurences')
187 min_occurences = filter_settings.get("min_occurences")
167 188 if min_occurences:
168 filter_part.append({"has_parent": {
189 filter_part.append(
190 {
191 "has_parent": {
169 192 "parent_type": "report_group",
170 "query": {
171 "range": {'occurences': {"gte": min_occurences[0]}}
172 }}})
193 "query": {"range": {"occurences": {"gte": min_occurences[0]}}},
194 }
195 }
196 )
173 197
174 min_duration = filter_settings.get('min_duration')
175 max_duration = filter_settings.get('max_duration')
198 min_duration = filter_settings.get("min_duration")
199 max_duration = filter_settings.get("max_duration")
176 200
177 request_ids = filter_settings.get('request_id')
201 request_ids = filter_settings.get("request_id")
178 202 if request_ids:
179 filter_part.append({"terms": {'request_id': request_ids}})
203 filter_part.append({"terms": {"request_id": request_ids}})
180 204
181 205 duration_range = {"range": {"average_duration": {}}}
182 206 if min_duration:
183 duration_range["range"]["average_duration"]["gte"] = \
184 min_duration[0]
207 duration_range["range"]["average_duration"]["gte"] = min_duration[0]
185 208 if max_duration:
186 duration_range["range"]["average_duration"]["lte"] = \
187 max_duration[0]
209 duration_range["range"]["average_duration"]["lte"] = max_duration[0]
188 210 if min_duration or max_duration:
189 filter_part.append({"has_parent": {
190 "parent_type": "report_group",
191 "query": duration_range}})
211 filter_part.append(
212 {"has_parent": {"parent_type": "report_group", "query": duration_range}}
213 )
192 214
193 http_status = filter_settings.get('http_status')
194 report_type = filter_settings.get('report_type', [ReportType.error])
215 http_status = filter_settings.get("http_status")
216 report_type = filter_settings.get("report_type", [ReportType.error])
195 217 # set error report type if http status is not found
196 218 # and we are dealing with slow reports
197 219 if not http_status or ReportType.slow in report_type:
198 filter_part.append({"terms": {'report_type': report_type}})
220 filter_part.append({"terms": {"report_type": report_type}})
199 221 if http_status:
200 filter_part.append({"terms": {'http_status': http_status}})
222 filter_part.append({"terms": {"http_status": http_status}})
201 223
202 messages = filter_settings.get('message')
224 messages = filter_settings.get("message")
203 225 if messages:
204 condition = {'match': {"message": ' '.join(messages)}}
205 query['query']['filtered']['query'] = condition
206 errors = filter_settings.get('error')
226 condition = {"match": {"message": " ".join(messages)}}
227 query["query"]["filtered"]["query"] = condition
228 errors = filter_settings.get("error")
207 229 if errors:
208 condition = {'match': {"error": ' '.join(errors)}}
209 query['query']['filtered']['query'] = condition
210 url_domains = filter_settings.get('url_domain')
230 condition = {"match": {"error": " ".join(errors)}}
231 query["query"]["filtered"]["query"] = condition
232 url_domains = filter_settings.get("url_domain")
211 233 if url_domains:
212 condition = {'terms': {"url_domain": url_domains}}
213 query['query']['filtered']['query'] = condition
214 url_paths = filter_settings.get('url_path')
234 condition = {"terms": {"url_domain": url_domains}}
235 query["query"]["filtered"]["query"] = condition
236 url_paths = filter_settings.get("url_path")
215 237 if url_paths:
216 condition = {'terms': {"url_path": url_paths}}
217 query['query']['filtered']['query'] = condition
218
219 if filter_settings.get('report_status'):
220 for status in filter_settings.get('report_status'):
221 if status == 'never_reviewed':
222 filter_part.append({"has_parent": {
238 condition = {"terms": {"url_path": url_paths}}
239 query["query"]["filtered"]["query"] = condition
240
241 if filter_settings.get("report_status"):
242 for status in filter_settings.get("report_status"):
243 if status == "never_reviewed":
244 filter_part.append(
245 {
246 "has_parent": {
223 247 "parent_type": "report_group",
224 "query": {
225 "term": {"read": False}
226 }}})
227 elif status == 'reviewed':
228 filter_part.append({"has_parent": {
248 "query": {"term": {"read": False}},
249 }
250 }
251 )
252 elif status == "reviewed":
253 filter_part.append(
254 {
255 "has_parent": {
229 256 "parent_type": "report_group",
230 "query": {
231 "term": {"read": True}
232 }}})
233 elif status == 'public':
234 filter_part.append({"has_parent": {
257 "query": {"term": {"read": True}},
258 }
259 }
260 )
261 elif status == "public":
262 filter_part.append(
263 {
264 "has_parent": {
235 265 "parent_type": "report_group",
236 "query": {
237 "term": {"public": True}
238 }}})
239 elif status == 'fixed':
240 filter_part.append({"has_parent": {
266 "query": {"term": {"public": True}},
267 }
268 }
269 )
270 elif status == "fixed":
271 filter_part.append(
272 {
273 "has_parent": {
241 274 "parent_type": "report_group",
242 "query": {
243 "term": {"fixed": True}
244 }}})
275 "query": {"term": {"fixed": True}},
276 }
277 }
278 )
245 279
246 280 # logging.getLogger('pyelasticsearch').setLevel(logging.DEBUG)
247 index_names = es_index_name_limiter(filter_settings.get('start_date'),
248 filter_settings.get('end_date'),
249 ixtypes=['reports'])
281 index_names = es_index_name_limiter(
282 filter_settings.get("start_date"),
283 filter_settings.get("end_date"),
284 ixtypes=["reports"],
285 )
250 286 if index_names:
251 287 results = Datastores.es.search(
252 body=query, index=index_names, doc_type=["report", "report_group"],
253 size=0)
288 body=query,
289 index=index_names,
290 doc_type=["report", "report_group"],
291 size=0,
292 )
254 293 else:
255 294 return []
256 return results['aggregations']
295 return results["aggregations"]
257 296
258 297 @classmethod
259 def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
260 items_per_page=50, order_by=None,
298 def get_paginator_by_app_ids(
299 cls,
300 app_ids=None,
301 page=1,
302 item_count=None,
303 items_per_page=50,
304 order_by=None,
261 305 filter_settings=None,
262 exclude_columns=None, db_session=None):
306 exclude_columns=None,
307 db_session=None,
308 ):
263 309 if not filter_settings:
264 310 filter_settings = {}
265 results = cls.get_search_iterator(app_ids, page, items_per_page,
266 order_by, filter_settings)
311 results = cls.get_search_iterator(
312 app_ids, page, items_per_page, order_by, filter_settings
313 )
267 314
268 315 ordered_ids = []
269 316 if results:
270 for item in results['top_groups']['buckets']:
271 pg_id = item['top_reports_hits']['hits']['hits'][0]['_source'][
272 'pg_id']
317 for item in results["top_groups"]["buckets"]:
318 pg_id = item["top_reports_hits"]["hits"]["hits"][0]["_source"]["pg_id"]
273 319 ordered_ids.append(pg_id)
274 320 log.info(filter_settings)
275 paginator = paginate.Page(ordered_ids, items_per_page=items_per_page,
276 **filter_settings)
321 paginator = paginate.Page(
322 ordered_ids, items_per_page=items_per_page, **filter_settings
323 )
277 324 sa_items = ()
278 325 if paginator.items:
279 326 db_session = get_db_session(db_session)
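
The has_parent filters earlier in this hunk rely on the ES 1.x parent/child mapping: report documents are children of report_group documents, so group-level fields (priority, occurences, read, fixed, public) are matched through the parent. The recurring filter shape:

    has_parent_filter = {
        "has_parent": {
            "parent_type": "report_group",
            "query": {"terms": {"priority": [5]}},  # any group-level condition
        }
    }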
@@ -281,22 +328,22 b' class ReportGroupService(BaseService):'
281 328 query = db_session.query(Report)
282 329 query = query.options(sa.orm.joinedload(Report.report_group))
283 330 query = query.filter(Report.id.in_(paginator.items))
284 if filter_settings.get('order_col'):
285 order_col = filter_settings.get('order_col')
286 if filter_settings.get('order_dir') == 'dsc':
287 sort_on = 'desc'
331 if filter_settings.get("order_col"):
332 order_col = filter_settings.get("order_col")
333 if filter_settings.get("order_dir") == "dsc":
334 sort_on = "desc"
288 335 else:
289 sort_on = 'asc'
290 if order_col == 'when':
291 order_col = 'last_timestamp'
292 query = query.order_by(getattr(sa, sort_on)(
293 getattr(ReportGroup, order_col)))
336 sort_on = "asc"
337 if order_col == "when":
338 order_col = "last_timestamp"
339 query = query.order_by(
340 getattr(sa, sort_on)(getattr(ReportGroup, order_col))
341 )
294 342 sa_items = query.all()
295 343 sorted_instance_list = []
296 344 for i_id in ordered_ids:
297 345 for report in sa_items:
298 if (str(report.id) == i_id and
299 report not in sorted_instance_list):
346 if str(report.id) == i_id and report not in sorted_instance_list:
300 347 sorted_instance_list.append(report)
301 348 paginator.sa_items = sorted_instance_list
302 349 return paginator
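
The ordering branch above maps the UI's order_col/order_dir pair ("dsc" meaning descending, "when" aliased to last_timestamp) onto SQLAlchemy via getattr. The trick in isolation (column name illustrative):

    import sqlalchemy as sa

    def order_clause(column, order_dir):
        # getattr(sa, "desc"/"asc") picks sa.desc or sa.asc, as in the hunk above
        sort_on = "desc" if order_dir == "dsc" else "asc"
        return getattr(sa, sort_on)(column)

    print(order_clause(sa.column("last_timestamp"), "dsc"))  # last_timestamp DESC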
@@ -314,8 +361,7 b' class ReportGroupService(BaseService):'
314 361 @classmethod
315 362 def by_id(cls, group_id, app_ids=None, db_session=None):
316 363 db_session = get_db_session(db_session)
317 q = db_session.query(ReportGroup).filter(
318 ReportGroup.id == int(group_id))
364 q = db_session.query(ReportGroup).filter(ReportGroup.id == int(group_id))
319 365 if app_ids:
320 366 q = q.filter(ReportGroup.resource_id.in_(app_ids))
321 367 return q.first()
@@ -328,8 +374,9 b' class ReportGroupService(BaseService):'
328 374 return query
329 375
330 376 @classmethod
331 def by_hash_and_resource(cls, resource_id, grouping_hash, since_when=None,
332 db_session=None):
377 def by_hash_and_resource(
378 cls, resource_id, grouping_hash, since_when=None, db_session=None
379 ):
333 380 db_session = get_db_session(db_session)
334 381 q = db_session.query(ReportGroup)
335 382 q = q.filter(ReportGroup.resource_id == resource_id)
@@ -340,8 +387,7 b' class ReportGroupService(BaseService):'
340 387 return q.first()
341 388
342 389 @classmethod
343 def users_commenting(cls, report_group, exclude_user_id=None,
344 db_session=None):
390 def users_commenting(cls, report_group, exclude_user_id=None, db_session=None):
345 391 db_session = get_db_session(None, report_group)
346 392 query = db_session.query(User).distinct()
347 393 query = query.filter(User.id == ReportComment.owner_id)
@@ -355,7 +401,7 b' class ReportGroupService(BaseService):'
355 401 db_session = get_db_session(db_session)
356 402 query = db_session.query(sa.func.count(Report.username))
357 403 query = query.filter(Report.group_id == report_group.id)
358 query = query.filter(Report.username != '')
404 query = query.filter(Report.username != "")
359 405 query = query.filter(Report.username != None)
360 406 query = query.group_by(Report.username)
361 407 return query.count()
@@ -363,11 +409,11 b' class ReportGroupService(BaseService):'
363 409 @classmethod
364 410 def top_affected_users(cls, report_group, db_session=None):
365 411 db_session = get_db_session(db_session)
366 count_label = sa.func.count(Report.username).label('count')
412 count_label = sa.func.count(Report.username).label("count")
367 413 query = db_session.query(Report.username, count_label)
368 414 query = query.filter(Report.group_id == report_group.id)
369 415 query = query.filter(Report.username != None)
370 query = query.filter(Report.username != '')
416 query = query.filter(Report.username != "")
371 417 query = query.group_by(Report.username)
372 418 query = query.order_by(sa.desc(count_label))
373 419 query = query.limit(50)
@@ -381,71 +427,95 b' class ReportGroupService(BaseService):'
381 427 detailed means a version that returns time intervals - non-detailed
382 428 returns the total sum
383 429 """
384 delta = filter_settings['end_date'] - filter_settings['start_date']
385 if delta < h.time_deltas.get('12h')['delta']:
386 interval = '1m'
387 elif delta <= h.time_deltas.get('3d')['delta']:
388 interval = '5m'
389 elif delta >= h.time_deltas.get('2w')['delta']:
390 interval = '24h'
430 delta = filter_settings["end_date"] - filter_settings["start_date"]
431 if delta < h.time_deltas.get("12h")["delta"]:
432 interval = "1m"
433 elif delta <= h.time_deltas.get("3d")["delta"]:
434 interval = "5m"
435 elif delta >= h.time_deltas.get("2w")["delta"]:
436 interval = "24h"
391 437 else:
392 interval = '1h'
438 interval = "1h"
393 439
394 group_id = filter_settings.get('group_id')
440 group_id = filter_settings.get("group_id")
395 441
396 442 es_query = {
397 'aggs': {'parent_agg': {'aggs': {'types': {
398 'aggs': {'sub_agg': {'terms': {'field': 'tags.type.values'}}},
399 'filter': {
400 'and': [{'exists': {'field': 'tags.type.values'}}]}
401 }},
402 'date_histogram': {'extended_bounds': {
403 'max': filter_settings['end_date'],
404 'min': filter_settings['start_date']},
405 'field': 'timestamp',
406 'interval': interval,
407 'min_doc_count': 0}}},
408 'query': {'filtered': {
409 'filter': {'and': [
410 {'terms': {
411 'resource_id': [filter_settings['resource'][0]]}},
412 {'range': {'timestamp': {
413 'gte': filter_settings['start_date'],
414 'lte': filter_settings['end_date']}}}]
415 }
416 }}
443 "aggs": {
444 "parent_agg": {
445 "aggs": {
446 "types": {
447 "aggs": {
448 "sub_agg": {"terms": {"field": "tags.type.values"}}
449 },
450 "filter": {
451 "and": [{"exists": {"field": "tags.type.values"}}]
452 },
453 }
454 },
455 "date_histogram": {
456 "extended_bounds": {
457 "max": filter_settings["end_date"],
458 "min": filter_settings["start_date"],
459 },
460 "field": "timestamp",
461 "interval": interval,
462 "min_doc_count": 0,
463 },
464 }
465 },
466 "query": {
467 "filtered": {
468 "filter": {
469 "and": [
470 {
471 "terms": {
472 "resource_id": [filter_settings["resource"][0]]
473 }
474 },
475 {
476 "range": {
477 "timestamp": {
478 "gte": filter_settings["start_date"],
479 "lte": filter_settings["end_date"],
480 }
481 }
482 },
483 ]
484 }
485 }
486 },
417 487 }
418 488 if group_id:
419 parent_agg = es_query['aggs']['parent_agg']
420 filters = parent_agg['aggs']['types']['filter']['and']
421 filters.append({'terms': {'tags.group_id.values': [group_id]}})
489 parent_agg = es_query["aggs"]["parent_agg"]
490 filters = parent_agg["aggs"]["types"]["filter"]["and"]
491 filters.append({"terms": {"tags.group_id.values": [group_id]}})
422 492
423 493 index_names = es_index_name_limiter(
424 start_date=filter_settings['start_date'],
425 end_date=filter_settings['end_date'],
426 ixtypes=['reports'])
494 start_date=filter_settings["start_date"],
495 end_date=filter_settings["end_date"],
496 ixtypes=["reports"],
497 )
427 498
428 499 if not index_names:
429 500 return []
430 501
431 result = Datastores.es.search(body=es_query,
432 index=index_names,
433 doc_type='log',
434 size=0)
502 result = Datastores.es.search(
503 body=es_query, index=index_names, doc_type="log", size=0
504 )
435 505 series = []
436 for bucket in result['aggregations']['parent_agg']['buckets']:
506 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
437 507 point = {
438 'x': datetime.utcfromtimestamp(int(bucket['key']) / 1000),
439 'report': 0,
440 'not_found': 0,
441 'slow_report': 0
442 }
443 for subbucket in bucket['types']['sub_agg']['buckets']:
444 if subbucket['key'] == 'slow':
445 point['slow_report'] = subbucket['doc_count']
446 elif subbucket['key'] == 'error':
447 point['report'] = subbucket['doc_count']
448 elif subbucket['key'] == 'not_found':
449 point['not_found'] = subbucket['doc_count']
508 "x": datetime.utcfromtimestamp(int(bucket["key"]) / 1000),
509 "report": 0,
510 "not_found": 0,
511 "slow_report": 0,
512 }
513 for subbucket in bucket["types"]["sub_agg"]["buckets"]:
514 if subbucket["key"] == "slow":
515 point["slow_report"] = subbucket["doc_count"]
516 elif subbucket["key"] == "error":
517 point["report"] = subbucket["doc_count"]
518 elif subbucket["key"] == "not_found":
519 point["not_found"] = subbucket["doc_count"]
450 520 series.append(point)
451 521 return series
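
Both timeseries builders in these hunks derive the date_histogram interval from the queried window with the same thresholds, and force empty buckets across the whole range via min_doc_count: 0 plus extended_bounds. The interval rule as a standalone helper (the 12h/3d/2w cutoffs mirror the h.time_deltas lookups above):

    from datetime import timedelta

    def histogram_interval(delta):
        if delta < timedelta(hours=12):
            return "1m"
        if delta <= timedelta(days=3):
            return "5m"
        if delta >= timedelta(weeks=2):
            return "24h"
        return "1h"

    assert histogram_interval(timedelta(hours=6)) == "1m"
    assert histogram_interval(timedelta(days=7)) == "1h"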
@@ -25,26 +25,39 b' class ReportStatService(BaseService):'
25 25 def count_by_type(cls, report_type, resource_id, since_when):
26 26 report_type = ReportType.key_from_value(report_type)
27 27
28 index_names = es_index_name_limiter(start_date=since_when,
29 ixtypes=['reports'])
28 index_names = es_index_name_limiter(start_date=since_when, ixtypes=["reports"])
30 29
31 30 es_query = {
32 'aggs': {'reports': {'aggs': {
33 'sub_agg': {'value_count': {'field': 'tags.group_id.values'}}},
34 'filter': {'and': [{'terms': {'resource_id': [resource_id]}},
35 {'exists': {
36 'field': 'tags.group_id.values'}}]}}},
37 'query': {'filtered': {'filter': {
38 'and': [{'terms': {'resource_id': [resource_id]}},
39 {'terms': {'tags.type.values': [report_type]}},
40 {'range': {'timestamp': {
41 'gte': since_when}}}]}}}}
31 "aggs": {
32 "reports": {
33 "aggs": {
34 "sub_agg": {"value_count": {"field": "tags.group_id.values"}}
35 },
36 "filter": {
37 "and": [
38 {"terms": {"resource_id": [resource_id]}},
39 {"exists": {"field": "tags.group_id.values"}},
40 ]
41 },
42 }
43 },
44 "query": {
45 "filtered": {
46 "filter": {
47 "and": [
48 {"terms": {"resource_id": [resource_id]}},
49 {"terms": {"tags.type.values": [report_type]}},
50 {"range": {"timestamp": {"gte": since_when}}},
51 ]
52 }
53 }
54 },
55 }
42 56
43 57 if index_names:
44 result = Datastores.es.search(body=es_query,
45 index=index_names,
46 doc_type='log',
47 size=0)
48 return result['aggregations']['reports']['sub_agg']['value']
58 result = Datastores.es.search(
59 body=es_query, index=index_names, doc_type="log", size=0
60 )
61 return result["aggregations"]["reports"]["sub_agg"]["value"]
49 62 else:
50 63 return 0
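
count_by_type above shows the guard used throughout these services: resolve the daily index names first and short-circuit when none fall inside the queried window, instead of searching nonexistent indices. Schematically (the es client and query are stand-ins):

    def count_or_zero(es, es_query, index_names):
        # mirrors the if/else above: no matching indices means a zero count
        if not index_names:
            return 0
        result = es.search(body=es_query, index=index_names, doc_type="log", size=0)
        return result["aggregations"]["reports"]["sub_agg"]["value"]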
@@ -23,264 +23,353 b' from appenlight.lib.enums import ReportType'
23 23 from appenlight.lib.utils import es_index_name_limiter
24 24
25 25 try:
26 from ae_uptime_ce.models.services.uptime_metric import \
27 UptimeMetricService
26 from ae_uptime_ce.models.services.uptime_metric import UptimeMetricService
28 27 except ImportError:
29 28 UptimeMetricService = None
30 29
31 30
32 31 def check_key(key, stats, uptime, total_seconds):
33 32 if key not in stats:
34 stats[key] = {'name': key,
35 'requests': 0,
36 'errors': 0,
37 'tolerated_requests': 0,
38 'frustrating_requests': 0,
39 'satisfying_requests': 0,
40 'total_minutes': total_seconds / 60.0,
41 'uptime': uptime,
42 'apdex': 0,
43 'rpm': 0,
44 'response_time': 0,
45 'avg_response_time': 0}
33 stats[key] = {
34 "name": key,
35 "requests": 0,
36 "errors": 0,
37 "tolerated_requests": 0,
38 "frustrating_requests": 0,
39 "satisfying_requests": 0,
40 "total_minutes": total_seconds / 60.0,
41 "uptime": uptime,
42 "apdex": 0,
43 "rpm": 0,
44 "response_time": 0,
45 "avg_response_time": 0,
46 }
46 47
47 48
48 49 class RequestMetricService(BaseService):
49 50 @classmethod
50 51 def get_metrics_stats(cls, request, filter_settings, db_session=None):
51 delta = filter_settings['end_date'] - filter_settings['start_date']
52 if delta < h.time_deltas.get('12h')['delta']:
53 interval = '1m'
54 elif delta <= h.time_deltas.get('3d')['delta']:
55 interval = '5m'
56 elif delta >= h.time_deltas.get('2w')['delta']:
57 interval = '24h'
52 delta = filter_settings["end_date"] - filter_settings["start_date"]
53 if delta < h.time_deltas.get("12h")["delta"]:
54 interval = "1m"
55 elif delta <= h.time_deltas.get("3d")["delta"]:
56 interval = "5m"
57 elif delta >= h.time_deltas.get("2w")["delta"]:
58 interval = "24h"
58 59 else:
59 interval = '1h'
60 interval = "1h"
60 61
61 filter_settings['namespace'] = ['appenlight.request_metric']
62 filter_settings["namespace"] = ["appenlight.request_metric"]
62 63
63 64 es_query = {
64 'aggs': {
65 'parent_agg': {
66 'aggs': {'custom': {'aggs': {'sub_agg': {
67 'sum': {'field': 'tags.custom.numeric_values'}}},
68 'filter': {'exists': {
69 'field': 'tags.custom.numeric_values'}}},
70 'main': {'aggs': {'sub_agg': {'sum': {
71 'field': 'tags.main.numeric_values'}}},
72 'filter': {'exists': {
73 'field': 'tags.main.numeric_values'}}},
74 'nosql': {'aggs': {'sub_agg': {'sum': {
75 'field': 'tags.nosql.numeric_values'}}},
76 'filter': {'exists': {
77 'field': 'tags.nosql.numeric_values'}}},
78 'remote': {'aggs': {'sub_agg': {'sum': {
79 'field': 'tags.remote.numeric_values'}}},
80 'filter': {'exists': {
81 'field': 'tags.remote.numeric_values'}}},
82 'requests': {'aggs': {'sub_agg': {'sum': {
83 'field': 'tags.requests.numeric_values'}}},
84 'filter': {'exists': {
85 'field': 'tags.requests.numeric_values'}}},
86 'sql': {'aggs': {'sub_agg': {
87 'sum': {'field': 'tags.sql.numeric_values'}}},
88 'filter': {'exists': {
89 'field': 'tags.sql.numeric_values'}}},
90 'tmpl': {'aggs': {'sub_agg': {'sum': {
91 'field': 'tags.tmpl.numeric_values'}}},
92 'filter': {'exists': {
93 'field': 'tags.tmpl.numeric_values'}}}},
94 'date_histogram': {'extended_bounds': {
95 'max': filter_settings['end_date'],
96 'min': filter_settings['start_date']},
97 'field': 'timestamp',
98 'interval': interval,
99 'min_doc_count': 0}}},
100 'query': {'filtered': {
101 'filter': {'and': [{'terms': {
102 'resource_id': [filter_settings['resource'][0]]}},
103 {'range': {'timestamp': {
104 'gte': filter_settings['start_date'],
105 'lte': filter_settings['end_date']}}},
106 {'terms': {'namespace': [
107 'appenlight.request_metric']}}]}}}}
65 "aggs": {
66 "parent_agg": {
67 "aggs": {
68 "custom": {
69 "aggs": {
70 "sub_agg": {
71 "sum": {"field": "tags.custom.numeric_values"}
72 }
73 },
74 "filter": {
75 "exists": {"field": "tags.custom.numeric_values"}
76 },
77 },
78 "main": {
79 "aggs": {
80 "sub_agg": {
81 "sum": {"field": "tags.main.numeric_values"}
82 }
83 },
84 "filter": {"exists": {"field": "tags.main.numeric_values"}},
85 },
86 "nosql": {
87 "aggs": {
88 "sub_agg": {
89 "sum": {"field": "tags.nosql.numeric_values"}
90 }
91 },
92 "filter": {
93 "exists": {"field": "tags.nosql.numeric_values"}
94 },
95 },
96 "remote": {
97 "aggs": {
98 "sub_agg": {
99 "sum": {"field": "tags.remote.numeric_values"}
100 }
101 },
102 "filter": {
103 "exists": {"field": "tags.remote.numeric_values"}
104 },
105 },
106 "requests": {
107 "aggs": {
108 "sub_agg": {
109 "sum": {"field": "tags.requests.numeric_values"}
110 }
111 },
112 "filter": {
113 "exists": {"field": "tags.requests.numeric_values"}
114 },
115 },
116 "sql": {
117 "aggs": {
118 "sub_agg": {"sum": {"field": "tags.sql.numeric_values"}}
119 },
120 "filter": {"exists": {"field": "tags.sql.numeric_values"}},
121 },
122 "tmpl": {
123 "aggs": {
124 "sub_agg": {
125 "sum": {"field": "tags.tmpl.numeric_values"}
126 }
127 },
128 "filter": {"exists": {"field": "tags.tmpl.numeric_values"}},
129 },
130 },
131 "date_histogram": {
132 "extended_bounds": {
133 "max": filter_settings["end_date"],
134 "min": filter_settings["start_date"],
135 },
136 "field": "timestamp",
137 "interval": interval,
138 "min_doc_count": 0,
139 },
140 }
141 },
142 "query": {
143 "filtered": {
144 "filter": {
145 "and": [
146 {
147 "terms": {
148 "resource_id": [filter_settings["resource"][0]]
149 }
150 },
151 {
152 "range": {
153 "timestamp": {
154 "gte": filter_settings["start_date"],
155 "lte": filter_settings["end_date"],
156 }
157 }
158 },
159 {"terms": {"namespace": ["appenlight.request_metric"]}},
160 ]
161 }
162 }
163 },
164 }
108 165
109 166 index_names = es_index_name_limiter(
110 start_date=filter_settings['start_date'],
111 end_date=filter_settings['end_date'],
112 ixtypes=['metrics'])
167 start_date=filter_settings["start_date"],
168 end_date=filter_settings["end_date"],
169 ixtypes=["metrics"],
170 )
113 171 if not index_names:
114 172 return []
115 173
116 result = Datastores.es.search(body=es_query,
117 index=index_names,
118 doc_type='log',
119 size=0)
174 result = Datastores.es.search(
175 body=es_query, index=index_names, doc_type="log", size=0
176 )
120 177
121 178 plot_data = []
122 for item in result['aggregations']['parent_agg']['buckets']:
123 x_time = datetime.utcfromtimestamp(int(item['key']) / 1000)
179 for item in result["aggregations"]["parent_agg"]["buckets"]:
180 x_time = datetime.utcfromtimestamp(int(item["key"]) / 1000)
124 181 point = {"x": x_time}
125 for key in ['custom', 'main', 'nosql', 'remote',
126 'requests', 'sql', 'tmpl']:
127 value = item[key]['sub_agg']['value']
182 for key in ["custom", "main", "nosql", "remote", "requests", "sql", "tmpl"]:
183 value = item[key]["sub_agg"]["value"]
128 184 point[key] = round(value, 3) if value else 0
129 185 plot_data.append(point)
130 186
131 187 return plot_data
132 188
133 189 @classmethod
134 def get_requests_breakdown(cls, request, filter_settings,
135 db_session=None):
190 def get_requests_breakdown(cls, request, filter_settings, db_session=None):
136 191 db_session = get_db_session(db_session)
137 192
138 193 # fetch total time of all requests in this time range
139 194 index_names = es_index_name_limiter(
140 start_date=filter_settings['start_date'],
141 end_date=filter_settings['end_date'],
142 ixtypes=['metrics'])
195 start_date=filter_settings["start_date"],
196 end_date=filter_settings["end_date"],
197 ixtypes=["metrics"],
198 )
143 199
144 if index_names and filter_settings['resource']:
200 if index_names and filter_settings["resource"]:
145 201 es_query = {
146 'aggs': {'main': {'aggs': {
147 'sub_agg': {'sum': {'field': 'tags.main.numeric_values'}}},
148 'filter': {'exists': {
149 'field': 'tags.main.numeric_values'}}}},
150 'query': {'filtered': {
151 'filter': {'and': [
152 {'terms': {
153 'resource_id': [filter_settings['resource'][0]]}},
154 {'range': {'timestamp': {
155 'gte': filter_settings['start_date'],
156 'lte': filter_settings['end_date']}}},
157 {'terms': {'namespace': [
158 'appenlight.request_metric']}}]}}}}
159 result = Datastores.es.search(body=es_query,
160 index=index_names,
161 doc_type='log',
162 size=0)
163 total_time_spent = result['aggregations']['main']['sub_agg'][
164 'value']
202 "aggs": {
203 "main": {
204 "aggs": {
205 "sub_agg": {"sum": {"field": "tags.main.numeric_values"}}
206 },
207 "filter": {"exists": {"field": "tags.main.numeric_values"}},
208 }
209 },
210 "query": {
211 "filtered": {
212 "filter": {
213 "and": [
214 {
215 "terms": {
216 "resource_id": [filter_settings["resource"][0]]
217 }
218 },
219 {
220 "range": {
221 "timestamp": {
222 "gte": filter_settings["start_date"],
223 "lte": filter_settings["end_date"],
224 }
225 }
226 },
227 {"terms": {"namespace": ["appenlight.request_metric"]}},
228 ]
229 }
230 }
231 },
232 }
233 result = Datastores.es.search(
234 body=es_query, index=index_names, doc_type="log", size=0
235 )
236 total_time_spent = result["aggregations"]["main"]["sub_agg"]["value"]
165 237 else:
166 238 total_time_spent = 0
167 239 script_text = "doc['tags.main.numeric_values'].value / {}".format(
168 total_time_spent)
240 total_time_spent
241 )
169 242
170 if index_names and filter_settings['resource']:
243 if index_names and filter_settings["resource"]:
171 244 es_query = {
172 'aggs': {
173 'parent_agg': {
174 'aggs': {'main': {'aggs': {
175 'sub_agg': {
176 'sum': {'field': 'tags.main.numeric_values'}}},
177 'filter': {
178 'exists': {
179 'field': 'tags.main.numeric_values'}}},
180 'percentage': {
181 'aggs': {'sub_agg': {
182 'sum': {
183 'lang': 'expression',
184 'script': script_text}}},
185 'filter': {
186 'exists': {
187 'field': 'tags.main.numeric_values'}}},
188 'requests': {'aggs': {'sub_agg': {
189 'sum': {
190 'field': 'tags.requests.numeric_values'}}},
191 'filter': {'exists': {
192 'field': 'tags.requests.numeric_values'}}}},
193 'terms': {'field': 'tags.view_name.values',
194 'order': {'percentage>sub_agg': 'desc'},
195 'size': 15}}},
196 'query': {'filtered': {'filter': {'and': [
197 {'terms': {
198 'resource_id': [filter_settings['resource'][0]]}},
199 {'range': {
200 'timestamp': {'gte': filter_settings['start_date'],
201 'lte': filter_settings['end_date']
202 }
203 }
204 }
205 ]}
206 }}
207 }
208 result = Datastores.es.search(body=es_query,
209 index=index_names,
210 doc_type='log',
211 size=0)
212 series = result['aggregations']['parent_agg']['buckets']
213 else:
214 series = []
215
216 and_part = [
217 {"term": {"resource_id": filter_settings['resource'][0]}},
218 {"terms": {"tags.view_name.values": [row['key'] for
219 row in series]}},
220 {"term": {"report_type": str(ReportType.slow)}}
221 ]
222 query = {
223 245 "aggs": {
224 "top_reports": {
225 "terms": {
226 "field": "tags.view_name.values",
227 "size": len(series)
246 "parent_agg": {
247 "aggs": {
248 "main": {
249 "aggs": {
250 "sub_agg": {
251 "sum": {"field": "tags.main.numeric_values"}
252 }
253 },
254 "filter": {
255 "exists": {"field": "tags.main.numeric_values"}
256 },
228 257 },
258 "percentage": {
229 259 "aggs": {
230 "top_calls_hits": {
231 "top_hits": {
232 "sort": {"start_time": "desc"},
233 "size": 5
260 "sub_agg": {
261 "sum": {
262 "lang": "expression",
263 "script": script_text,
234 264 }
235 265 }
266 },
267 "filter": {
268 "exists": {"field": "tags.main.numeric_values"}
269 },
270 },
271 "requests": {
272 "aggs": {
273 "sub_agg": {
274 "sum": {"field": "tags.requests.numeric_values"}
236 275 }
276 },
277 "filter": {
278 "exists": {"field": "tags.requests.numeric_values"}
279 },
280 },
281 },
282 "terms": {
283 "field": "tags.view_name.values",
284 "order": {"percentage>sub_agg": "desc"},
285 "size": 15,
286 },
237 287 }
238 288 },
239
240 289 "query": {
241 290 "filtered": {
242 291 "filter": {
243 "and": and_part
292 "and": [
293 {
294 "terms": {
295 "resource_id": [filter_settings["resource"][0]]
244 296 }
297 },
298 {
299 "range": {
300 "timestamp": {
301 "gte": filter_settings["start_date"],
302 "lte": filter_settings["end_date"],
245 303 }
246 304 }
305 },
306 ]
307 }
308 }
309 },
310 }
311 result = Datastores.es.search(
312 body=es_query, index=index_names, doc_type="log", size=0
313 )
314 series = result["aggregations"]["parent_agg"]["buckets"]
315 else:
316 series = []
317
318 and_part = [
319 {"term": {"resource_id": filter_settings["resource"][0]}},
320 {"terms": {"tags.view_name.values": [row["key"] for row in series]}},
321 {"term": {"report_type": str(ReportType.slow)}},
322 ]
323 query = {
324 "aggs": {
325 "top_reports": {
326 "terms": {"field": "tags.view_name.values", "size": len(series)},
327 "aggs": {
328 "top_calls_hits": {
329 "top_hits": {"sort": {"start_time": "desc"}, "size": 5}
330 }
331 },
332 }
333 },
334 "query": {"filtered": {"filter": {"and": and_part}}},
247 335 }
248 336 details = {}
249 index_names = es_index_name_limiter(ixtypes=['reports'])
337 index_names = es_index_name_limiter(ixtypes=["reports"])
250 338 if index_names and series:
251 339 result = Datastores.es.search(
252 body=query, doc_type='report', size=0, index=index_names)
253 for bucket in result['aggregations']['top_reports']['buckets']:
254 details[bucket['key']] = []
255
256 for hit in bucket['top_calls_hits']['hits']['hits']:
257 details[bucket['key']].append(
258 {'report_id': hit['_source']['pg_id'],
259 'group_id': hit['_source']['group_id']}
340 body=query, doc_type="report", size=0, index=index_names
341 )
342 for bucket in result["aggregations"]["top_reports"]["buckets"]:
343 details[bucket["key"]] = []
344
345 for hit in bucket["top_calls_hits"]["hits"]["hits"]:
346 details[bucket["key"]].append(
347 {
348 "report_id": hit["_source"]["pg_id"],
349 "group_id": hit["_source"]["group_id"],
350 }
260 351 )
261 352
262 353 results = []
263 354 for row in series:
264 355 result = {
265 'key': row['key'],
266 'main': row['main']['sub_agg']['value'],
267 'requests': row['requests']['sub_agg']['value']
356 "key": row["key"],
357 "main": row["main"]["sub_agg"]["value"],
358 "requests": row["requests"]["sub_agg"]["value"],
268 359 }
269 360 # es can return 'infinity'
270 361 try:
271 result['percentage'] = float(
272 row['percentage']['sub_agg']['value'])
362 result["percentage"] = float(row["percentage"]["sub_agg"]["value"])
273 363 except ValueError:
274 result['percentage'] = 0
364 result["percentage"] = 0
275 365
276 result['latest_details'] = details.get(row['key']) or []
366 result["latest_details"] = details.get(row["key"]) or []
277 367 results.append(result)
278 368
279 369 return results
280 370
281 371 @classmethod
282 def get_apdex_stats(cls, request, filter_settings,
283 threshold=1, db_session=None):
372 def get_apdex_stats(cls, request, filter_settings, threshold=1, db_session=None):
284 373 """
285 374 Returns server information and calculates the APDEX score per server
286 375 for the dashboard stats boxes (upper right)
@@ -288,156 +377,241 b' class RequestMetricService(BaseService):'
288 377 # Apdex t = (Satisfied Count + Tolerated Count / 2) / Total Samples
289 378 db_session = get_db_session(db_session)
290 379 index_names = es_index_name_limiter(
291 start_date=filter_settings['start_date'],
292 end_date=filter_settings['end_date'], ixtypes=['metrics'])
380 start_date=filter_settings["start_date"],
381 end_date=filter_settings["end_date"],
382 ixtypes=["metrics"],
383 )
293 384
294 385 requests_series = []
295 386
296 if index_names and filter_settings['resource']:
387 if index_names and filter_settings["resource"]:
297 388 es_query = {
298 'aggs': {
299 'parent_agg': {'aggs': {
300 'frustrating': {'aggs': {'sub_agg': {
301 'sum': {'field': 'tags.requests.numeric_values'}}},
302 'filter': {'and': [
303 {'range': {
304 'tags.main.numeric_values': {'gte': '4'}}},
305 {'exists': {
306 'field': 'tags.requests.numeric_values'}}]
307 }
308 },
309 'main': {'aggs': {'sub_agg': {'sum': {
310 'field': 'tags.main.numeric_values'}}},
311 'filter': {'exists': {
312 'field': 'tags.main.numeric_values'}}},
313 'requests': {'aggs': {'sub_agg': {
314 'sum': {
315 'field': 'tags.requests.numeric_values'}}},
316 'filter': {'exists': {
317 'field': 'tags.requests.numeric_values'}}},
318 'tolerated': {'aggs': {'sub_agg': {
319 'sum': {
320 'field': 'tags.requests.numeric_values'}}},
321 'filter': {'and': [
322 {'range': {
323 'tags.main.numeric_values': {'gte': '1'}}},
324 {'range': {
325 'tags.main.numeric_values': {'lt': '4'}}},
326 {'exists': {
327 'field': 'tags.requests.numeric_values'}}]}
328 }
329 },
330 'terms': {'field': 'tags.server_name.values',
331 'size': 999999}}},
332 'query': {
333 'filtered': {
334 'filter': {'and': [{'terms': {
335 'resource_id': [
336 filter_settings['resource'][0]]}},
337 {'range': {'timestamp': {
338 'gte': filter_settings['start_date'],
339 'lte': filter_settings['end_date']}}},
340 {'terms': {'namespace': [
341 'appenlight.request_metric']}}]}}}}
342
343 result = Datastores.es.search(body=es_query,
344 index=index_names,
345 doc_type='log',
346 size=0)
347 for bucket in result['aggregations']['parent_agg']['buckets']:
348 requests_series.append({
349 'frustrating': bucket['frustrating']['sub_agg']['value'],
350 'main': bucket['main']['sub_agg']['value'],
351 'requests': bucket['requests']['sub_agg']['value'],
352 'tolerated': bucket['tolerated']['sub_agg']['value'],
353 'key': bucket['key']
354 })
355
356 since_when = filter_settings['start_date']
357 until = filter_settings['end_date']
389 "aggs": {
390 "parent_agg": {
391 "aggs": {
392 "frustrating": {
393 "aggs": {
394 "sub_agg": {
395 "sum": {"field": "tags.requests.numeric_values"}
396 }
397 },
398 "filter": {
399 "and": [
400 {
401 "range": {
402 "tags.main.numeric_values": {"gte": "4"}
403 }
404 },
405 {
406 "exists": {
407 "field": "tags.requests.numeric_values"
408 }
409 },
410 ]
411 },
412 },
413 "main": {
414 "aggs": {
415 "sub_agg": {
416 "sum": {"field": "tags.main.numeric_values"}
417 }
418 },
419 "filter": {
420 "exists": {"field": "tags.main.numeric_values"}
421 },
422 },
423 "requests": {
424 "aggs": {
425 "sub_agg": {
426 "sum": {"field": "tags.requests.numeric_values"}
427 }
428 },
429 "filter": {
430 "exists": {"field": "tags.requests.numeric_values"}
431 },
432 },
433 "tolerated": {
434 "aggs": {
435 "sub_agg": {
436 "sum": {"field": "tags.requests.numeric_values"}
437 }
438 },
439 "filter": {
440 "and": [
441 {
442 "range": {
443 "tags.main.numeric_values": {"gte": "1"}
444 }
445 },
446 {
447 "range": {
448 "tags.main.numeric_values": {"lt": "4"}
449 }
450 },
451 {
452 "exists": {
453 "field": "tags.requests.numeric_values"
454 }
455 },
456 ]
457 },
458 },
459 },
460 "terms": {"field": "tags.server_name.values", "size": 999999},
461 }
462 },
463 "query": {
464 "filtered": {
465 "filter": {
466 "and": [
467 {
468 "terms": {
469 "resource_id": [filter_settings["resource"][0]]
470 }
471 },
472 {
473 "range": {
474 "timestamp": {
475 "gte": filter_settings["start_date"],
476 "lte": filter_settings["end_date"],
477 }
478 }
479 },
480 {"terms": {"namespace": ["appenlight.request_metric"]}},
481 ]
482 }
483 }
484 },
485 }
486
487 result = Datastores.es.search(
488 body=es_query, index=index_names, doc_type="log", size=0
489 )
490 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
491 requests_series.append(
492 {
493 "frustrating": bucket["frustrating"]["sub_agg"]["value"],
494 "main": bucket["main"]["sub_agg"]["value"],
495 "requests": bucket["requests"]["sub_agg"]["value"],
496 "tolerated": bucket["tolerated"]["sub_agg"]["value"],
497 "key": bucket["key"],
498 }
499 )
500
501 since_when = filter_settings["start_date"]
502 until = filter_settings["end_date"]
358 503
359 504 # total errors
360 505
361 506 index_names = es_index_name_limiter(
362 start_date=filter_settings['start_date'],
363 end_date=filter_settings['end_date'], ixtypes=['reports'])
507 start_date=filter_settings["start_date"],
508 end_date=filter_settings["end_date"],
509 ixtypes=["reports"],
510 )
364 511
365 512 report_series = []
366 if index_names and filter_settings['resource']:
513 if index_names and filter_settings["resource"]:
367 514 report_type = ReportType.key_from_value(ReportType.error)
368 515 es_query = {
369 'aggs': {
370 'parent_agg': {'aggs': {'errors': {'aggs': {'sub_agg': {
371 'sum': {
372 'field': 'tags.occurences.numeric_values'}}},
373 'filter': {'and': [
374 {'terms': {
375 'tags.type.values': [report_type]}},
376 {'exists': {
377 'field': 'tags.occurences.numeric_values'}}]
378 }
379 }},
380 'terms': {'field': 'tags.server_name.values',
381 'size': 999999}}},
382 'query': {'filtered': {
383 'filter': {'and': [
384 {'terms': {
385 'resource_id': [filter_settings['resource'][0]]}},
386 {'range': {
387 'timestamp': {'gte': filter_settings['start_date'],
388 'lte': filter_settings['end_date']}}
389 },
390 {'terms': {'namespace': ['appenlight.error']}}]
391 }
392 }}
393 }
394 result = Datastores.es.search(body=es_query,
395 index=index_names,
396 doc_type='log',
397 size=0)
398 for bucket in result['aggregations']['parent_agg']['buckets']:
516 "aggs": {
517 "parent_agg": {
518 "aggs": {
519 "errors": {
520 "aggs": {
521 "sub_agg": {
522 "sum": {
523 "field": "tags.occurences.numeric_values"
524 }
525 }
526 },
527 "filter": {
528 "and": [
529 {"terms": {"tags.type.values": [report_type]}},
530 {
531 "exists": {
532 "field": "tags.occurences.numeric_values"
533 }
534 },
535 ]
536 },
537 }
538 },
539 "terms": {"field": "tags.server_name.values", "size": 999999},
540 }
541 },
542 "query": {
543 "filtered": {
544 "filter": {
545 "and": [
546 {
547 "terms": {
548 "resource_id": [filter_settings["resource"][0]]
549 }
550 },
551 {
552 "range": {
553 "timestamp": {
554 "gte": filter_settings["start_date"],
555 "lte": filter_settings["end_date"],
556 }
557 }
558 },
559 {"terms": {"namespace": ["appenlight.error"]}},
560 ]
561 }
562 }
563 },
564 }
565 result = Datastores.es.search(
566 body=es_query, index=index_names, doc_type="log", size=0
567 )
568 for bucket in result["aggregations"]["parent_agg"]["buckets"]:
399 569 report_series.append(
400 {'key': bucket['key'],
401 'errors': bucket['errors']['sub_agg']['value']
570 {
571 "key": bucket["key"],
572 "errors": bucket["errors"]["sub_agg"]["value"],
402 573 }
403 574 )
404 575
405 576 stats = {}
406 577 if UptimeMetricService is not None:
407 578 uptime = UptimeMetricService.get_uptime_by_app(
408 filter_settings['resource'][0],
409 since_when=since_when, until=until)
579 filter_settings["resource"][0], since_when=since_when, until=until
580 )
410 581 else:
411 582 uptime = 0
412 583
413 584 total_seconds = (until - since_when).total_seconds()
414 585
415 586 for stat in requests_series:
416 check_key(stat['key'], stats, uptime, total_seconds)
417 stats[stat['key']]['requests'] = int(stat['requests'])
418 stats[stat['key']]['response_time'] = stat['main']
419 stats[stat['key']]['tolerated_requests'] = stat['tolerated']
420 stats[stat['key']]['frustrating_requests'] = stat['frustrating']
587 check_key(stat["key"], stats, uptime, total_seconds)
588 stats[stat["key"]]["requests"] = int(stat["requests"])
589 stats[stat["key"]]["response_time"] = stat["main"]
590 stats[stat["key"]]["tolerated_requests"] = stat["tolerated"]
591 stats[stat["key"]]["frustrating_requests"] = stat["frustrating"]
421 592 for server in report_series:
422 check_key(server['key'], stats, uptime, total_seconds)
423 stats[server['key']]['errors'] = server['errors']
593 check_key(server["key"], stats, uptime, total_seconds)
594 stats[server["key"]]["errors"] = server["errors"]
424 595
425 596 server_stats = list(stats.values())
426 597 for stat in server_stats:
427 stat['satisfying_requests'] = stat['requests'] - stat['errors'] \
428 - stat['frustrating_requests'] - \
429 stat['tolerated_requests']
430 if stat['satisfying_requests'] < 0:
431 stat['satisfying_requests'] = 0
432
433 if stat['requests']:
434 stat['avg_response_time'] = round(stat['response_time'] /
435 stat['requests'], 3)
436 qual_requests = stat['satisfying_requests'] + \
437 stat['tolerated_requests'] / 2.0
438 stat['apdex'] = round((qual_requests / stat['requests']) * 100,
439 2)
440 stat['rpm'] = round(stat['requests'] / stat['total_minutes'],
441 2)
442
443 return sorted(server_stats, key=lambda x: x['name'])
598 stat["satisfying_requests"] = (
599 stat["requests"]
600 - stat["errors"]
601 - stat["frustrating_requests"]
602 - stat["tolerated_requests"]
603 )
604 if stat["satisfying_requests"] < 0:
605 stat["satisfying_requests"] = 0
606
607 if stat["requests"]:
608 stat["avg_response_time"] = round(
609 stat["response_time"] / stat["requests"], 3
610 )
611 qual_requests = (
612 stat["satisfying_requests"] + stat["tolerated_requests"] / 2.0
613 )
614 stat["apdex"] = round((qual_requests / stat["requests"]) * 100, 2)
615 stat["rpm"] = round(stat["requests"] / stat["total_minutes"], 2)
616
617 return sorted(server_stats, key=lambda x: x["name"])
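The method above ends by applying the Apdex formula from its opening comment: satisfied requests plus half of the tolerated ones, divided by the total sample count, expressed as a percentage. A minimal sketch of that arithmetic with hypothetical counts (the real method derives them from the ES buckets and clamps the satisfied count at zero):

def apdex_score(requests, errors, tolerated, frustrating):
    # satisfied = whatever is left after errors, frustrating and tolerated
    satisfying = max(requests - errors - frustrating - tolerated, 0)
    # Apdex = (satisfied + tolerated / 2) / total samples, as a percentage
    qualifying = satisfying + tolerated / 2.0
    return round((qualifying / requests) * 100, 2) if requests else 0

print(apdex_score(1000, 20, 100, 30))  # 850 satisfied + 50 -> 90.0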
@@ -22,71 +22,90 b' from appenlight.lib.utils import es_index_name_limiter'
22 22
23 23 class SlowCallService(BaseService):
24 24 @classmethod
25 def get_time_consuming_calls(cls, request, filter_settings,
26 db_session=None):
25 def get_time_consuming_calls(cls, request, filter_settings, db_session=None):
27 26 db_session = get_db_session(db_session)
28 27 # get slow calls from older partitions too
29 28 index_names = es_index_name_limiter(
30 start_date=filter_settings['start_date'],
31 end_date=filter_settings['end_date'],
32 ixtypes=['slow_calls'])
33 if index_names and filter_settings['resource']:
29 start_date=filter_settings["start_date"],
30 end_date=filter_settings["end_date"],
31 ixtypes=["slow_calls"],
32 )
33 if index_names and filter_settings["resource"]:
34 34 # get longest time taking hashes
35 35 es_query = {
36 'aggs': {
37 'parent_agg': {
38 'aggs': {
39 'duration': {
40 'aggs': {'sub_agg': {
41 'sum': {
42 'field': 'tags.duration.numeric_values'}
43 }},
44 'filter': {'exists': {
45 'field': 'tags.duration.numeric_values'}}},
46 'total': {
47 'aggs': {'sub_agg': {'value_count': {
48 'field': 'tags.statement_hash.values'}}},
49 'filter': {'exists': {
50 'field': 'tags.statement_hash.values'}}}},
51 'terms': {'field': 'tags.statement_hash.values',
52 'order': {'duration>sub_agg': 'desc'},
53 'size': 15}}},
54 'query': {'filtered': {
55 'filter': {'and': [
56 {'terms': {
57 'resource_id': [filter_settings['resource'][0]]
58 }},
59 {'range': {'timestamp': {
60 'gte': filter_settings['start_date'],
61 'lte': filter_settings['end_date']}
62 }}]
36 "aggs": {
37 "parent_agg": {
38 "aggs": {
39 "duration": {
40 "aggs": {
41 "sub_agg": {
42 "sum": {"field": "tags.duration.numeric_values"}
43 }
44 },
45 "filter": {
46 "exists": {"field": "tags.duration.numeric_values"}
47 },
48 },
49 "total": {
50 "aggs": {
51 "sub_agg": {
52 "value_count": {
53 "field": "tags.statement_hash.values"
63 54 }
64 55 }
56 },
57 "filter": {
58 "exists": {"field": "tags.statement_hash.values"}
59 },
60 },
61 },
62 "terms": {
63 "field": "tags.statement_hash.values",
64 "order": {"duration>sub_agg": "desc"},
65 "size": 15,
66 },
65 67 }
68 },
69 "query": {
70 "filtered": {
71 "filter": {
72 "and": [
73 {
74 "terms": {
75 "resource_id": [filter_settings["resource"][0]]
76 }
77 },
78 {
79 "range": {
80 "timestamp": {
81 "gte": filter_settings["start_date"],
82 "lte": filter_settings["end_date"],
83 }
84 }
85 },
86 ]
87 }
88 }
89 },
66 90 }
67 91 result = Datastores.es.search(
68 body=es_query, index=index_names, doc_type='log', size=0)
69 results = result['aggregations']['parent_agg']['buckets']
92 body=es_query, index=index_names, doc_type="log", size=0
93 )
94 results = result["aggregations"]["parent_agg"]["buckets"]
70 95 else:
71 96 return []
72 hashes = [i['key'] for i in results]
97 hashes = [i["key"] for i in results]
73 98
74 99 # get queries associated with hashes
75 100 calls_query = {
76 101 "aggs": {
77 102 "top_calls": {
78 "terms": {
79 "field": "tags.statement_hash.values",
80 "size": 15
81 },
103 "terms": {"field": "tags.statement_hash.values", "size": 15},
82 104 "aggs": {
83 105 "top_calls_hits": {
84 "top_hits": {
85 "sort": {"timestamp": "desc"},
86 "size": 5
87 }
88 }
106 "top_hits": {"sort": {"timestamp": "desc"}, "size": 5}
89 107 }
108 },
90 109 }
91 110 },
92 111 "query": {
@@ -95,45 +114,38 b' class SlowCallService(BaseService):'
95 114 "and": [
96 115 {
97 116 "terms": {
98 "resource_id": [
99 filter_settings['resource'][0]
100 ]
101 }
102 },
103 {
104 "terms": {
105 "tags.statement_hash.values": hashes
117 "resource_id": [filter_settings["resource"][0]]
106 118 }
107 119 },
120 {"terms": {"tags.statement_hash.values": hashes}},
108 121 {
109 122 "range": {
110 123 "timestamp": {
111 "gte": filter_settings['start_date'],
112 "lte": filter_settings['end_date']
113 }
124 "gte": filter_settings["start_date"],
125 "lte": filter_settings["end_date"],
114 126 }
115 127 }
128 },
116 129 ]
117 130 }
118 131 }
132 },
119 133 }
120 }
121 calls = Datastores.es.search(body=calls_query,
122 index=index_names,
123 doc_type='log',
124 size=0)
134 calls = Datastores.es.search(
135 body=calls_query, index=index_names, doc_type="log", size=0
136 )
125 137 call_results = {}
126 138 report_ids = []
127 for call in calls['aggregations']['top_calls']['buckets']:
128 hits = call['top_calls_hits']['hits']['hits']
129 call_results[call['key']] = [i['_source'] for i in hits]
130 report_ids.extend([i['_source']['tags']['report_id']['values']
131 for i in hits])
139 for call in calls["aggregations"]["top_calls"]["buckets"]:
140 hits = call["top_calls_hits"]["hits"]["hits"]
141 call_results[call["key"]] = [i["_source"] for i in hits]
142 report_ids.extend(
143 [i["_source"]["tags"]["report_id"]["values"] for i in hits]
144 )
132 145 if report_ids:
133 146 r_query = db_session.query(Report.group_id, Report.id)
134 147 r_query = r_query.filter(Report.id.in_(report_ids))
135 r_query = r_query.filter(
136 Report.start_time >= filter_settings['start_date'])
148 r_query = r_query.filter(Report.start_time >= filter_settings["start_date"])
137 149 else:
138 150 r_query = []
139 151 reports_reversed = {}
@@ -142,27 +154,32 b' class SlowCallService(BaseService):'
142 154
143 155 final_results = []
144 156 for item in results:
145 if item['key'] not in call_results:
157 if item["key"] not in call_results:
146 158 continue
147 call = call_results[item['key']][0]
148 row = {'occurences': item['total']['sub_agg']['value'],
149 'total_duration': round(
150 item['duration']['sub_agg']['value']),
151 'statement': call['message'],
152 'statement_type': call['tags']['type']['values'],
153 'statement_subtype': call['tags']['subtype']['values'],
154 'statement_hash': item['key'],
155 'latest_details': []}
156 if row['statement_type'] in ['tmpl', 'remote']:
157 params = call['tags']['parameters']['values'] \
158 if 'parameters' in call['tags'] else ''
159 row['statement'] = '{} ({})'.format(call['message'], params)
160 for call in call_results[item['key']]:
161 report_id = call['tags']['report_id']['values']
159 call = call_results[item["key"]][0]
160 row = {
161 "occurences": item["total"]["sub_agg"]["value"],
162 "total_duration": round(item["duration"]["sub_agg"]["value"]),
163 "statement": call["message"],
164 "statement_type": call["tags"]["type"]["values"],
165 "statement_subtype": call["tags"]["subtype"]["values"],
166 "statement_hash": item["key"],
167 "latest_details": [],
168 }
169 if row["statement_type"] in ["tmpl", " remote"]:
170 params = (
171 call["tags"]["parameters"]["values"]
172 if "parameters" in call["tags"]
173 else ""
174 )
175 row["statement"] = "{} ({})".format(call["message"], params)
176 for call in call_results[item["key"]]:
177 report_id = call["tags"]["report_id"]["values"]
162 178 group_id = reports_reversed.get(report_id)
163 179 if group_id:
164 row['latest_details'].append(
165 {'group_id': group_id, 'report_id': report_id})
180 row["latest_details"].append(
181 {"group_id": group_id, "report_id": report_id}
182 )
166 183
167 184 final_results.append(row)
168 185
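Both queries in this method lean on the same Elasticsearch idiom: a terms aggregation whose buckets are ordered by a metric nested inside a single-bucket filter aggregation, addressed with the agg>sub_agg path in "order". A stripped-down sketch of the shape (field names taken from the diff; this matches the legacy filtered-query era of ES the rest of the file targets):

es_query = {
    "aggs": {
        "parent_agg": {
            "terms": {
                "field": "tags.statement_hash.values",
                # sort the 15 buckets by the summed duration inside each one
                "order": {"duration>sub_agg": "desc"},
                "size": 15,
            },
            "aggs": {
                "duration": {
                    # single-bucket filter agg, so the order path can cross it
                    "filter": {"exists": {"field": "tags.duration.numeric_values"}},
                    "aggs": {
                        "sub_agg": {"sum": {"field": "tags.duration.numeric_values"}}
                    },
                }
            },
        }
    }
}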
@@ -34,8 +34,9 b' class TagService(BaseService):'
34 34 return value
35 35
36 36 @classmethod
37 def by_resource_id_and_value(cls, resource_id, tag_name, value,
38 db_session=None, create_missing=True):
37 def by_resource_id_and_value(
38 cls, resource_id, tag_name, value, db_session=None, create_missing=True
39 ):
39 40 """
40 41 Fetches tag and creates a new one if missing
41 42 """
@@ -43,39 +44,42 b' class TagService(BaseService):'
43 44 registry = get_current_registry()
44 45
45 46 @registry.cache_regions.redis_min_10.cache_on_arguments(
46 namespace='TagService.by_resource_id_and_value')
47 namespace="TagService.by_resource_id_and_value"
48 )
47 49 def cached(resource_id, tag_name, value):
48 reduced_name = cls.cut_name(tag_name.decode('utf8'))
49 reduced_value = cls.cut_value(value.decode('utf8'))
50 reduced_name = cls.cut_name(tag_name.decode("utf8"))
51 reduced_value = cls.cut_value(value.decode("utf8"))
50 52
51 53 query = db_session.query(Tag)
52 54 query = query.filter(Tag.resource_id == resource_id)
53 55 query = query.filter(Tag.name == reduced_name)
54 query = query.filter(sa.cast(Tag.value, sa.types.TEXT) ==
55 sa.cast(json.dumps(reduced_value),
56 sa.types.TEXT))
56 query = query.filter(
57 sa.cast(Tag.value, sa.types.TEXT)
58 == sa.cast(json.dumps(reduced_value), sa.types.TEXT)
59 )
57 60 tag = query.first()
58 61 if tag:
59 62 db_session.expunge(tag)
60 63 return tag
61 64
62 view = cached(resource_id, tag_name.encode('utf8'),
63 value.encode('utf8'))
65 view = cached(resource_id, tag_name.encode("utf8"), value.encode("utf8"))
64 66 if not view and create_missing:
65 view = cls.create_tag(resource_id,
66 cls.cut_name(tag_name),
67 cls.cut_value(value),
68 db_session)
69 cached.invalidate(resource_id, tag_name.encode('utf8'),
70 value.encode('utf8'))
67 view = cls.create_tag(
68 resource_id, cls.cut_name(tag_name), cls.cut_value(value), db_session
69 )
70 cached.invalidate(
71 resource_id, tag_name.encode("utf8"), value.encode("utf8")
72 )
71 73 return view
72 74
73 75 @classmethod
74 76 def create_tag(cls, resource_id, tag_name, value, db_session=None):
75 77
76 tag = Tag(resource_id=resource_id,
78 tag = Tag(
79 resource_id=resource_id,
77 80 name=cls.cut_name(tag_name),
78 value=cls.cut_value(value))
81 value=cls.cut_value(value),
82 )
79 83 db_session = get_db_session(db_session)
80 84 db_session.add(tag)
81 85 db_session.flush()
@@ -87,7 +91,8 b' class TagService(BaseService):'
87 91 registry = get_current_registry()
88 92
89 93 @registry.cache_regions.redis_min_10.cache_on_arguments(
90 namespace='TagService.by_tag_id')
94 namespace="TagService.by_tag_id"
95 )
91 96 def cached(tag_id):
92 97 tag = db_session.query(Tag).filter(Tag.id == tag_id).first()
93 98 if tag:
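The cached() helpers in this service come from dogpile.cache, which is what Pyramid's cache_regions attribute exposes: cache_on_arguments memoizes a function per argument tuple within a named region, and the decorated function grows an invalidate() method for busting entries after writes. A self-contained sketch with an in-memory region (the diff uses a Redis-backed region named redis_min_10; the region setup below is an assumption for illustration):

from dogpile.cache import make_region

region = make_region().configure("dogpile.cache.memory")

@region.cache_on_arguments(namespace="TagService.by_resource_id_and_value")
def cached(resource_id, tag_name, value):
    # the expensive DB lookup would go here; results are memoized per arguments
    return {"resource_id": resource_id, "name": tag_name, "value": value}

cached(1, b"env", b"production")             # computed and stored
cached(1, b"env", b"production")             # served from the cache
cached.invalidate(1, b"env", b"production")  # dropped after create_tag runs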
@@ -34,7 +34,7 b' from pyramid.threadlocal import get_current_registry'
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 GroupOccurence = namedtuple('GroupOccurence', ['occurences', 'group'])
37 GroupOccurence = namedtuple("GroupOccurence", ["occurences", "group"])
38 38
39 39
40 40 class UserService(UserService):
@@ -43,31 +43,40 b' class UserService(UserService):'
43 43 return get_db_session(db_session).query(User).order_by(User.user_name)
44 44
45 45 @classmethod
46 def send_email(cls, request, recipients, variables, template,
47 immediately=False, silent=False):
46 def send_email(
47 cls, request, recipients, variables, template, immediately=False, silent=False
48 ):
48 49 html = pyramid.renderers.render(template, variables, request)
49 title = variables.get('email_title',
50 variables.get('title', "No Title"))
51 title = title.replace('\r', '').replace('\n', '')
50 title = variables.get("email_title", variables.get("title", "No Title"))
51 title = title.replace("\r", "").replace("\n", "")
52 52 sender = "{} <{}>".format(
53 request.registry.settings['mailing.from_name'],
54 request.registry.settings['mailing.from_email'])
53 request.registry.settings["mailing.from_name"],
54 request.registry.settings["mailing.from_email"],
55 )
55 56 message = pyramid_mailer.message.Message(
56 subject=title, sender=sender, recipients=recipients, html=html)
57 subject=title, sender=sender, recipients=recipients, html=html
58 )
57 59 if immediately:
58 60 try:
59 61 request.registry.mailer.send_immediately(message)
60 62 except Exception as e:
61 log.warning('Exception %s' % e)
63 log.warning("Exception %s" % e)
62 64 if not silent:
63 65 raise
64 66 else:
65 67 request.registry.mailer.send(message)
66 68
67 69 @classmethod
68 def get_paginator(cls, page=1, item_count=None, items_per_page=50,
69 order_by=None, filter_settings=None,
70 exclude_columns=None, db_session=None):
70 def get_paginator(
71 cls,
72 page=1,
73 item_count=None,
74 items_per_page=50,
75 order_by=None,
76 filter_settings=None,
77 exclude_columns=None,
78 db_session=None,
79 ):
71 80 registry = get_current_registry()
72 81 if not exclude_columns:
73 82 exclude_columns = []
@@ -75,19 +84,19 b' class UserService(UserService):'
75 84 filter_settings = {}
76 85 db_session = get_db_session(db_session)
77 86 q = db_session.query(User)
78 if filter_settings.get('order_col'):
79 order_col = filter_settings.get('order_col')
80 if filter_settings.get('order_dir') == 'dsc':
81 sort_on = 'desc'
87 if filter_settings.get("order_col"):
88 order_col = filter_settings.get("order_col")
89 if filter_settings.get("order_dir") == "dsc":
90 sort_on = "desc"
82 91 else:
83 sort_on = 'asc'
92 sort_on = "asc"
84 93 q = q.order_by(getattr(sa, sort_on)(getattr(User, order_col)))
85 94 else:
86 95 q = q.order_by(sa.desc(User.registered_date))
88 97 # remove urlgen from cache params or the count query never gets cached
88 97 cache_params = dict(filter_settings)
89 cache_params.pop('url', None)
90 cache_params.pop('url_maker', None)
98 cache_params.pop("url", None)
99 cache_params.pop("url_maker", None)
91 100
92 101 @registry.cache_regions.redis_min_5.cache_on_arguments()
93 102 def estimate_users(cache_key):
@@ -100,20 +109,23 b' class UserService(UserService):'
100 109 # errors just started to flow in
101 110 if item_count < 1000:
102 111 item_count = estimate_users.refresh(cache_params)
103 paginator = SqlalchemyOrmPage(q, page=page,
112 paginator = SqlalchemyOrmPage(
113 q,
114 page=page,
104 115 item_count=item_count,
105 116 items_per_page=items_per_page,
106 **filter_settings)
117 **filter_settings
118 )
107 119 return paginator
108 120
109 121 @classmethod
110 122 def get_valid_channels(cls, user):
111 return [channel for channel in user.alert_channels
112 if channel.channel_validated]
123 return [channel for channel in user.alert_channels if channel.channel_validated]
113 124
114 125 @classmethod
115 def report_notify(cls, user, request, application, report_groups,
116 occurence_dict, db_session=None):
126 def report_notify(
127 cls, user, request, application, report_groups, occurence_dict, db_session=None
128 ):
117 129 db_session = get_db_session(db_session)
118 130 if not report_groups:
119 131 return True
@@ -125,12 +137,12 b' class UserService(UserService):'
125 137 occurences = occurence_dict.get(group.id, 1)
126 138 for action in channel.channel_actions:
127 139 not_matched = (
128 action.resource_id and action.resource_id !=
129 application.resource_id)
130 if action.type != 'report' or not_matched:
140 action.resource_id
141 and action.resource_id != application.resource_id
142 )
143 if action.type != "report" or not_matched:
131 144 continue
132 should_notify = (action.action == 'always' or
133 not group.notified)
145 should_notify = action.action == "always" or not group.notified
134 146 rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
135 147 report_dict = group.get_report().get_dict(request)
136 148 if rule_obj.match(report_dict) and should_notify:
@@ -143,10 +155,12 b' class UserService(UserService):'
143 155 if not total_confirmed:
144 156 continue
145 157 try:
146 channel.notify_reports(resource=application,
158 channel.notify_reports(
159 resource=application,
147 160 user=user,
148 161 request=request,
149 162 since_when=since_when,
150 reports=confirmed_groups)
163 reports=confirmed_groups,
164 )
151 165 except IntegrationException as e:
152 log.warning('%s' % e)
166 log.warning("%s" % e)
@@ -24,51 +24,53 b' from ziggurat_foundations.models.base import BaseModel'
24 24
25 25
26 26 class SlowCall(Base, BaseModel):
27 __tablename__ = 'slow_calls'
28 __table_args__ = {'implicit_returning': False}
27 __tablename__ = "slow_calls"
28 __table_args__ = {"implicit_returning": False}
29 29
30 30 resource_id = sa.Column(sa.Integer(), nullable=False, index=True)
31 31 id = sa.Column(sa.Integer, nullable=False, primary_key=True)
32 report_id = sa.Column(sa.BigInteger,
33 sa.ForeignKey('reports.id',
34 ondelete='cascade',
35 onupdate='cascade'),
36 primary_key=True)
32 report_id = sa.Column(
33 sa.BigInteger,
34 sa.ForeignKey("reports.id", ondelete="cascade", onupdate="cascade"),
35 primary_key=True,
36 )
37 37 duration = sa.Column(sa.Float(), default=0)
38 statement = sa.Column(sa.UnicodeText(), default='')
39 statement_hash = sa.Column(sa.Unicode(60), default='')
38 statement = sa.Column(sa.UnicodeText(), default="")
39 statement_hash = sa.Column(sa.Unicode(60), default="")
40 40 parameters = sa.Column(JSON(), nullable=False, default=dict)
41 type = sa.Column(sa.Unicode(16), default='')
41 type = sa.Column(sa.Unicode(16), default="")
42 42 subtype = sa.Column(sa.Unicode(16), default=None)
43 location = sa.Column(sa.Unicode(255), default='')
44 timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
45 server_default=sa.func.now())
46 report_group_time = sa.Column(sa.DateTime(), default=datetime.utcnow,
47 server_default=sa.func.now())
43 location = sa.Column(sa.Unicode(255), default="")
44 timestamp = sa.Column(
45 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
46 )
47 report_group_time = sa.Column(
48 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
49 )
48 50
49 def set_data(self, data, protocol_version=None, resource_id=None,
50 report_group=None):
51 def set_data(
52 self, data, protocol_version=None, resource_id=None, report_group=None
53 ):
51 54 self.resource_id = resource_id
52 if data.get('start') and data.get('end'):
53 self.timestamp = data.get('start')
54 d = data.get('end') - data.get('start')
55 if data.get("start") and data.get("end"):
56 self.timestamp = data.get("start")
57 d = data.get("end") - data.get("start")
55 58 self.duration = d.total_seconds()
56 self.statement = data.get('statement', '')
57 self.type = data.get('type', 'unknown')[:16]
58 self.parameters = data.get('parameters', {})
59 self.location = data.get('location', '')[:255]
59 self.statement = data.get("statement", "")
60 self.type = data.get("type", "unknown")[:16]
61 self.parameters = data.get("parameters", {})
62 self.location = data.get("location", "")[:255]
60 63 self.report_group_time = report_group.first_timestamp
61 if 'subtype' in data:
62 self.subtype = data.get('subtype', 'unknown')[:16]
63 if self.type == 'tmpl':
64 self.set_hash('{} {}'.format(self.statement, self.parameters))
64 if "subtype" in data:
65 self.subtype = data.get("subtype", "unknown")[:16]
66 if self.type == "tmpl":
67 self.set_hash("{} {}".format(self.statement, self.parameters))
65 68 else:
66 69 self.set_hash()
67 70
68 71 def set_hash(self, custom_statement=None):
69 72 statement = custom_statement or self.statement
70 self.statement_hash = hashlib.sha1(
71 statement.encode('utf8')).hexdigest()
73 self.statement_hash = hashlib.sha1(statement.encode("utf8")).hexdigest()
72 74
73 75 @property
74 76 def end_time(self):
@@ -78,38 +80,48 b' class SlowCall(Base, BaseModel):'
78 80
79 81 def get_dict(self):
80 82 instance_dict = super(SlowCall, self).get_dict()
81 instance_dict['children'] = []
82 instance_dict['end_time'] = self.end_time
83 instance_dict["children"] = []
84 instance_dict["end_time"] = self.end_time
83 85 return instance_dict
84 86
85 87 def es_doc(self):
86 88 doc = {
87 'resource_id': self.resource_id,
88 'timestamp': self.timestamp,
89 'pg_id': str(self.id),
90 'permanent': False,
91 'request_id': None,
92 'log_level': 'UNKNOWN',
93 'message': self.statement,
94 'namespace': 'appenlight.slow_call',
95 'tags': {
96 'report_id': {'values': self.report_id,
97 'numeric_values': self.report_id},
98 'duration': {'values': None, 'numeric_values': self.duration},
99 'statement_hash': {'values': self.statement_hash,
100 'numeric_values': None},
101 'type': {'values': self.type, 'numeric_values': None},
102 'subtype': {'values': self.subtype, 'numeric_values': None},
103 'location': {'values': self.location, 'numeric_values': None},
104 'parameters': {'values': None, 'numeric_values': None}
89 "resource_id": self.resource_id,
90 "timestamp": self.timestamp,
91 "pg_id": str(self.id),
92 "permanent": False,
93 "request_id": None,
94 "log_level": "UNKNOWN",
95 "message": self.statement,
96 "namespace": "appenlight.slow_call",
97 "tags": {
98 "report_id": {
99 "values": self.report_id,
100 "numeric_values": self.report_id,
105 101 },
106 'tag_list': ['report_id', 'duration', 'statement_hash', 'type',
107 'subtype', 'location']
102 "duration": {"values": None, "numeric_values": self.duration},
103 "statement_hash": {
104 "values": self.statement_hash,
105 "numeric_values": None,
106 },
107 "type": {"values": self.type, "numeric_values": None},
108 "subtype": {"values": self.subtype, "numeric_values": None},
109 "location": {"values": self.location, "numeric_values": None},
110 "parameters": {"values": None, "numeric_values": None},
111 },
112 "tag_list": [
113 "report_id",
114 "duration",
115 "statement_hash",
116 "type",
117 "subtype",
118 "location",
119 ],
108 120 }
109 121 if isinstance(self.parameters, str):
110 doc['tags']['parameters']['values'] = self.parameters[:255]
122 doc["tags"]["parameters"]["values"] = self.parameters[:255]
111 123 return doc
112 124
113 125 @property
114 126 def partition_id(self):
115 return 'rcae_sc_%s' % self.report_group_time.strftime('%Y_%m')
127 return "rcae_sc_%s" % self.report_group_time.strftime("%Y_%m")
@@ -23,15 +23,16 b' from . import Base'
23 23
24 24
25 25 class Tag(Base, BaseModel):
26 __tablename__ = 'tags'
26 __tablename__ = "tags"
27 27
28 28 id = sa.Column(sa.Integer, primary_key=True)
29 resource_id = sa.Column(sa.Integer,
30 sa.ForeignKey('resources.resource_id'))
29 resource_id = sa.Column(sa.Integer, sa.ForeignKey("resources.resource_id"))
31 30 name = sa.Column(sa.Unicode(512), nullable=False)
32 31 value = sa.Column(JSON, nullable=False)
33 first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
34 server_default=sa.func.now())
35 last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
36 server_default=sa.func.now())
32 first_timestamp = sa.Column(
33 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
34 )
35 last_timestamp = sa.Column(
36 sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
37 )
37 38 times_seen = sa.Column(sa.Integer, nullable=False, default=0)
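Both timestamp columns above set a Python-side default and a database-side server_default. The former is invoked by SQLAlchemy when a row is inserted through the ORM; the latter becomes a DEFAULT now() clause in the emitted DDL, so rows inserted by raw SQL or other clients get stamped too. A small illustration (table name hypothetical):

import sqlalchemy as sa
from datetime import datetime

metadata = sa.MetaData()
events = sa.Table(
    "events",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column(
        "created",
        sa.DateTime(),
        default=datetime.utcnow,       # applied by SQLAlchemy on insert
        server_default=sa.func.now(),  # applied by the database otherwise
    ),
)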
@@ -32,72 +32,84 b' class User(UserMixin, Base):'
32 32
33 33 first_name = sa.Column(sa.Unicode(25))
34 34 last_name = sa.Column(sa.Unicode(25))
35 company_name = sa.Column(sa.Unicode(255), default='')
36 company_address = sa.Column(sa.Unicode(255), default='')
37 zip_code = sa.Column(sa.Unicode(25), default='')
38 city = sa.Column(sa.Unicode(50), default='')
39 default_report_sort = sa.Column(sa.Unicode(25), default='newest')
40 notes = sa.Column(sa.UnicodeText, default='')
35 company_name = sa.Column(sa.Unicode(255), default="")
36 company_address = sa.Column(sa.Unicode(255), default="")
37 zip_code = sa.Column(sa.Unicode(25), default="")
38 city = sa.Column(sa.Unicode(50), default="")
39 default_report_sort = sa.Column(sa.Unicode(25), default="newest")
40 notes = sa.Column(sa.UnicodeText, default="")
41 41 notifications = sa.Column(sa.Boolean(), default=True)
42 registration_ip = sa.Column(sa.UnicodeText(), default='')
43 alert_channels = sa.orm.relationship('AlertChannel',
42 registration_ip = sa.Column(sa.UnicodeText(), default="")
43 alert_channels = sa.orm.relationship(
44 "AlertChannel",
44 45 cascade="all,delete-orphan",
45 46 passive_deletes=True,
46 47 passive_updates=True,
47 backref='owner',
48 order_by='AlertChannel.channel_name, '
49 'AlertChannel.channel_value')
48 backref="owner",
49 order_by="AlertChannel.channel_name, " "AlertChannel.channel_value",
50 )
50 51
51 alert_actions = sa.orm.relationship('AlertChannelAction',
52 alert_actions = sa.orm.relationship(
53 "AlertChannelAction",
52 54 cascade="all,delete-orphan",
53 55 passive_deletes=True,
54 56 passive_updates=True,
55 backref='owner',
56 order_by='AlertChannelAction.pkey')
57 backref="owner",
58 order_by="AlertChannelAction.pkey",
59 )
57 60
58 auth_tokens = sa.orm.relationship('AuthToken',
61 auth_tokens = sa.orm.relationship(
62 "AuthToken",
59 63 cascade="all,delete-orphan",
60 64 passive_deletes=True,
61 65 passive_updates=True,
62 backref='owner',
63 order_by='AuthToken.creation_date')
66 backref="owner",
67 order_by="AuthToken.creation_date",
68 )
64 69
65 def get_dict(self, exclude_keys=None, include_keys=None,
66 extended_info=False):
70 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
67 71 result = super(User, self).get_dict(exclude_keys, include_keys)
68 72 if extended_info:
69 result['groups'] = [g.group_name for g in self.groups]
70 result['permissions'] = [p.perm_name for p in UserService.permissions(self)]
73 result["groups"] = [g.group_name for g in self.groups]
74 result["permissions"] = [p.perm_name for p in UserService.permissions(self)]
71 75 request = get_current_request()
72 apps = UserService.resources_with_perms(self,
73 ['view'], resource_types=['application'])
74 result['applications'] = sorted(
75 [{'resource_id': a.resource_id,
76 'resource_name': a.resource_name}
77 for a in apps.all()],
78 key=lambda x: x['resource_name'].lower())
79 result['assigned_reports'] = [r.get_dict(request) for r
80 in self.assigned_report_groups]
81 result['latest_events'] = [ev.get_dict(request) for ev
82 in self.latest_events()]
76 apps = UserService.resources_with_perms(
77 self, ["view"], resource_types=["application"]
78 )
79 result["applications"] = sorted(
80 [
81 {"resource_id": a.resource_id, "resource_name": a.resource_name}
82 for a in apps.all()
83 ],
84 key=lambda x: x["resource_name"].lower(),
85 )
86 result["assigned_reports"] = [
87 r.get_dict(request) for r in self.assigned_report_groups
88 ]
89 result["latest_events"] = [
90 ev.get_dict(request) for ev in self.latest_events()
91 ]
83 92
84 93 exclude_keys_list = exclude_keys or []
85 94 include_keys_list = include_keys or []
86 95 d = {}
87 96 for k in result.keys():
88 if (k not in exclude_keys_list and
89 (k in include_keys_list or not include_keys)):
97 if k not in exclude_keys_list and (
98 k in include_keys_list or not include_keys
99 ):
90 100 d[k] = result[k]
91 101 return d
92 102
93 103 def __repr__(self):
94 return '<User: %s, id: %s>' % (self.user_name, self.id)
104 return "<User: %s, id: %s>" % (self.user_name, self.id)
95 105
96 106 @property
97 107 def assigned_report_groups(self):
98 108 from appenlight.models.report_group import ReportGroup
99 109
100 resources = UserService.resources_with_perms(self, ['view'], resource_types=['application'])
110 resources = UserService.resources_with_perms(
111 self, ["view"], resource_types=["application"]
112 )
101 113 query = self.assigned_reports_relation
102 114 rid_list = [r.resource_id for r in resources]
103 115 query = query.filter(ReportGroup.resource_id.in_(rid_list))
@@ -106,12 +118,13 b' class User(UserMixin, Base):'
106 118
107 119 def feed_report(self, report):
108 120 """ """
109 if not hasattr(self, 'current_reports'):
121 if not hasattr(self, "current_reports"):
110 122 self.current_reports = []
111 123 self.current_reports.append(report)
112 124
113 def send_digest(self, request, application, reports, since_when=None,
114 db_session=None):
125 def send_digest(
126 self, request, application, reports, since_when=None, db_session=None
127 ):
115 128 db_session = get_db_session(db_session)
116 129 if not reports:
117 130 return True
@@ -121,13 +134,15 b' class User(UserMixin, Base):'
121 134 if not channel.channel_validated or not channel.daily_digest:
122 135 continue
123 136 try:
124 channel.send_digest(resource=application,
137 channel.send_digest(
138 resource=application,
125 139 user=self,
126 140 request=request,
127 141 since_when=since_when,
128 reports=reports)
142 reports=reports,
143 )
129 144 except IntegrationException as e:
130 log.warning('%s' % e)
145 log.warning("%s" % e)
131 146
132 147 def latest_events(self):
133 148 return EventService.latest_for_user(self)
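The key filter at the end of get_dict implements "exclude always wins; include acts as an allowlist only when one is supplied". The same predicate on a plain dict:

def filter_keys(result, exclude_keys=None, include_keys=None):
    exclude = exclude_keys or []
    include = include_keys or []
    return {
        k: v
        for k, v in result.items()
        # drop excluded keys; with an include list, keep only listed keys
        if k not in exclude and (k in include or not include_keys)
    }

row = {"id": 1, "user_name": "admin", "security_code": "x"}
print(filter_keys(row, exclude_keys=["security_code"]))  # id and user_name
print(filter_keys(row, include_keys=["user_name"]))      # user_name only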
@@ -14,7 +14,9 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 from ziggurat_foundations.models.user_resource_permission import UserResourcePermissionMixin
17 from ziggurat_foundations.models.user_resource_permission import (
18 UserResourcePermissionMixin,
19 )
18 20 from appenlight.models import Base
19 21
20 22
@@ -20,49 +20,41 b' from appenlight.forms import CSRFException'
20 20 log = logging.getLogger(__name__)
21 21
22 22 from pyramid.interfaces import IDefaultCSRFOptions
23 from pyramid.session import (
24 check_csrf_origin,
25 check_csrf_token,
26 )
23 from pyramid.session import check_csrf_origin, check_csrf_token
27 24
28 25 # taken directly from pyramid 1.7
29 26 # pyramid/viewderivers.py
30 27 # the difference is that this deriver skips the CSRF check when an
31 28 # auth token policy is in effect
32 29
30
33 31 def csrf_view(view, info):
34 explicit_val = info.options.get('require_csrf')
32 explicit_val = info.options.get("require_csrf")
35 33 defaults = info.registry.queryUtility(IDefaultCSRFOptions)
36 34 if defaults is None:
37 35 default_val = False
38 token = 'csrf_token'
39 header = 'X-CSRF-Token'
36 token = "csrf_token"
37 header = "X-CSRF-Token"
40 38 safe_methods = frozenset(["GET", "HEAD", "OPTIONS", "TRACE"])
41 39 else:
42 40 default_val = defaults.require_csrf
43 41 token = defaults.token
44 42 header = defaults.header
45 43 safe_methods = defaults.safe_methods
46 enabled = (
47 explicit_val is True or
48 (explicit_val is not False and default_val)
49 )
44 enabled = explicit_val is True or (explicit_val is not False and default_val)
50 45 # disable if both header and token are disabled
51 46 enabled = enabled and (token or header)
52 47 wrapped_view = view
53 48 if enabled:
49
54 50 def csrf_view(context, request):
55 is_from_auth_token = 'auth:auth_token' in \
56 request.effective_principals
51 is_from_auth_token = "auth:auth_token" in request.effective_principals
57 52 if is_from_auth_token:
58 log.debug('ignoring CSRF check, auth token used')
59 elif (
60 request.method not in safe_methods and
61 (
53 log.debug("ignoring CSRF check, auth token used")
54 elif request.method not in safe_methods and (
62 55 # skip exception views unless value is explicitly defined
63 getattr(request, 'exception', None) is None or
64 explicit_val is not None
65 )
56 getattr(request, "exception", None) is None
57 or explicit_val is not None
66 58 ):
67 59 check_csrf_origin(request, raises=True)
68 60 check_csrf_token(request, token, header, raises=True)
@@ -71,7 +63,8 b' def csrf_view(view, info):'
71 63 wrapped_view = csrf_view
72 64 return wrapped_view
73 65
74 csrf_view.options = ('require_csrf',)
66
67 csrf_view.options = ("require_csrf",)
75 68
76 69
77 70 class PublicReportGroup(object):
@@ -79,12 +72,12 b' class PublicReportGroup(object):'
79 72 self.val = val
80 73
81 74 def text(self):
82 return 'public_report_group = %s' % (self.val,)
75 return "public_report_group = %s" % (self.val,)
83 76
84 77 phash = text
85 78
86 79 def __call__(self, context, request):
87 report_group = getattr(context, 'report_group', None)
80 report_group = getattr(context, "report_group", None)
88 81 if report_group:
89 82 return context.report_group.public == self.val
90 83
@@ -95,8 +88,7 b' class contextTypeClass(object):'
95 88 self.cls = context_property[1]
96 89
97 90 def text(self):
98 return 'context_type_class = %s, %s' % (
99 self.context_property, self.cls)
91 return "context_type_class = %s, %s" % (self.context_property, self.cls)
100 92
101 93 phash = text
102 94
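PublicReportGroup and contextTypeClass follow Pyramid's custom view predicate protocol: a small class exposing text, phash and __call__(context, request). Once registered, the predicate's name becomes a keyword that view_config accepts, and the view only matches when __call__ returns True. A hedged sketch of the registration side (the actual hookup lives elsewhere in AppEnlight's configuration; add_view_predicate is the standard Pyramid API):

def includeme(config):
    # "config" is the Pyramid Configurator; after this call, views may
    # declare public_report_group=True/False in their view_config
    config.add_view_predicate("public_report_group", PublicReportGroup)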
@@ -13,4 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
@@ -24,16 +24,21 b' log = logging.getLogger(__name__)'
24 24
25 25
26 26 def main():
27 choices = ['logs']
28
29 parser = argparse.ArgumentParser(description='Cleanup AppEnlight logs')
30 parser.add_argument('-c', '--config', required=True,
31 help='Configuration ini file of application')
32 parser.add_argument('-t', '--types', choices=choices,
33 default='logs',
34 help='Which parts of the database should get cleared')
35 parser.add_argument('-r', '--resource', required=True, help='Resource id')
36 parser.add_argument('-n', '--namespace', help='Limit to Namespace')
27 choices = ["logs"]
28
29 parser = argparse.ArgumentParser(description="Cleanup AppEnlight logs")
30 parser.add_argument(
31 "-c", "--config", required=True, help="Configuration ini file of application"
32 )
33 parser.add_argument(
34 "-t",
35 "--types",
36 choices=choices,
37 default="logs",
38 help="Which parts of database should get cleared",
39 )
40 parser.add_argument("-r", "--resource", required=True, help="Resource id")
41 parser.add_argument("-n", "--namespace", help="Limit to Namespace")
37 42 args = parser.parse_args()
38 43
39 44 config_uri = args.config
@@ -42,22 +47,20 b' def main():'
42 47 env = bootstrap(config_uri)
43 48
44 49 config = {
45 'types': args.types,
46 'namespace': args.namespace,
47 'resource': int(args.resource),
50 "types": args.types,
51 "namespace": args.namespace,
52 "resource": int(args.resource),
48 53 }
49 54
50 55 action_cleanup_logs(config)
51 56
52 57
53 58 def action_cleanup_logs(config):
54 filter_settings = {
55 'namespace': []
56 }
57 if config['namespace']:
58 filter_settings['namespace'].append(config['namespace'])
59 logs_cleanup(config['resource'], filter_settings)
59 filter_settings = {"namespace": []}
60 if config["namespace"]:
61 filter_settings["namespace"].append(config["namespace"])
62 logs_cleanup(config["resource"], filter_settings)
60 63
61 64
62 if __name__ == '__main__':
65 if __name__ == "__main__":
63 66 main()
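The cleanup entry point is a thin argparse wrapper: --resource is required, --types is constrained by choices, and an optional --namespace narrows the filter passed to logs_cleanup. A sketch of how parsed arguments become the filter dict (the invocation values are made up):

import argparse

parser = argparse.ArgumentParser(description="Cleanup AppEnlight logs")
parser.add_argument("-c", "--config", required=True)
parser.add_argument("-t", "--types", choices=["logs"], default="logs")
parser.add_argument("-r", "--resource", required=True)
parser.add_argument("-n", "--namespace")

args = parser.parse_args(["-c", "production.ini", "-r", "7", "-n", "urllib"])
filter_settings = {"namespace": [args.namespace] if args.namespace else []}
print(int(args.resource), filter_settings)  # -> 7 {'namespace': ['urllib']}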
@@ -25,13 +25,7 b' from ziggurat_foundations.models.services.user import UserService'
25 25
26 26 from appenlight.forms import UserRegisterForm
27 27 from appenlight.lib.ext_json import json
28 from appenlight.models import (
29 DBSession,
30 Group,
31 GroupPermission,
32 User,
33 AuthToken
34 )
28 from appenlight.models import DBSession, Group, GroupPermission, User, AuthToken
35 29 from appenlight.models.services.group import GroupService
36 30
37 31 log = logging.getLogger(__name__)
@@ -40,101 +34,101 b' _ = str'
40 34
41 35
42 36 def is_yes(input_data):
43 return input_data in ['y', 'yes']
37 return input_data in ["y", "yes"]
44 38
45 39
46 40 def is_no(input_data):
47 return input_data in ['n', 'no']
41 return input_data in ["n", "no"]
48 42
49 43
50 44 def main():
51 45 parser = argparse.ArgumentParser(
52 description='Populate AppEnlight database',
53 add_help=False)
54 parser.add_argument('-c', '--config', required=True,
55 help='Configuration ini file of application')
56 parser.add_argument('--username', default=None,
57 help='User to create')
58 parser.add_argument('--password', default=None,
59 help='Password for created user')
60 parser.add_argument('--email', default=None,
61 help='Email for created user')
62 parser.add_argument('--auth-token', default=None,
63 help='Auth token for created user')
46 description="Populate AppEnlight database", add_help=False
47 )
48 parser.add_argument(
49 "-c", "--config", required=True, help="Configuration ini file of application"
50 )
51 parser.add_argument("--username", default=None, help="User to create")
52 parser.add_argument("--password", default=None, help="Password for created user")
53 parser.add_argument("--email", default=None, help="Email for created user")
54 parser.add_argument(
55 "--auth-token", default=None, help="Auth token for created user"
56 )
64 57 args = parser.parse_args()
65 58 config_uri = args.config
66 59
67 60 setup_logging(config_uri)
68 61 env = bootstrap(config_uri)
69 request = env['request']
62 request = env["request"]
70 63 with get_current_request().tm:
71 64 group = GroupService.by_id(1)
72 65 if not group:
73 group = Group(id=1, group_name='Administrators',
74 description="Top level permission owners")
66 group = Group(
67 id=1,
68 group_name="Administrators",
69 description="Top level permission owners",
70 )
75 71 DBSession.add(group)
76 permission = GroupPermission(perm_name='root_administration')
72 permission = GroupPermission(perm_name="root_administration")
77 73 group.permissions.append(permission)
78 74
79 75 create_user = True if args.username else None
80 76 while create_user is None:
81 response = input(
82 'Do you want to create a new admin? (n)\n').lower()
77 response = input("Do you want to create a new admin? (n)\n").lower()
83 78
84 if is_yes(response or 'n'):
79 if is_yes(response or "n"):
85 80 create_user = True
86 elif is_no(response or 'n'):
81 elif is_no(response or "n"):
87 82 create_user = False
88 83
89 84 if create_user:
90 85 csrf_token = request.session.get_csrf_token()
91 86 user_name = args.username
92 print('*********************************************************')
87 print("*********************************************************")
93 88 while user_name is None:
94 response = input('What is the username of new admin?\n')
89 response = input("What is the username of new admin?\n")
95 90 form = UserRegisterForm(
96 user_name=response, csrf_token=csrf_token,
97 csrf_context=request)
91 user_name=response, csrf_token=csrf_token, csrf_context=request
92 )
98 93 form.validate()
99 94 if form.user_name.errors:
100 95 print(form.user_name.errors[0])
101 96 else:
102 97 user_name = response
103 98 print('The admin username is "{}"\n'.format(user_name))
104 print('*********************************************************')
99 print("*********************************************************")
105 100 email = args.email
106 101 while email is None:
107 response = input('What is the email of admin account?\n')
102 response = input("What is the email of admin account?\n")
108 103 form = UserRegisterForm(
109 email=response, csrf_token=csrf_token,
110 csrf_context=request)
104 email=response, csrf_token=csrf_token, csrf_context=request
105 )
111 106 form.validate()
112 107 if form.email.errors:
113 108 print(form.email.errors[0])
114 109 else:
115 110 email = response
116 111 print('The admin email is "{}"\n'.format(email))
117 print('*********************************************************')
112 print("*********************************************************")
118 113 user_password = args.password
119 114 confirmed_password = args.password
120 115 while user_password is None or confirmed_password is None:
121 response = getpass.getpass(
122 'What is the password for admin account?\n')
116 response = getpass.getpass("What is the password for admin account?\n")
123 117 form = UserRegisterForm(
124 user_password=response, csrf_token=csrf_token,
125 csrf_context=request)
118 user_password=response, csrf_token=csrf_token, csrf_context=request
119 )
126 120 form.validate()
127 121 if form.user_password.errors:
128 122 print(form.user_password.errors[0])
129 123 else:
130 124 user_password = response
131 125
132 response = getpass.getpass('Please confirm the password.\n')
126 response = getpass.getpass("Please confirm the password.\n")
133 127 if user_password == response:
134 128 confirmed_password = response
135 129 else:
136 print('Passwords do not match. Please try again')
137 print('*********************************************************')
130 print("Passwords do not match. Please try again")
131 print("*********************************************************")
138 132
139 133 with get_current_request().tm:
140 134 if create_user:
@@ -148,8 +142,8 b' def main():'
148 142 token.token = args.auth_token
149 143 user.auth_tokens.append(token)
150 144 group.users.append(user)
151 print('USER CREATED')
145 print("USER CREATED")
152 146 print(json.dumps(user.get_dict()))
153 print('*********************************************************')
154 print('AUTH TOKEN')
147 print("*********************************************************")
148 print("AUTH TOKEN")
155 149 print(json.dumps(user.auth_tokens[0].get_dict()))
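A small idiom repeated through the prompts above: response or "n" makes an empty answer (just pressing Enter) fall through to the default "n", because the empty string is falsy:

def is_yes(input_data):
    return input_data in ["y", "yes"]

print(is_yes("" or "n"))     # False -> Enter alone means "no"
print(is_yes("yes" or "n"))  # True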
@@ -26,32 +26,35 b' log = logging.getLogger(__name__)'
26 26
27 27
28 28 def gen_secret():
29 return Fernet.generate_key().decode('utf8')
29 return Fernet.generate_key().decode("utf8")
30 30
31 31
32 32 def main():
33 33 parser = argparse.ArgumentParser(
34 description='Generate AppEnlight configuration file',
35 add_help=False)
36 parser.add_argument('config', help='Name of generated file')
34 description="Generate AppEnlight static resources", add_help=False
35 )
36 parser.add_argument("config", help="Name of generated file")
37 37 parser.add_argument(
38 '--domain',
39 default='appenlight-rhodecode.local',
40 help='Domain which will be used to serve the application')
38 "--domain",
39 default="appenlight-rhodecode.local",
40 help="Domain which will be used to serve the application",
41 )
41 42 parser.add_argument(
42 '--dbstring',
43 default='postgresql://appenlight:test@127.0.0.1:5432/appenlight',
44 help='Database connection string used by the application')
43 "--dbstring",
44 default="postgresql://appenlight:test@127.0.0.1:5432/appenlight",
45 help="Domain which will be used to serve the application",
46 )
45 47 args = parser.parse_args()
46 ini_path = os.path.join('templates', 'ini', 'production.ini.jinja2')
47 template_str = pkg_resources.resource_string('appenlight', ini_path)
48 template = jinja2.Template(template_str.decode('utf8'))
49 template_vars = {'appenlight_encryption_secret': gen_secret(),
50 'appenlight_authtkt_secret': gen_secret(),
51 'appenlight_redis_session_secret': gen_secret(),
52 'appenlight_domain': args.domain,
53 'appenlight_dbstring': args.dbstring,
48 ini_path = os.path.join("templates", "ini", "production.ini.jinja2")
49 template_str = pkg_resources.resource_string("appenlight", ini_path)
50 template = jinja2.Template(template_str.decode("utf8"))
51 template_vars = {
52 "appenlight_encryption_secret": gen_secret(),
53 "appenlight_authtkt_secret": gen_secret(),
54 "appenlight_redis_session_secret": gen_secret(),
55 "appenlight_domain": args.domain,
56 "appenlight_dbstring": args.dbstring,
54 57 }
55 58 compiled = template.render(**template_vars)
56 with open(args.config, 'w') as f:
59 with open(args.config, "w") as f:
57 60 f.write(compiled)
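Two pieces do the work in this script: cryptography's Fernet generates each secret and Jinja2 renders the ini template. A minimal standalone sketch (the inline template string is illustrative; the real one ships as templates/ini/production.ini.jinja2):

    from cryptography.fernet import Fernet
    import jinja2

    secret = Fernet.generate_key().decode("utf8")  # 44-char urlsafe base64 string
    template = jinja2.Template("encryption_secret = {{ appenlight_encryption_secret }}")
    print(template.render(appenlight_encryption_secret=secret))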
@@ -31,10 +31,11 b' log = logging.getLogger(__name__)'
31 31
32 32 def main(argv=sys.argv):
33 33 parser = argparse.ArgumentParser(
34 description='Migrate AppEnlight database to latest version',
35 add_help=False)
36 parser.add_argument('-c', '--config', required=True,
37 help='Configuration ini file of application')
34 description="Migrate AppEnlight database to latest version", add_help=False
35 )
36 parser.add_argument(
37 "-c", "--config", required=True, help="Configuration ini file of application"
38 )
38 39 args = parser.parse_args()
39 40 config_uri = args.config
40 41
@@ -42,32 +43,31 b' def main(argv=sys.argv):'
42 43 bootstrap(config_uri)
43 44 registry = get_current_registry()
44 45 alembic_cfg = Config()
45 alembic_cfg.set_main_option("sqlalchemy.echo", 'true')
46 alembic_cfg.set_main_option("script_location",
47 "ziggurat_foundations:migrations")
48 alembic_cfg.set_main_option("sqlalchemy.url",
49 registry.settings["sqlalchemy.url"])
46 alembic_cfg.set_main_option("sqlalchemy.echo", "true")
47 alembic_cfg.set_main_option("script_location", "ziggurat_foundations:migrations")
48 alembic_cfg.set_main_option("sqlalchemy.url", registry.settings["sqlalchemy.url"])
50 49 command.upgrade(alembic_cfg, "head")
51 50 alembic_cfg = Config()
52 alembic_cfg.set_main_option("sqlalchemy.echo", 'true')
51 alembic_cfg.set_main_option("sqlalchemy.echo", "true")
53 52 alembic_cfg.set_main_option("script_location", "appenlight:migrations")
54 alembic_cfg.set_main_option("sqlalchemy.url",
55 registry.settings["sqlalchemy.url"])
53 alembic_cfg.set_main_option("sqlalchemy.url", registry.settings["sqlalchemy.url"])
56 54 command.upgrade(alembic_cfg, "head")
57 55
58 56 for plugin_name, config in registry.appenlight_plugins.items():
59 if config['sqlalchemy_migrations']:
57 if config["sqlalchemy_migrations"]:
60 58 alembic_cfg = Config()
61 alembic_cfg.set_main_option("script_location",
62 config['sqlalchemy_migrations'])
63 alembic_cfg.set_main_option("sqlalchemy.url",
64 registry.settings["sqlalchemy.url"])
65 alembic_cfg.set_main_option("sqlalchemy.echo", 'true')
59 alembic_cfg.set_main_option(
60 "script_location", config["sqlalchemy_migrations"]
61 )
62 alembic_cfg.set_main_option(
63 "sqlalchemy.url", registry.settings["sqlalchemy.url"]
64 )
65 alembic_cfg.set_main_option("sqlalchemy.echo", "true")
66 66 command.upgrade(alembic_cfg, "head")
67 67
68 68 with get_current_request().tm:
69 69 ConfigService.setup_default_values()
70 70
71 71 for plugin_name, config in registry.appenlight_plugins.items():
72 if config['default_values_setter']:
73 get_callable(config['default_values_setter'])()
72 if config["default_values_setter"]:
73 get_callable(config["default_values_setter"])()
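The flow above is plain programmatic Alembic: one Config object per migration tree (ziggurat_foundations, appenlight core, then each plugin), all pointed at the same database URL. A minimal sketch of one such upgrade, with a placeholder connection string:

    from alembic import command
    from alembic.config import Config

    cfg = Config()
    cfg.set_main_option("script_location", "appenlight:migrations")
    cfg.set_main_option("sqlalchemy.url", "postgresql://user:secret@localhost/appenlight")
    command.upgrade(cfg, "head")  # apply every migration up to the latest revision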
@@ -25,11 +25,7 b' import elasticsearch.helpers'
25 25 from collections import defaultdict
26 26 from pyramid.paster import setup_logging
27 27 from pyramid.paster import bootstrap
28 from appenlight.models import (
29 DBSession,
30 Datastores,
31 metadata
32 )
28 from appenlight.models import DBSession, Datastores, metadata
33 29 from appenlight.lib import get_callable
34 30 from appenlight.models.report_group import ReportGroup
35 31 from appenlight.models.report import Report
@@ -42,25 +38,27 b' from appenlight.models.metric import Metric'
42 38 log = logging.getLogger(__name__)
43 39
44 40 tables = {
45 'slow_calls_p_': [],
46 'reports_stats_p_': [],
47 'reports_p_': [],
48 'reports_groups_p_': [],
49 'logs_p_': [],
50 'metrics_p_': [],
41 "slow_calls_p_": [],
42 "reports_stats_p_": [],
43 "reports_p_": [],
44 "reports_groups_p_": [],
45 "logs_p_": [],
46 "metrics_p_": [],
51 47 }
52 48
49
53 50 def detect_tables(table_prefix):
54 51 found_tables = []
55 db_tables_query = '''
52 db_tables_query = """
56 53 SELECT tablename FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
57 tablename NOT LIKE 'sql_%' ORDER BY tablename ASC;'''
54 tablename NOT LIKE 'sql_%' ORDER BY tablename ASC;"""
58 55
59 56 for table in DBSession.execute(db_tables_query).fetchall():
60 57 tablename = table.tablename
61 58 if tablename.startswith(table_prefix):
62 t = sa.Table(tablename, metadata, autoload=True,
63 autoload_with=DBSession.bind.engine)
59 t = sa.Table(
60 tablename, metadata, autoload=True, autoload_with=DBSession.bind.engine
61 )
64 62 found_tables.append(t)
65 63 return found_tables
66 64
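detect_tables relies on SQLAlchemy reflection because the partition tables are created at runtime and have no declared models. A minimal sketch of the same reflection call, under the pre-1.4 autoload API this codebase uses (URL and table name are placeholders):

    import sqlalchemy as sa

    engine = sa.create_engine("postgresql://user:secret@localhost/appenlight")
    metadata = sa.MetaData()
    # Build a Table object by reading the live schema instead of declaring columns.
    logs_table = sa.Table("logs_p_2019_01", metadata, autoload=True, autoload_with=engine)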
@@ -75,69 +73,78 b' def main():'
75 73 # need parser twice because we first need to load ini file
76 74 # bootstrap pyramid and then load plugins
77 75 pre_parser = argparse.ArgumentParser(
78 description='Reindex AppEnlight data',
79 add_help=False)
80 pre_parser.add_argument('-c', '--config', required=True,
81 help='Configuration ini file of application')
82 pre_parser.add_argument('-h', '--help', help='Show help', nargs='?')
83 pre_parser.add_argument('-t', '--types', nargs='+',
84 help='Which parts of database should get reindexed')
76 description="Reindex AppEnlight data", add_help=False
77 )
78 pre_parser.add_argument(
79 "-c", "--config", required=True, help="Configuration ini file of application"
80 )
81 pre_parser.add_argument("-h", "--help", help="Show help", nargs="?")
82 pre_parser.add_argument(
83 "-t", "--types", nargs="+", help="Which parts of database should get reindexed"
84 )
85 85 args = pre_parser.parse_args()
86 86
87 87 config_uri = args.config
88 88 setup_logging(config_uri)
89 89 log.setLevel(logging.INFO)
90 90 env = bootstrap(config_uri)
91 parser = argparse.ArgumentParser(description='Reindex AppEnlight data')
91 parser = argparse.ArgumentParser(description="Reindex AppEnlight data")
92 92 choices = {
93 'reports': 'appenlight.scripts.reindex_elasticsearch:reindex_reports',
94 'logs': 'appenlight.scripts.reindex_elasticsearch:reindex_logs',
95 'metrics': 'appenlight.scripts.reindex_elasticsearch:reindex_metrics',
96 'slow_calls': 'appenlight.scripts.reindex_elasticsearch:reindex_slow_calls',
97 'template': 'appenlight.scripts.reindex_elasticsearch:update_template'
93 "reports": "appenlight.scripts.reindex_elasticsearch:reindex_reports",
94 "logs": "appenlight.scripts.reindex_elasticsearch:reindex_logs",
95 "metrics": "appenlight.scripts.reindex_elasticsearch:reindex_metrics",
96 "slow_calls": "appenlight.scripts.reindex_elasticsearch:reindex_slow_calls",
97 "template": "appenlight.scripts.reindex_elasticsearch:update_template",
98 98 }
99 for k, v in env['registry'].appenlight_plugins.items():
100 if v.get('fulltext_indexer'):
101 choices[k] = v['fulltext_indexer']
102 parser.add_argument('-t', '--types', nargs='*',
103 choices=['all'] + list(choices.keys()), default=[],
104 help='Which parts of database should get reindexed')
105 parser.add_argument('-c', '--config', required=True,
106 help='Configuration ini file of application')
99 for k, v in env["registry"].appenlight_plugins.items():
100 if v.get("fulltext_indexer"):
101 choices[k] = v["fulltext_indexer"]
102 parser.add_argument(
103 "-t",
104 "--types",
105 nargs="*",
106 choices=["all"] + list(choices.keys()),
107 default=[],
108 help="Which parts of database should get reindexed",
109 )
110 parser.add_argument(
111 "-c", "--config", required=True, help="Configuration ini file of application"
112 )
107 113 args = parser.parse_args()
108 114
109
110 if 'all' in args.types:
115 if "all" in args.types:
111 116 args.types = list(choices.keys())
112 117
113 118 print("Selected types to reindex: {}".format(args.types))
114 119
115 log.info('settings {}'.format(args.types))
120 log.info("settings {}".format(args.types))
116 121
117 if 'template' in args.types:
118 get_callable(choices['template'])()
119 args.types.remove('template')
122 if "template" in args.types:
123 get_callable(choices["template"])()
124 args.types.remove("template")
120 125 for selected in args.types:
121 126 get_callable(choices[selected])()
122 127
123 128
124 129 def update_template():
125 130 try:
126 Datastores.es.indices.delete_template('rcae')
131 Datastores.es.indices.delete_template("rcae")
127 132 except elasticsearch.exceptions.NotFoundError as e:
128 133 log.error(e)
129 log.info('updating elasticsearch template')
134 log.info("updating elasticsearch template")
130 135 tag_templates = [
131 {"values": {
136 {
137 "values": {
132 138 "path_match": "tags.*",
133 139 "mapping": {
134 140 "type": "object",
135 141 "properties": {
136 142 "values": {"type": "string", "analyzer": "tag_value"},
137 "numeric_values": {"type": "float"}
143 "numeric_values": {"type": "float"},
144 },
145 },
138 146 }
139 147 }
140 }}
141 148 ]
142 149
143 150 template_schema = {
@@ -145,8 +152,7 b' def update_template():'
145 152 "settings": {
146 153 "index": {
147 154 "refresh_interval": "5s",
148 "translog": {"sync_interval": "5s",
149 "durability": "async"}
155 "translog": {"sync_interval": "5s", "durability": "async"},
150 156 },
151 157 "number_of_shards": 5,
152 158 "analysis": {
@@ -155,13 +161,13 b' def update_template():'
155 161 "type": "custom",
156 162 "char_filter": [],
157 163 "tokenizer": "path_hierarchy",
158 "filter": []
164 "filter": [],
159 165 },
160 166 "tag_value": {
161 167 "type": "custom",
162 168 "char_filter": [],
163 169 "tokenizer": "keyword",
164 "filter": ["lowercase"]
170 "filter": ["lowercase"],
165 171 },
166 172 }
167 173 },
@@ -182,8 +188,8 b' def update_template():'
182 188 "last_timestamp": {"type": "date"},
183 189 "average_duration": {"type": "float"},
184 190 "summed_duration": {"type": "float"},
185 "public": {"type": "boolean"}
186 }
191 "public": {"type": "boolean"},
192 },
187 193 },
188 194 "report": {
189 195 "_all": {"enabled": False},
@@ -202,15 +208,11 b' def update_template():'
202 208 "request_id": {"type": "string", "index": "not_analyzed"},
203 209 "end_time": {"type": "date"},
204 210 "duration": {"type": "float"},
205 "tags": {
206 "type": "object"
207 },
211 "tags": {"type": "object"},
208 212 "tag_list": {"type": "string", "analyzer": "tag_value"},
209 "extra": {
210 "type": "object"
213 "extra": {"type": "object"},
211 214 },
212 },
213 "_parent": {"type": "report_group"}
215 "_parent": {"type": "report_group"},
214 216 },
215 217 "log": {
216 218 "_all": {"enabled": False},
@@ -225,26 +227,24 b' def update_template():'
225 227 "log_level": {"type": "string", "analyzer": "simple"},
226 228 "message": {"type": "string", "analyzer": "simple"},
227 229 "namespace": {"type": "string", "index": "not_analyzed"},
228 "tags": {
229 "type": "object"
230 "tags": {"type": "object"},
231 "tag_list": {"type": "string", "analyzer": "tag_value"},
232 },
233 },
230 234 },
231 "tag_list": {"type": "string", "analyzer": "tag_value"}
232 }
233 }
234 }
235 235 }
236 236
237 Datastores.es.indices.put_template('rcae', body=template_schema)
237 Datastores.es.indices.put_template("rcae", body=template_schema)
238 238
239 239
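update_template re-registers the mapping through the standard elasticsearch-py call. A pared-down equivalent, assuming a local client and the pre-6.x template format (the "template" index-pattern key is an assumption; the full body is the template_schema above):

    from elasticsearch import Elasticsearch

    es = Elasticsearch(["http://localhost:9200"])  # placeholder host
    es.indices.put_template(
        "rcae", body={"template": "rcae_*", "settings": {"number_of_shards": 5}}
    )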
240 240 def reindex_reports():
241 reports_groups_tables = detect_tables('reports_groups_p_')
241 reports_groups_tables = detect_tables("reports_groups_p_")
242 242 try:
243 Datastores.es.indices.delete('rcae_r*')
243 Datastores.es.indices.delete("rcae_r*")
244 244 except elasticsearch.exceptions.NotFoundError as e:
245 245 log.error(e)
246 246
247 log.info('reindexing report groups')
247 log.info("reindexing report groups")
248 248 i = 0
249 249 task_start = datetime.datetime.now()
250 250 for partition_table in reports_groups_tables:
@@ -262,19 +262,18 b' def reindex_reports():'
262 262 es_docs[d_range].append(item.es_doc())
263 263 if es_docs:
264 264 name = partition_table.name
265 log.info('round {}, {}'.format(i, name))
265 log.info("round {}, {}".format(i, name))
266 266 for k, v in es_docs.items():
267 to_update = {'_index': k, '_type': 'report_group'}
267 to_update = {"_index": k, "_type": "report_group"}
268 268 [i.update(to_update) for i in v]
269 269 elasticsearch.helpers.bulk(Datastores.es, v)
270 270
271 log.info(
272 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
271 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
273 272
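Every reindex loop ends the same way: each document dict is tagged with its target _index and _type, then handed to the bulk helper. A minimal sketch with placeholder client and document values:

    import elasticsearch
    import elasticsearch.helpers

    es = elasticsearch.Elasticsearch(["http://localhost:9200"])
    docs = [{"_index": "rcae_r_2019_01", "_type": "report_group", "group_id": 1}]
    elasticsearch.helpers.bulk(es, docs)  # returns (success_count, error_list)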
274 273 i = 0
275 log.info('reindexing reports')
274 log.info("reindexing reports")
276 275 task_start = datetime.datetime.now()
277 reports_tables = detect_tables('reports_p_')
276 reports_tables = detect_tables("reports_p_")
278 277 for partition_table in reports_tables:
279 278 conn = DBSession.connection().execution_options(stream_results=True)
280 279 result = conn.execute(partition_table.select())
@@ -290,19 +289,18 b' def reindex_reports():'
290 289 es_docs[d_range].append(item.es_doc())
291 290 if es_docs:
292 291 name = partition_table.name
293 log.info('round {}, {}'.format(i, name))
292 log.info("round {}, {}".format(i, name))
294 293 for k, v in es_docs.items():
295 to_update = {'_index': k, '_type': 'report'}
294 to_update = {"_index": k, "_type": "report"}
296 295 [i.update(to_update) for i in v]
297 296 elasticsearch.helpers.bulk(Datastores.es, v)
298 297
299 log.info(
300 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
298 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
301 299
302 log.info('reindexing reports stats')
300 log.info("reindexing reports stats")
303 301 i = 0
304 302 task_start = datetime.datetime.now()
305 reports_stats_tables = detect_tables('reports_stats_p_')
303 reports_stats_tables = detect_tables("reports_stats_p_")
306 304 for partition_table in reports_stats_tables:
307 305 conn = DBSession.connection().execution_options(stream_results=True)
308 306 result = conn.execute(partition_table.select())
@@ -315,34 +313,33 b' def reindex_reports():'
315 313 rd = dict(list(row.items()))
316 314 # remove legacy columns
317 315 # TODO: remove the column later
318 rd.pop('size', None)
316 rd.pop("size", None)
319 317 item = ReportStat(**rd)
320 318 i += 1
321 319 d_range = item.partition_id
322 320 es_docs[d_range].append(item.es_doc())
323 321 if es_docs:
324 322 name = partition_table.name
325 log.info('round {}, {}'.format(i, name))
323 log.info("round {}, {}".format(i, name))
326 324 for k, v in es_docs.items():
327 to_update = {'_index': k, '_type': 'log'}
325 to_update = {"_index": k, "_type": "log"}
328 326 [i.update(to_update) for i in v]
329 327 elasticsearch.helpers.bulk(Datastores.es, v)
330 328
331 log.info(
332 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
329 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
333 330
334 331
335 332 def reindex_logs():
336 333 try:
337 Datastores.es.indices.delete('rcae_l*')
334 Datastores.es.indices.delete("rcae_l*")
338 335 except elasticsearch.exceptions.NotFoundError as e:
339 336 log.error(e)
340 337
341 338 # logs
342 log.info('reindexing logs')
339 log.info("reindexing logs")
343 340 i = 0
344 341 task_start = datetime.datetime.now()
345 log_tables = detect_tables('logs_p_')
342 log_tables = detect_tables("logs_p_")
346 343 for partition_table in log_tables:
347 344 conn = DBSession.connection().execution_options(stream_results=True)
348 345 result = conn.execute(partition_table.select())
@@ -359,26 +356,25 b' def reindex_logs():'
359 356 es_docs[d_range].append(item.es_doc())
360 357 if es_docs:
361 358 name = partition_table.name
362 log.info('round {}, {}'.format(i, name))
359 log.info("round {}, {}".format(i, name))
363 360 for k, v in es_docs.items():
364 to_update = {'_index': k, '_type': 'log'}
361 to_update = {"_index": k, "_type": "log"}
365 362 [i.update(to_update) for i in v]
366 363 elasticsearch.helpers.bulk(Datastores.es, v)
367 364
368 log.info(
369 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
365 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
370 366
371 367
372 368 def reindex_metrics():
373 369 try:
374 Datastores.es.indices.delete('rcae_m*')
370 Datastores.es.indices.delete("rcae_m*")
375 371 except elasticsearch.exceptions.NotFoundError as e:
376 372 log.error(e)
377 373
378 log.info('reindexing applications metrics')
374 log.info("reindexing applications metrics")
379 375 i = 0
380 376 task_start = datetime.datetime.now()
381 metric_tables = detect_tables('metrics_p_')
377 metric_tables = detect_tables("metrics_p_")
382 378 for partition_table in metric_tables:
383 379 conn = DBSession.connection().execution_options(stream_results=True)
384 380 result = conn.execute(partition_table.select())
@@ -394,26 +390,25 b' def reindex_metrics():'
394 390 es_docs[d_range].append(item.es_doc())
395 391 if es_docs:
396 392 name = partition_table.name
397 log.info('round {}, {}'.format(i, name))
393 log.info("round {}, {}".format(i, name))
398 394 for k, v in es_docs.items():
399 to_update = {'_index': k, '_type': 'log'}
395 to_update = {"_index": k, "_type": "log"}
400 396 [i.update(to_update) for i in v]
401 397 elasticsearch.helpers.bulk(Datastores.es, v)
402 398
403 log.info(
404 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
399 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
405 400
406 401
407 402 def reindex_slow_calls():
408 403 try:
409 Datastores.es.indices.delete('rcae_sc*')
404 Datastores.es.indices.delete("rcae_sc*")
410 405 except elasticsearch.exceptions.NotFoundError as e:
411 406 log.error(e)
412 407
413 log.info('reindexing slow calls')
408 log.info("reindexing slow calls")
414 409 i = 0
415 410 task_start = datetime.datetime.now()
416 slow_calls_tables = detect_tables('slow_calls_p_')
411 slow_calls_tables = detect_tables("slow_calls_p_")
417 412 for partition_table in slow_calls_tables:
418 413 conn = DBSession.connection().execution_options(stream_results=True)
419 414 result = conn.execute(partition_table.select())
@@ -429,15 +424,14 b' def reindex_slow_calls():'
429 424 es_docs[d_range].append(item.es_doc())
430 425 if es_docs:
431 426 name = partition_table.name
432 log.info('round {}, {}'.format(i, name))
427 log.info("round {}, {}".format(i, name))
433 428 for k, v in es_docs.items():
434 to_update = {'_index': k, '_type': 'log'}
429 to_update = {"_index": k, "_type": "log"}
435 430 [i.update(to_update) for i in v]
436 431 elasticsearch.helpers.bulk(Datastores.es, v)
437 432
438 log.info(
439 'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
433 log.info("total docs {} {}".format(i, datetime.datetime.now() - task_start))
440 434
441 435
442 if __name__ == '__main__':
436 if __name__ == "__main__":
443 437 main()
@@ -28,30 +28,30 b' log = logging.getLogger(__name__)'
28 28
29 29 def main():
30 30 parser = argparse.ArgumentParser(
31 description='Generate AppEnlight static resources',
32 add_help=False)
33 parser.add_argument('-c', '--config', required=True,
34 help='Configuration ini file of application')
31 description="Generate AppEnlight static resources", add_help=False
32 )
33 parser.add_argument(
34 "-c", "--config", required=True, help="Configuration ini file of application"
35 )
35 36 args = parser.parse_args()
36 37 config_uri = args.config
37 38 setup_logging(config_uri)
38 39 env = bootstrap(config_uri)
39 registry = env['registry']
40 registry = env["registry"]
40 41 settings = registry.settings
41 if os.path.exists(settings['webassets.dir']):
42 shutil.rmtree(settings['webassets.dir'])
43 os.mkdir(settings['webassets.dir'])
44 ae_basedir = pkg_resources.resource_filename('appenlight', 'static')
45 shutil.copytree(ae_basedir,
46 os.path.join(settings['webassets.dir'], 'appenlight'))
42 if os.path.exists(settings["webassets.dir"]):
43 shutil.rmtree(settings["webassets.dir"])
44 os.mkdir(settings["webassets.dir"])
45 ae_basedir = pkg_resources.resource_filename("appenlight", "static")
46 shutil.copytree(ae_basedir, os.path.join(settings["webassets.dir"], "appenlight"))
47 47
48 48 for plugin_name, config in registry.appenlight_plugins.items():
49 if config['static']:
50 shutil.copytree(config['static'],
51 os.path.join(settings['webassets.dir'],
52 plugin_name))
49 if config["static"]:
50 shutil.copytree(
51 config["static"], os.path.join(settings["webassets.dir"], plugin_name)
52 )
53 53
54 for root, dirs, files in os.walk(settings['webassets.dir']):
54 for root, dirs, files in os.walk(settings["webassets.dir"]):
55 55 for item in dirs:
56 56 os.chmod(os.path.join(root, item), 0o775)
57 57 for item in files:
@@ -36,8 +36,8 b' log = logging.getLogger(__name__)'
36 36
37 37
38 38 def groupfinder(userid, request):
39 if userid and hasattr(request, 'user') and request.user:
40 groups = ['group:%s' % g.id for g in request.user.groups]
39 if userid and hasattr(request, "user") and request.user:
40 groups = ["group:%s" % g.id for g in request.user.groups]
41 41 return groups
42 42 return []
43 43
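groupfinder returns Pyramid principal strings, one per group, which the authentication policy folds into the request's effective principals. For instance:

    # For a user belonging to groups with ids 3 and 9:
    ["group:%s" % group_id for group_id in (3, 9)]
    # -> ['group:3', 'group:9']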
@@ -53,16 +53,16 b' class AuthTokenAuthenticationPolicy(CallbackAuthenticationPolicy):'
53 53 return []
54 54
55 55 def unauthenticated_userid(self, request):
56 token = request.headers.get('x-appenlight-auth-token')
56 token = request.headers.get("x-appenlight-auth-token")
57 57 if token:
58 58 auth_token = AuthTokenService.by_token(token)
59 59 if auth_token and not auth_token.is_expired:
60 log.info('%s is valid' % auth_token)
60 log.info("%s is valid" % auth_token)
61 61 return auth_token.owner_id
62 62 elif auth_token:
63 log.warning('%s is expired' % auth_token)
63 log.warning("%s is expired" % auth_token)
64 64 else:
65 log.warning('token: %s is not found' % token)
65 log.warning("token: %s is not found" % token)
66 66
67 67 def authenticated_userid(self, request):
68 68 return self.unauthenticated_userid(request)
@@ -72,10 +72,10 b' def rewrite_root_perm(perm_user, perm_name):'
72 72 """
73 73 Translates root_administration into ALL_PERMISSIONS object
74 74 """
75 if perm_name == 'root_administration':
76 return (Allow, perm_user, ALL_PERMISSIONS,)
75 if perm_name == "root_administration":
76 return (Allow, perm_user, ALL_PERMISSIONS)
77 77 else:
78 return (Allow, perm_user, perm_name,)
78 return (Allow, perm_user, perm_name)
79 79
80 80
81 81 def add_root_superperm(request, context):
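rewrite_root_perm above maps the sentinel permission onto Pyramid's catch-all; its two return shapes look like this (Allow and ALL_PERMISSIONS are pyramid.security names, presumably imported at the top of this module):

    from pyramid.security import Allow, ALL_PERMISSIONS

    # rewrite_root_perm("user:5", "root_administration")
    # -> (Allow, 'user:5', ALL_PERMISSIONS)  grants everything on the context
    # rewrite_root_perm("user:5", "view")
    # -> (Allow, 'user:5', 'view')  any other permission passes through unchanged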
@@ -83,10 +83,10 b' def add_root_superperm(request, context):'
83 83 Adds ALL_PERMISSIONS to every resource if user somehow has 'root_administration'
84 84 non-resource permission
85 85 """
86 if hasattr(request, 'user') and request.user:
86 if hasattr(request, "user") and request.user:
87 87 acls = permission_to_04_acls(UserService.permissions(request.user))
88 88 for perm_user, perm_name in acls:
89 if perm_name == 'root_administration':
89 if perm_name == "root_administration":
90 90 context.__acl__.append(rewrite_root_perm(perm_user, perm_name))
91 91
92 92
@@ -96,14 +96,17 b' class RootFactory(object):'
96 96 """
97 97
98 98 def __init__(self, request):
99 self.__acl__ = [(Allow, Authenticated, 'authenticated'),
100 (Allow, Authenticated, 'create_resources')]
99 self.__acl__ = [
100 (Allow, Authenticated, "authenticated"),
101 (Allow, Authenticated, "create_resources"),
102 ]
101 103 # general page factory - append custom non resource permissions
102 if hasattr(request, 'user') and request.user:
104 if hasattr(request, "user") and request.user:
103 105 acls = permission_to_04_acls(UserService.permissions(request.user))
104 106 for perm_user, perm_name in acls:
105 107 self.__acl__.append(rewrite_root_perm(perm_user, perm_name))
106 108
109
107 110 class ResourceFactory(object):
108 111 """
109 112 Checks permissions to specific resource based on user permissions or
@@ -114,11 +117,13 b' class ResourceFactory(object):'
114 117 Resource = appenlight.models.resource.Resource
115 118
116 119 self.__acl__ = []
117 resource_id = request.matchdict.get("resource_id",
118 request.GET.get("resource_id"))
120 resource_id = request.matchdict.get(
121 "resource_id", request.GET.get("resource_id")
122 )
119 123 resource_id = to_integer_safe(resource_id)
120 self.resource = ResourceService.by_resource_id(resource_id) \
121 if resource_id else None
124 self.resource = (
125 ResourceService.by_resource_id(resource_id) if resource_id else None
126 )
122 127 if self.resource and request.user:
123 128 self.__acl__ = self.resource.__acl__
124 129 permissions = ResourceService.perms_for_user(self.resource, request.user)
@@ -138,17 +143,18 b' class ResourceReportFactory(object):'
138 143 Resource = appenlight.models.resource.Resource
139 144
140 145 self.__acl__ = []
141 group_id = request.matchdict.get("group_id",
142 request.params.get("group_id"))
146 group_id = request.matchdict.get("group_id", request.params.get("group_id"))
143 147 group_id = to_integer_safe(group_id)
144 self.report_group = ReportGroupService.by_id(
145 group_id) if group_id else None
148 self.report_group = ReportGroupService.by_id(group_id) if group_id else None
146 149 if not self.report_group:
147 150 raise HTTPNotFound()
148 151
149 152 self.public = self.report_group.public
150 self.resource = ResourceService.by_resource_id(self.report_group.resource_id) \
151 if self.report_group else None
153 self.resource = (
154 ResourceService.by_resource_id(self.report_group.resource_id)
155 if self.report_group
156 else None
157 )
152 158
153 159 if self.resource:
154 160 self.__acl__ = self.resource.__acl__
@@ -157,11 +163,12 b' class ResourceReportFactory(object):'
157 163 for perm_user, perm_name in permission_to_04_acls(permissions):
158 164 self.__acl__.append(rewrite_root_perm(perm_user, perm_name))
159 165 if self.public:
160 self.__acl__.append((Allow, Everyone, 'view',))
166 self.__acl__.append((Allow, Everyone, "view"))
161 167 if not request.user:
162 168 # unauthed users need to visit using both group and report pair
163 report_id = request.params.get('reportId',
164 request.params.get('report_id', -1))
169 report_id = request.params.get(
170 "reportId", request.params.get("report_id", -1)
171 )
165 172 report = self.report_group.get_report(report_id, public=True)
166 173 if not report:
167 174 raise HTTPNotFound()
@@ -177,24 +184,23 b' class APIFactory(object):'
177 184 self.__acl__ = []
178 185 self.possibly_public = False
179 186 private_api_key = request.headers.get(
180 'x-appenlight-api-key',
181 request.params.get('api_key')
187 "x-appenlight-api-key", request.params.get("api_key")
182 188 )
183 189 log.debug("private key: %s" % private_api_key)
184 190 if private_api_key:
185 self.resource = ApplicationService.by_api_key_cached()(
186 private_api_key)
191 self.resource = ApplicationService.by_api_key_cached()(private_api_key)
187 192 # then try public key
188 193 else:
189 194 public_api_key = request.headers.get(
190 'x-appenlight-public-api-key',
191 request.GET.get('public_api_key'))
195 "x-appenlight-public-api-key", request.GET.get("public_api_key")
196 )
192 197 log.debug("public key: %s" % public_api_key)
193 198 self.resource = ApplicationService.by_public_api_key(
194 public_api_key, from_cache=True, request=request)
199 public_api_key, from_cache=True, request=request
200 )
195 201 self.possibly_public = True
196 202 if self.resource:
197 self.__acl__.append((Allow, Everyone, 'create',))
203 self.__acl__.append((Allow, Everyone, "create"))
198 204
199 205
200 206 class AirbrakeV2APIFactory(object):
@@ -205,14 +211,13 b' class AirbrakeV2APIFactory(object):'
205 211 def __init__(self, request):
206 212 self.__acl__ = []
207 213 self.possibly_public = False
208 fixed_xml_data = ''
214 fixed_xml_data = ""
209 215 try:
210 data = request.GET.get('data')
216 data = request.GET.get("data")
211 217 if data:
212 218 self.possibly_public = True
213 219 except (UnicodeDecodeError, UnicodeEncodeError) as exc:
214 log.warning(
215 'Problem parsing Airbrake data: %s, failed decoding' % exc)
220 log.warning("Problem parsing Airbrake data: %s, failed decoding" % exc)
216 221 raise HTTPBadRequest()
217 222 try:
218 223 if not data:
@@ -220,39 +225,38 b' class AirbrakeV2APIFactory(object):'
220 225 # fix shitty airbrake js client not escaping line method attr
221 226
222 227 def repl(input):
223 return 'line method=%s file' % quoteattr(input.group(1))
228 return "line method=%s file" % quoteattr(input.group(1))
224 229
225 230 fixed_xml_data = re.sub('line method="(.*?)" file', repl, data)
226 231 root = ElementTree.fromstring(fixed_xml_data)
227 232 except Exception as exc:
228 log.info(
229 'Problem parsing Airbrake '
230 'data: %s, trying unquoting' % exc)
233 log.info("Problem parsing Airbrake " "data: %s, trying unquoting" % exc)
231 234 self.possibly_public = True
232 235 try:
233 236 root = ElementTree.fromstring(urllib.parse.unquote(fixed_xml_data))
234 237 except Exception as exc:
235 log.warning('Problem parsing Airbrake '
236 'data: %s, failed completely' % exc)
238 log.warning(
239 "Problem parsing Airbrake " "data: %s, failed completly" % exc
240 )
237 241 raise HTTPBadRequest()
238 242 self.airbrake_xml_etree = root
239 api_key = root.findtext('api-key', '')
243 api_key = root.findtext("api-key", "")
240 244
241 245 self.resource = ApplicationService.by_api_key_cached()(api_key)
242 246 if not self.resource:
243 self.resource = ApplicationService.by_public_api_key(api_key,
244 from_cache=True,
245 request=request)
247 self.resource = ApplicationService.by_public_api_key(
248 api_key, from_cache=True, request=request
249 )
246 250 if self.resource:
247 251 self.possibly_public = True
248 252
249 253 if self.resource:
250 self.__acl__.append((Allow, Everyone, 'create',))
254 self.__acl__.append((Allow, Everyone, "create"))
251 255
252 256
253 257 def parse_sentry_header(header):
254 parsed = header.split(' ', 1)[1].split(',') or []
255 return dict([x.strip().split('=') for x in parsed])
258 parsed = header.split(" ", 1)[1].split(",") or []
259 return dict([x.strip().split("=") for x in parsed])
256 260
257 261
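parse_sentry_header expects the classic "Sentry key=value, key=value" authorization header. Worked on a sample input:

    # Using the function defined above:
    parse_sentry_header("Sentry sentry_key=abc123, sentry_version=7")
    # split(" ", 1)[1]         -> "sentry_key=abc123, sentry_version=7"
    # split(",") plus strip()  -> ["sentry_key=abc123", "sentry_version=7"]
    # final dict               -> {'sentry_key': 'abc123', 'sentry_version': '7'}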
258 262 class SentryAPIFactory(object):
@@ -263,34 +267,34 b' class SentryAPIFactory(object):'
263 267 def __init__(self, request):
264 268 self.__acl__ = []
265 269 self.possibly_public = False
266 if request.headers.get('X-Sentry-Auth', '').startswith('Sentry'):
267 header_string = request.headers['X-Sentry-Auth']
270 if request.headers.get("X-Sentry-Auth", "").startswith("Sentry"):
271 header_string = request.headers["X-Sentry-Auth"]
268 272 result = parse_sentry_header(header_string)
269 elif request.headers.get('Authorization', '').startswith('Sentry'):
270 header_string = request.headers['Authorization']
273 elif request.headers.get("Authorization", "").startswith("Sentry"):
274 header_string = request.headers["Authorization"]
271 275 result = parse_sentry_header(header_string)
272 276 else:
273 result = dict((k, v) for k, v in list(request.GET.items())
274 if k.startswith('sentry_'))
275 key = result.get('sentry_key')
276 log.info('sentry request {}'.format(result))
277 result = dict(
278 (k, v) for k, v in list(request.GET.items()) if k.startswith("sentry_")
279 )
280 key = result.get("sentry_key")
281 log.info("sentry request {}".format(result))
277 282
278 283 self.resource = ApplicationService.by_api_key_cached()(key)
279 if not self.resource or \
280 result.get('sentry_client', '').startswith('raven-js'):
284 if not self.resource or result.get("sentry_client", "").startswith("raven-js"):
281 285 self.resource = ApplicationService.by_public_api_key(
282 key, from_cache=True, request=request)
286 key, from_cache=True, request=request
287 )
283 288 if self.resource:
284 self.__acl__.append((Allow, Everyone, 'create',))
289 self.__acl__.append((Allow, Everyone, "create"))
285 290
286 291
287 292 class ResourcePluginConfigFactory(object):
288
289 293 def __init__(self, request):
290 294 Resource = appenlight.models.resource.Resource
291 295 self.__acl__ = []
292 296 self.resource = None
293 plugin_id = to_integer_safe(request.matchdict.get('id'))
297 plugin_id = to_integer_safe(request.matchdict.get("id"))
294 298 self.plugin = PluginConfigService.by_id(plugin_id)
295 299 if not self.plugin:
296 300 raise HTTPNotFound()
@@ -316,7 +320,7 b' class ResourceJSONBodyFactory(object):'
316 320 Resource = appenlight.models.resource.Resource
317 321
318 322 self.__acl__ = []
319 resource_id = request.unsafe_json_body().get('resource_id')
323 resource_id = request.unsafe_json_body().get("resource_id")
320 324 resource_id = to_integer_safe(resource_id)
321 325 self.resource = ResourceService.by_resource_id(resource_id)
322 326 if self.resource and request.user:
@@ -334,9 +338,9 b' class ResourcePluginMixedFactory(object):'
334 338 json_body = request.safe_json_body
335 339 self.resource = None
336 340 if json_body:
337 resource_id = json_body.get('resource_id')
341 resource_id = json_body.get("resource_id")
338 342 else:
339 resource_id = request.GET.get('resource_id')
343 resource_id = request.GET.get("resource_id")
340 344 if resource_id:
341 345 resource_id = to_integer_safe(resource_id)
342 346 self.resource = ResourceService.by_resource_id(resource_id)
@@ -20,7 +20,7 b' import os'
20 20 from pyramid.i18n import TranslationStringFactory
21 21 from pyramid import threadlocal
22 22
23 _ = TranslationStringFactory('pyramid')
23 _ = TranslationStringFactory("pyramid")
24 24
25 25 from appenlight import security
26 26 from appenlight.lib import helpers, generate_random_string
@@ -29,70 +29,133 b' from appenlight.models.services.config import ConfigService'
29 29
30 30 def gen_urls(request):
31 31 urls = {
32 'baseUrl': request.route_url('/'),
33 'applicationsNoId': request.route_url('applications_no_id'),
34 'applications': request.route_url('applications', resource_id='REPLACE_ID').replace('REPLACE_ID',':resourceId'),
35 'applicationsProperty': request.route_url('applications_property',key='REPLACE_KEY', resource_id='REPLACE_ID').replace('REPLACE_ID',':resourceId').replace('REPLACE_KEY',':key'),
36 'configsNoId': request.route_url('admin_configs'),
37 'configs': request.route_url('admin_config', key='REPLACE_KEY', section='REPLACE_SECTION').replace('REPLACE_SECTION',':section').replace('REPLACE_KEY',':key'),
38 'docs': 'http://getappenlight.com/page/api/main.html',
39 'eventsNoId': request.route_url('events_no_id'),
40 'events': request.route_url('events', event_id='REPLACE_ID').replace('REPLACE_ID',':eventId'),
41 'eventsProperty': request.route_url('events_property',key='REPLACE_KEY', event_id='REPLACE_ID').replace('REPLACE_ID',':eventId').replace('REPLACE_KEY',':key'),
42 'groupsNoId': request.route_url('groups_no_id'),
43 'groups': request.route_url('groups', group_id='REPLACE_ID').replace('REPLACE_ID',':groupId'),
44 'groupsProperty': request.route_url('groups_property',key='REPLACE_KEY', group_id='REPLACE_ID').replace('REPLACE_ID',':groupId').replace('REPLACE_KEY',':key'),
45 'logsNoId': request.route_url('logs_no_id'),
46 'integrationAction': request.route_url('integrations_id',action='REPLACE_ACT', resource_id='REPLACE_RID', integration='REPLACE_IID').replace('REPLACE_RID',':resourceId').replace('REPLACE_ACT',':action').replace('REPLACE_IID',':integration'),
47 'usersNoId': request.route_url('users_no_id'),
48 'users': request.route_url('users', user_id='REPLACE_ID').replace('REPLACE_ID',':userId'),
49 'usersProperty': request.route_url('users_property',key='REPLACE_KEY', user_id='REPLACE_ID').replace('REPLACE_ID',':userId').replace('REPLACE_KEY',':key'),
50 'userSelf': request.route_url('users_self'),
51 'userSelfProperty': request.route_url('users_self_property',key='REPLACE_KEY').replace('REPLACE_KEY',':key'),
52 'reports': request.route_url('reports'),
53 'reportGroup': request.route_url('report_groups', group_id='REPLACE_RID').replace('REPLACE_RID',':groupId'),
54 'reportGroupProperty': request.route_url('report_groups_property', key='REPLACE_KEY', group_id='REPLACE_GID').replace('REPLACE_KEY',':key').replace('REPLACE_GID',':groupId'),
55 'pluginConfigsNoId': request.route_url('plugin_configs', plugin_name='REPLACE_TYPE').replace('REPLACE_TYPE',':plugin_name'),
56 'pluginConfigs': request.route_url('plugin_config', id='REPLACE_ID', plugin_name='REPLACE_TYPE').replace('REPLACE_ID',':id').replace('REPLACE_TYPE',':plugin_name'),
57 'resourceProperty': request.route_url('resources_property',key='REPLACE_KEY', resource_id='REPLACE_ID').replace('REPLACE_ID',':resourceId').replace('REPLACE_KEY',':key'),
58 'slowReports': request.route_url('slow_reports'),
59 'sectionView': request.route_url('section_view', section='REPLACE_S', view='REPLACE_V').replace('REPLACE_S',':section').replace('REPLACE_V',':view'),
60 'otherRoutes': {
61 'register': request.route_url('register'),
62 'lostPassword': request.route_url('lost_password'),
63 'lostPasswordGenerate': request.route_url('lost_password_generate'),
64 'signOut': request.route_url('ziggurat.routes.sign_out')
32 "baseUrl": request.route_url("/"),
33 "applicationsNoId": request.route_url("applications_no_id"),
34 "applications": request.route_url(
35 "applications", resource_id="REPLACE_ID"
36 ).replace("REPLACE_ID", ":resourceId"),
37 "applicationsProperty": request.route_url(
38 "applications_property", key="REPLACE_KEY", resource_id="REPLACE_ID"
39 )
40 .replace("REPLACE_ID", ":resourceId")
41 .replace("REPLACE_KEY", ":key"),
42 "configsNoId": request.route_url("admin_configs"),
43 "configs": request.route_url(
44 "admin_config", key="REPLACE_KEY", section="REPLACE_SECTION"
45 )
46 .replace("REPLACE_SECTION", ":section")
47 .replace("REPLACE_KEY", ":key"),
48 "docs": "http://getappenlight.com/page/api/main.html",
49 "eventsNoId": request.route_url("events_no_id"),
50 "events": request.route_url("events", event_id="REPLACE_ID").replace(
51 "REPLACE_ID", ":eventId"
52 ),
53 "eventsProperty": request.route_url(
54 "events_property", key="REPLACE_KEY", event_id="REPLACE_ID"
55 )
56 .replace("REPLACE_ID", ":eventId")
57 .replace("REPLACE_KEY", ":key"),
58 "groupsNoId": request.route_url("groups_no_id"),
59 "groups": request.route_url("groups", group_id="REPLACE_ID").replace(
60 "REPLACE_ID", ":groupId"
61 ),
62 "groupsProperty": request.route_url(
63 "groups_property", key="REPLACE_KEY", group_id="REPLACE_ID"
64 )
65 .replace("REPLACE_ID", ":groupId")
66 .replace("REPLACE_KEY", ":key"),
67 "logsNoId": request.route_url("logs_no_id"),
68 "integrationAction": request.route_url(
69 "integrations_id",
70 action="REPLACE_ACT",
71 resource_id="REPLACE_RID",
72 integration="REPLACE_IID",
73 )
74 .replace("REPLACE_RID", ":resourceId")
75 .replace("REPLACE_ACT", ":action")
76 .replace("REPLACE_IID", ":integration"),
77 "usersNoId": request.route_url("users_no_id"),
78 "users": request.route_url("users", user_id="REPLACE_ID").replace(
79 "REPLACE_ID", ":userId"
80 ),
81 "usersProperty": request.route_url(
82 "users_property", key="REPLACE_KEY", user_id="REPLACE_ID"
83 )
84 .replace("REPLACE_ID", ":userId")
85 .replace("REPLACE_KEY", ":key"),
86 "userSelf": request.route_url("users_self"),
87 "userSelfProperty": request.route_url(
88 "users_self_property", key="REPLACE_KEY"
89 ).replace("REPLACE_KEY", ":key"),
90 "reports": request.route_url("reports"),
91 "reportGroup": request.route_url(
92 "report_groups", group_id="REPLACE_RID"
93 ).replace("REPLACE_RID", ":groupId"),
94 "reportGroupProperty": request.route_url(
95 "report_groups_property", key="REPLACE_KEY", group_id="REPLACE_GID"
96 )
97 .replace("REPLACE_KEY", ":key")
98 .replace("REPLACE_GID", ":groupId"),
99 "pluginConfigsNoId": request.route_url(
100 "plugin_configs", plugin_name="REPLACE_TYPE"
101 ).replace("REPLACE_TYPE", ":plugin_name"),
102 "pluginConfigs": request.route_url(
103 "plugin_config", id="REPLACE_ID", plugin_name="REPLACE_TYPE"
104 )
105 .replace("REPLACE_ID", ":id")
106 .replace("REPLACE_TYPE", ":plugin_name"),
107 "resourceProperty": request.route_url(
108 "resources_property", key="REPLACE_KEY", resource_id="REPLACE_ID"
109 )
110 .replace("REPLACE_ID", ":resourceId")
111 .replace("REPLACE_KEY", ":key"),
112 "slowReports": request.route_url("slow_reports"),
113 "sectionView": request.route_url(
114 "section_view", section="REPLACE_S", view="REPLACE_V"
115 )
116 .replace("REPLACE_S", ":section")
117 .replace("REPLACE_V", ":view"),
118 "otherRoutes": {
119 "register": request.route_url("register"),
120 "lostPassword": request.route_url("lost_password"),
121 "lostPasswordGenerate": request.route_url("lost_password_generate"),
122 "signOut": request.route_url("ziggurat.routes.sign_out"),
65 123 },
66 'social_auth': {
67 'google': request.route_url('social_auth', provider='google'),
68 'twitter': request.route_url('social_auth', provider='twitter'),
69 'bitbucket': request.route_url('social_auth', provider='bitbucket'),
70 'github': request.route_url('social_auth', provider='github'),
124 "social_auth": {
125 "google": request.route_url("social_auth", provider="google"),
126 "twitter": request.route_url("social_auth", provider="twitter"),
127 "bitbucket": request.route_url("social_auth", provider="bitbucket"),
128 "github": request.route_url("social_auth", provider="github"),
71 129 },
72 130 "plugins": {},
73 "adminAction": request.route_url('admin', action="REPLACE_ACT").replace('REPLACE_ACT',':action')
131 "adminAction": request.route_url("admin", action="REPLACE_ACT").replace(
132 "REPLACE_ACT", ":action"
133 ),
74 134 }
75 135 return urls
76 136
137
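The replace chains in gen_urls exist because Pyramid's route_url only interpolates concrete values: each route is rendered with a sentinel, and the sentinel is then swapped for a client-side parameter marker. In isolation:

    url = "/applications/REPLACE_ID".replace("REPLACE_ID", ":resourceId")
    # -> "/applications/:resourceId", a URL template the JavaScript frontend can
    #    fill in itself (the ":name" marker style is an assumption about the client)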
77 138 def new_request(event):
78 139 environ = event.request.environ
79 event.request.response.headers['X-Frame-Options'] = 'SAMEORIGIN'
80 event.request.response.headers['X-XSS-Protection'] = '1; mode=block'
140 event.request.response.headers["X-Frame-Options"] = "SAMEORIGIN"
141 event.request.response.headers["X-XSS-Protection"] = "1; mode=block"
81 142 # can this be enabled on non https deployments?
82 143 # event.request.response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubdomains;'
83 144
84 145 # do not send XSRF token with /api calls
85 if not event.request.path.startswith('/api'):
86 if environ['wsgi.url_scheme'] == 'https':
146 if not event.request.path.startswith("/api"):
147 if environ["wsgi.url_scheme"] == "https":
87 148 event.request.response.set_cookie(
88 'XSRF-TOKEN', event.request.session.get_csrf_token(),
89 secure=True)
149 "XSRF-TOKEN", event.request.session.get_csrf_token(), secure=True
150 )
90 151 else:
91 152 event.request.response.set_cookie(
92 'XSRF-TOKEN', event.request.session.get_csrf_token())
153 "XSRF-TOKEN", event.request.session.get_csrf_token()
154 )
93 155 if event.request.user:
94 event.request.response.headers[
95 'x-appenlight-uid'] = '%s' % event.request.user.id
156 event.request.response.headers["x-appenlight-uid"] = (
157 "%s" % event.request.user.id
158 )
96 159
97 160
98 161 def add_renderer_globals(event):
@@ -102,49 +165,55 b' def add_renderer_globals(event):'
102 165 renderer_globals["h"] = helpers
103 166 renderer_globals["js_hash"] = request.registry.js_hash
104 167 renderer_globals["css_hash"] = request.registry.css_hash
105 renderer_globals['_'] = _
106 renderer_globals['security'] = security
107 renderer_globals['flash_msgs'] = []
108 renderer_globals['appenlight_plugins'] = []
168 renderer_globals["_"] = _
169 renderer_globals["security"] = security
170 renderer_globals["flash_msgs"] = []
171 renderer_globals["appenlight_plugins"] = []
109 172
110 if 'jinja' in event['renderer_info'].type:
111 renderer_globals['url_list'] = gen_urls(request)
173 if "jinja" in event["renderer_info"].type:
174 renderer_globals["url_list"] = gen_urls(request)
112 175 # add footer html and some other global vars to renderer
113 176 for module, config in request.registry.appenlight_plugins.items():
114 if config['url_gen']:
115 urls = config['url_gen'](request)
116 renderer_globals['url_list']['plugins'][module] = urls
117
118 renderer_globals['appenlight_plugins'].append(
119 {'name': module,
120 'config': {
121 'javascript':config['javascript'],
122 'header_html':config['header_html']
123 }})
177 if config["url_gen"]:
178 urls = config["url_gen"](request)
179 renderer_globals["url_list"]["plugins"][module] = urls
180
181 renderer_globals["appenlight_plugins"].append(
182 {
183 "name": module,
184 "config": {
185 "javascript": config["javascript"],
186 "header_html": config["header_html"],
187 },
188 }
189 )
124 190
125 191 footer_config = ConfigService.by_key_and_section(
126 'template_footer_html', 'global', default_value='')
192 "template_footer_html", "global", default_value=""
193 )
127 194
128 renderer_globals['template_footer_html'] = footer_config.value
195 renderer_globals["template_footer_html"] = footer_config.value
129 196 try:
130 renderer_globals['root_administrator'] = request.has_permission(
131 'root_administration', security.RootFactory(request))
197 renderer_globals["root_administrator"] = request.has_permission(
198 "root_administration", security.RootFactory(request)
199 )
132 200 except AttributeError:
133 renderer_globals['root_administrator'] = False
201 renderer_globals["root_administrator"] = False
134 202
135 renderer_globals['_mail_url'] = request.registry.settings['_mail_url']
203 renderer_globals["_mail_url"] = request.registry.settings["_mail_url"]
136 204
137 205 if not request:
138 206 return
139 207
140 208 # do not send flash headers with /api calls
141 if not request.path.startswith('/api'):
209 if not request.path.startswith("/api"):
142 210 flash_msgs = helpers.get_type_formatted_flash(request)
143 renderer_globals['flash_msgs'] = flash_msgs
211 renderer_globals["flash_msgs"] = flash_msgs
144 212 request.add_flash_to_headers()
145 213
214
146 215 def application_created(app):
147 webassets_dir = app.app.registry.settings.get('webassets.dir')
216 webassets_dir = app.app.registry.settings.get("webassets.dir")
148 217 js_hash = generate_random_string()
149 218 css_hash = generate_random_string()
150 219 if webassets_dir:
@@ -153,16 +222,14 b' def application_created(app):'
153 222 for root, dirs, files in os.walk(webassets_dir):
154 223 for name in files:
155 224 filename = os.path.join(root, name)
156 if name.endswith('css'):
157 with open(filename, 'r', encoding='utf8',
158 errors='replace') as f:
225 if name.endswith("css"):
226 with open(filename, "r", encoding="utf8", errors="replace") as f:
159 227 for line in f:
160 css_hasher.update(line.encode('utf8'))
161 elif name.endswith('js'):
162 with open(filename, 'r', encoding='utf8',
163 errors='replace') as f:
228 css_hasher.update(line.encode("utf8"))
229 elif name.endswith("js"):
230 with open(filename, "r", encoding="utf8", errors="replace") as f:
164 231 for line in f:
165 js_hasher.update(line.encode('utf8'))
232 js_hasher.update(line.encode("utf8"))
166 233 js_hash = js_hasher.hexdigest()
167 234 css_hash = css_hasher.hexdigest()
168 235 app.app.registry.js_hash = js_hash
@@ -13,4 +13,3 b''
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
@@ -31,17 +31,19 b' from pyramid import testing'
31 31 @pytest.fixture
32 32 def base_app(request, mocker):
33 33 # disable email sending
34 mocker.patch('pyramid_mailer.mailer_factory_from_settings', mocker.Mock())
34 mocker.patch("pyramid_mailer.mailer_factory_from_settings", mocker.Mock())
35 35
36 36 from appenlight import main
37 37 import transaction
38
38 39 current_dir = os.path.dirname(os.path.abspath(__file__))
39 path = os.path.join(current_dir, '../../../../',
40 os.environ.get("APPENLIGHT_INI", 'testing.ini'))
40 path = os.path.join(
41 current_dir, "../../../../", os.environ.get("APPENLIGHT_INI", "testing.ini")
42 )
41 43 # appsettings from ini
42 44 app_settings = get_appsettings(path, name="appenlight")
43 45 app = main({}, **app_settings)
44 app_request = testing.DummyRequest(base_url='https://appenlight.com')
46 app_request = testing.DummyRequest(base_url="https://appenlight.com")
45 47 app_request.tm = transaction.manager
46 48 app_request.add_flash_to_headers = mock.Mock()
47 49 testing.setUp(registry=app.registry, request=app_request)
@@ -58,8 +60,7 b' def base_app(request, mocker):'
58 60 def with_migrations(request, base_app):
59 61 settings = base_app.registry.settings
60 62 alembic_cfg = Config()
61 alembic_cfg.set_main_option("script_location",
62 "ziggurat_foundations:migrations")
63 alembic_cfg.set_main_option("script_location", "ziggurat_foundations:migrations")
63 64 alembic_cfg.set_main_option("sqlalchemy.url", settings["sqlalchemy.url"])
64 65 command.upgrade(alembic_cfg, "head")
65 66 alembic_cfg = Config()
@@ -68,13 +69,14 b' def with_migrations(request, base_app):'
68 69 command.upgrade(alembic_cfg, "head")
69 70
70 71 for plugin_name, config in base_app.registry.appenlight_plugins.items():
71 if config['sqlalchemy_migrations']:
72 if config["sqlalchemy_migrations"]:
72 73 alembic_cfg = Config()
73 alembic_cfg.set_main_option("script_location",
74 config['sqlalchemy_migrations'])
75 74 alembic_cfg.set_main_option(
76 "sqlalchemy.url",
77 base_app.registry.settings["sqlalchemy.url"])
75 "script_location", config["sqlalchemy_migrations"]
76 )
77 alembic_cfg.set_main_option(
78 "sqlalchemy.url", base_app.registry.settings["sqlalchemy.url"]
79 )
78 80 command.upgrade(alembic_cfg, "head")
79 81
80 82
@@ -82,11 +84,12 b' def with_migrations(request, base_app):'
82 84 def default_data(base_app):
83 85 from appenlight.models.services.config import ConfigService
84 86 from appenlight.lib import get_callable
87
85 88 transaction.begin()
86 89 ConfigService.setup_default_values()
87 90 for plugin_name, config in base_app.registry.appenlight_plugins.items():
88 if config['default_values_setter']:
89 get_callable(config['default_values_setter'])()
91 if config["default_values_setter"]:
92 get_callable(config["default_values_setter"])()
90 93 transaction.commit()
91 94
92 95
@@ -98,8 +101,8 b' def clean_tables(request):'
98 101 tables = Base.metadata.tables.keys()
99 102 transaction.begin()
100 103 for t in tables:
101 if not t.startswith('alembic_'):
102 DBSession.execute('truncate %s cascade' % t)
104 if not t.startswith("alembic_"):
105 DBSession.execute("truncate %s cascade" % t)
103 106 session = DBSession()
104 107 mark_changed(session)
105 108 transaction.commit()
@@ -112,14 +115,12 b' def default_user():'
112 115 from appenlight.models import DBSession
113 116 from appenlight.models.user import User
114 117 from appenlight.models.auth_token import AuthToken
118
115 119 transaction.begin()
116 120 session = DBSession()
117 user = User(id=1,
118 user_name='testuser',
119 status=1,
120 email='foo@barbaz99.com')
121 user = User(id=1, user_name="testuser", status=1, email="foo@barbaz99.com")
121 122 session.add(user)
122 token = AuthToken(token='1234')
123 token = AuthToken(token="1234")
123 124 user.auth_tokens.append(token)
124 125 session.execute("SELECT nextval('users_id_seq')")
125 126 transaction.commit()
@@ -133,8 +134,7 b' def default_application(default_user):'
133 134
134 135 transaction.begin()
135 136 session = DBSession()
136 application = Application(
137 resource_id=1, resource_name='testapp', api_key='xxxx')
137 application = Application(resource_id=1, resource_name="testapp", api_key="xxxx")
138 138 session.add(application)
139 139 default_user.resources.append(application)
140 140 session.execute("SELECT nextval('resources_resource_id_seq')")
@@ -145,6 +145,7 b' def default_application(default_user):'
145 145 @pytest.fixture
146 146 def report_type_matrix():
147 147 from appenlight.models.report import REPORT_TYPE_MATRIX
148
148 149 return REPORT_TYPE_MATRIX
149 150
150 151
@@ -153,38 +154,43 b' def chart_series():'
153 154 series = []
154 155
155 156 for x in range(1, 7):
156 tmp_list = [('key', 'X'), ('0_1', x)]
157 tmp_list = [("key", "X"), ("0_1", x)]
157 158 if x % 2 == 0:
158 tmp_list.append(('0_2', x))
159 tmp_list.append(("0_2", x))
159 160 if x % 3 == 0:
160 tmp_list.append(('0_3', x))
161 tmp_list.append(("0_3", x))
161 162
162 series.append(
163 OrderedDict(tmp_list)
164 )
163 series.append(OrderedDict(tmp_list))
165 164 return series
166 165
167 166
168 167 @pytest.fixture
169 168 def log_schema():
170 169 from appenlight.validators import LogListSchema
170
171 171 schema = LogListSchema().bind(utcnow=datetime.utcnow())
172 172 return schema
173 173
174
174 175 @pytest.fixture
175 176 def general_metrics_schema():
176 177 from appenlight.validators import GeneralMetricsListSchema
178
177 179 schema = GeneralMetricsListSchema().bind(utcnow=datetime.utcnow())
178 180 return schema
179 181
182
180 183 @pytest.fixture
181 184 def request_metrics_schema():
182 185 from appenlight.validators import MetricsListSchema
186
183 187 schema = MetricsListSchema().bind(utcnow=datetime.utcnow())
184 188 return schema
185 189
190
186 191 @pytest.fixture
187 192 def report_05_schema():
188 193 from appenlight.validators import ReportListSchema_0_5
194
189 195 schema = ReportListSchema_0_5().bind(utcnow=datetime.utcnow())
190 196 return schema
@@ -20,12 +20,15 b' from datetime import datetime'
20 20
21 21 now = datetime.utcnow().date()
22 22
23 REQUEST_METRICS_EXAMPLES = [{
23 REQUEST_METRICS_EXAMPLES = [
24 {
24 25 "server": "some.server.hostname",
25 "timestamp": now.strftime('%Y-%m-%dT%H:%M:%S.0'),
26 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
26 27 "metrics": [
27 ["dir/module:func",
28 {"custom": 0.0,
28 [
29 "dir/module:func",
30 {
31 "custom": 0.0,
29 32 "custom_calls": 0,
30 33 "main": 0.01664,
31 34 "nosql": 0.00061,
@@ -36,9 +39,13 b' REQUEST_METRICS_EXAMPLES = [{'
36 39 "sql": 0.00105,
37 40 "sql_calls": 2,
38 41 "tmpl": 0.0,
39 "tmpl_calls": 0}],
40 ["SomeView.function",
41 {"custom": 0.0,
42 "tmpl_calls": 0,
43 },
44 ],
45 [
46 "SomeView.function",
47 {
48 "custom": 0.0,
42 49 "custom_calls": 0,
43 50 "main": 0.647261,
44 51 "nosql": 0.306554,
@@ -49,8 +56,12 b' REQUEST_METRICS_EXAMPLES = [{'
49 56 "sql": 0.0,
50 57 "sql_calls": 0,
51 58 "tmpl": 0.0,
52 "tmpl_calls": 0}]]
53 }]
59 "tmpl_calls": 0,
60 },
61 ],
62 ],
63 }
64 ]
54 65
55 66 LOG_EXAMPLES = [
56 67 {
@@ -59,11 +70,8 b' LOG_EXAMPLES = ['
59 70 "namespace": "some.namespace.indicator",
60 71 "request_id": "SOME_UUID",
61 72 "server": "some server",
62 "tags": [["tag_name", "tag_value"],
63 ["tag_name2", 2]
64
65 ],
66 "date": now.strftime('%Y-%m-%dT%H:%M:%S.%f')
73 "tags": [["tag_name", "tag_value"], ["tag_name2", 2]],
74 "date": now.strftime("%Y-%m-%dT%H:%M:%S.%f"),
67 75 },
68 76 {
69 77 "log_level": "ERROR",
@@ -71,76 +79,79 b' LOG_EXAMPLES = ['
71 79 "namespace": "some.namespace.indicator",
72 80 "request_id": "SOME_UUID",
73 81 "server": "some server",
74 "date": now.strftime('%Y-%m-%dT%H:%M:%S.%f')
75 }
82 "date": now.strftime("%Y-%m-%dT%H:%M:%S.%f"),
83 },
76 84 ]
77 85
78 86 PARSED_REPORT_404 = {
79 'report_details': [{
80 'username': 'foo',
81 'url': 'http://localhost:6543/test/error?aaa=1&bbb=2',
82 'ip': '127.0.0.1',
83 'start_time': now.strftime(
84 '%Y-%m-%dT%H:%M:%S.0'),
85 'slow_calls': [],
86 'request': {'COOKIES': {
87 'country': 'US',
88 'sessionId': '***',
89 'test_group_id': '5',
90 'http_referer': 'http://localhost:5000/'},
91 'POST': {},
92 'GET': {'aaa': ['1'], 'bbb': ['2']},
93 'HTTP_METHOD': 'GET',
94 },
95 'user_agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0.1) Gecko/20100101 Firefox/10.0.1',
96 'message': '',
97 'end_time': now.strftime(
98 '%Y-%m-%dT%H:%M:%S.0'),
99 'request_stats': {}
100 }],
101 'error': '404 Not Found',
102 'server': "servername/instancename",
103 'priority': 5,
104 'client': 'appenlight-python',
105 'language': 'python',
106 'http_status': 404}
87 "report_details": [
88 {
89 "username": "foo",
90 "url": "http://localhost:6543/test/error?aaa=1&bbb=2",
91 "ip": "127.0.0.1",
92 "start_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
93 "slow_calls": [],
94 "request": {
95 "COOKIES": {
96 "country": "US",
97 "sessionId": "***",
98 "test_group_id": "5",
99 "http_referer": "http://localhost:5000/",
100 },
101 "POST": {},
102 "GET": {"aaa": ["1"], "bbb": ["2"]},
103 "HTTP_METHOD": "GET",
104 },
105 "user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:10.0.1) Gecko/20100101 Firefox/10.0.1",
106 "message": "",
107 "end_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
108 "request_stats": {},
109 }
110 ],
111 "error": "404 Not Found",
112 "server": "servername/instancename",
113 "priority": 5,
114 "client": "appenlight-python",
115 "language": "python",
116 "http_status": 404,
117 }
107 118
108 119 PYTHON_PAYLOAD_0_4 = {
109 120 "client": "your-client-name-python",
110 121 "language": "python",
111 'view_name': 'views/foo:bar',
112 'server': "servername/instancename",
122 "view_name": "views/foo:bar",
123 "server": "servername/instancename",
113 124 "priority": 5,
114 125 "error": "OMG ValueError happened test",
115 126 "occurences": 1,
116 127 "http_status": 500,
117 128 "report_details": [
118 {"username": "USER",
129 {
130 "username": "USER",
119 131 "url": "HTTP://SOMEURL",
120 132 "ip": "127.0.0.1",
121 "start_time": now.strftime(
122 '%Y-%m-%dT%H:%M:%S.0'),
123 "end_time": now.strftime(
124 '%Y-%m-%dT%H:%M:%S.0'),
133 "start_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
134 "end_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
125 135 "user_agent": "BROWSER_AGENT",
126 136 "message": "arbitrary text that will get attached to your report",
127 137 "request_id": "SOME_UUID",
128 "request": {"REQUEST_METHOD": "GET",
138 "request": {
139 "REQUEST_METHOD": "GET",
129 140 "PATH_INFO": "/FOO/BAR",
130 "POST": {"FOO": "BAZ", "XXX": "YYY"}
141 "POST": {"FOO": "BAZ", "XXX": "YYY"},
131 142 },
132 143 "slow_calls": [
133 144 {
134 145 "type": "sql",
135 "start": now.strftime(
136 '%Y-%m-%dT%H:%M:%S.0'),
137 "end": now.strftime(
138 '%Y-%m-%dT%H:%M:%S.0'),
146 "start": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
147 "end": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
139 148 "subtype": "postgresql",
140 "parameters": ["QPARAM1", "QPARAM2",
141 "QPARAMX"],
142 "statement": "QUERY"}],
143 "request_stats": {"main": 0.50779,
149 "parameters": ["QPARAM1", "QPARAM2", "QPARAMX"],
150 "statement": "QUERY",
151 }
152 ],
153 "request_stats": {
154 "main": 0.50779,
144 155 "nosql": 0.01008,
145 156 "nosql_calls": 17.0,
146 157 "remote": 0.0,
@@ -150,35 +161,37 b' PYTHON_PAYLOAD_0_4 = {'
150 161 "sql": 0.42423,
151 162 "sql_calls": 1.0,
152 163 "tmpl": 0.0,
153 "tmpl_calls": 0.0},
164 "tmpl_calls": 0.0,
165 },
154 166 "traceback": [
155 {"cline": "return foo_bar_baz(1,2,3)",
167 {
168 "cline": "return foo_bar_baz(1,2,3)",
156 169 "file": "somedir/somefile.py",
157 170 "fn": "somefunction",
158 171 "line": 454,
159 "vars": [["a_list",
160 ["1",
161 "2",
162 "4",
163 "5",
164 "6"]],
165 ["b",
166 {1: "2", "ccc": "ddd", "1": "a"}],
167 ["obj",
168 "<object object at 0x7f0030853dc0>"]]},
169 {"cline": "OMG ValueError happened",
172 "vars": [
173 ["a_list", ["1", "2", "4", "5", "6"]],
174 ["b", {1: "2", "ccc": "ddd", "1": "a"}],
175 ["obj", "<object object at 0x7f0030853dc0>"],
176 ],
177 },
178 {
179 "cline": "OMG ValueError happened",
170 180 "file": "",
171 181 "fn": "",
172 182 "line": "",
173 "vars": []}]
174 }]
183 "vars": [],
184 },
185 ],
186 }
187 ],
175 188 }
176 189
177 190 PYTHON_PAYLOAD_0_5 = {
178 191 "client": "your-client-name-python",
179 192 "language": "python",
180 'view_name': 'views/foo:bar',
181 'server': "servername/instancename",
193 "view_name": "views/foo:bar",
194 "server": "servername/instancename",
182 195 "priority": 5,
183 196 "error": "OMG ValueError happened test",
184 197 "occurences": 1,
@@ -186,27 +199,28 b' PYTHON_PAYLOAD_0_5 = {'
186 199 "username": "USER",
187 200 "url": "HTTP://SOMEURL",
188 201 "ip": "127.0.0.1",
189 "start_time": now.strftime(
190 '%Y-%m-%dT%H:%M:%S.0'),
191 "end_time": now.strftime(
192 '%Y-%m-%dT%H:%M:%S.0'),
202 "start_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
203 "end_time": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
193 204 "user_agent": "BROWSER_AGENT",
194 205 "message": "arbitrary text that will get attached to your report",
195 206 "request_id": "SOME_UUID",
196 "request": {"REQUEST_METHOD": "GET",
207 "request": {
208 "REQUEST_METHOD": "GET",
197 209 "PATH_INFO": "/FOO/BAR",
198 "POST": {"FOO": "BAZ", "XXX": "YYY"}},
210 "POST": {"FOO": "BAZ", "XXX": "YYY"},
211 },
199 212 "slow_calls": [
200 213 {
201 214 "type": "sql",
202 "start": now.strftime(
203 '%Y-%m-%dT%H:%M:%S.0'),
204 "end": now.strftime(
205 '%Y-%m-%dT%H:%M:%S.0'),
215 "start": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
216 "end": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
206 217 "subtype": "postgresql",
207 218 "parameters": ["QPARAM1", "QPARAM2", "QPARAMX"],
208 "statement": "QUERY"}],
209 "request_stats": {"main": 0.50779,
219 "statement": "QUERY",
220 }
221 ],
222 "request_stats": {
223 "main": 0.50779,
210 224 "nosql": 0.01008,
211 225 "nosql_calls": 17.0,
212 226 "remote": 0.0,
@@ -216,130 +230,155 b' PYTHON_PAYLOAD_0_5 = {'
216 230 "sql": 0.42423,
217 231 "sql_calls": 1.0,
218 232 "tmpl": 0.0,
219 "tmpl_calls": 0.0},
233 "tmpl_calls": 0.0,
234 },
220 235 "traceback": [
221 {"cline": "return foo_bar_baz(1,2,3)",
236 {
237 "cline": "return foo_bar_baz(1,2,3)",
222 238 "file": "somedir/somefile.py",
223 239 "fn": "somefunction",
224 240 "line": 454,
225 "vars": [["a_list",
226 ["1",
227 "2",
228 "4",
229 "5",
230 "6"]],
241 "vars": [
242 ["a_list", ["1", "2", "4", "5", "6"]],
231 243 ["b", {1: "2", "ccc": "ddd", "1": "a"}],
232 ["obj",
233 "<object object at 0x7f0030853dc0>"]]},
234 {"cline": "OMG ValueError happened",
244 ["obj", "<object object at 0x7f0030853dc0>"],
245 ],
246 },
247 {
248 "cline": "OMG ValueError happened",
235 249 "file": "",
236 250 "fn": "",
237 251 "line": "",
238 "vars": []}]
252 "vars": [],
253 },
254 ],
239 255 }
240 256
241 257 PHP_PAYLOAD = {
242 'client': 'php',
243 'error': 'Nie mo\u017cna ustali\u0107 \u017c\u0105dania "feed.xml".',
244 'error_type': '',
245 'http_status': 404,
246 'language': 'unknown',
247 'priority': 1,
248 'report_details': [{'end_time': None,
249 'group_string': None,
250 'ip': None,
251 'message': 'exception \'CHttpException\' with message \'Nie mo\u017cna ustali\u0107 \u017c\u0105dania "feed.xml".\' in /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php:286\nStack trace:\n#0 /home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php(34): CWebApplication->runController(\'feed.xml\')\n#1 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php(141): WebApplication->runController(\'feed.xml\')\n#2 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php(180): CWebApplication->processRequest()\n#3 /home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php(23): CApplication->run()\n#4 {main}',
252 'occurences': 1,
253 'request': {
254 'COOKIES': [],
255 'FILES': [],
256 'GET': [],
257 'POST': [],
258 'REQUEST_METHOD': None,
259 'SERVER': {
260 'DOCUMENT_ROOT': '/home/dobryslownik/www/sites/dobryslownik/frontend/www/',
261 'GATEWAY_INTERFACE': 'CGI/1.1',
262 'HTTPS': 'on',
263 'HTTP_ACCEPT': '*/*',
264 'HTTP_ACCEPT_ENCODING': 'gzip, deflate',
265 'HTTP_ACCEPT_LANGUAGE': 'pl-PL',
266 'HTTP_CONNECTION': 'close',
267 'HTTP_HOST': 'dobryslownik.pl',
268 'HTTP_IF_MODIFIED_SINCE': 'Wed, 30 Jul 2014 18:26:32 GMT',
269 'HTTP_IF_NONE_MATCH': '"45de3-2a3-4ff6d4b9fbe7f"',
270 'HTTP_USER_AGENT': 'Apple-PubSub/28',
271 'HTTP_X_FORWARDED_FOR': '195.150.190.186',
272 'HTTP_X_FORWARDED_PROTO': 'https',
273 'PATH': '/bin:/usr/bin:/usr/ucb:/usr/bsd:/usr/local/bin',
274 'PHP_SELF': '/index.php',
275 'QUERY_STRING': '',
276 'REDIRECT_HTTPS': 'on',
277 'REDIRECT_STATUS': '200',
278 'REDIRECT_UNIQUE_ID': 'VFAhZQoCaXIAAAkd414AAAAC',
279 'REDIRECT_URL': '/feed.xml',
280 'REMOTE_ADDR': '195.150.190.186',
281 'REMOTE_PORT': '41728',
282 'REQUEST_METHOD': 'GET',
283 'REQUEST_TIME': 1414537573,
284 'REQUEST_TIME_FLOAT': 1414537573.32,
285 'REQUEST_URI': '/feed.xml',
286 'SCRIPT_FILENAME': '/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php',
287 'SCRIPT_NAME': '/index.php',
288 'SERVER_ADDR': '10.2.105.114',
289 'SERVER_ADMIN': '[no address given]',
290 'SERVER_NAME': 'dobryslownik.pl',
291 'SERVER_SIGNATURE': '',
292 'SERVER_SOFTWARE': 'Apache/2.2.22 (Ubuntu) PHP/5.4.17',
293 'UNIQUE_ID': 'VFAg4AoCaXIAAAkd40UAAAAC'},
294 'SESSION': []},
295 'request_id': 'VFAg4AoCaXIAAAkd40UAAAAC',
296 'request_stats': {'custom': 0,
297 'custom_calls': 0,
298 'main': 0,
299 'nosql': 0.0,
300 'nosql_calls': 0.0,
301 'remote': 0.0,
302 'remote_calls': 0.0,
303 'sql': 0.0,
304 'sql_calls': 0.0,
305 'tmpl': 0.0,
306 'tmpl_calls': 0.0,
307 'unknown': 0.0},
308 'slow_calls': [],
309 'start_time': None,
310 'frameinfo': [
311 {'cline': None,
312 'file': '/home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php',
313 'fn': 'CWebApplication->runController',
314 'line': 34,
315 'vars': ['feed.xml']},
316 {'cline': None,
317 'file': '/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php',
318 'fn': 'WebApplication->runController',
319 'line': 141,
320 'vars': ['feed.xml']},
321 {'cline': None,
322 'file': '/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php',
323 'fn': 'CWebApplication->processRequest',
324 'line': 180,
325 'vars': []},
326 {'cline': None,
327 'file': '/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php',
328 'fn': 'CApplication->run',
329 'line': 23,
330 'vars': []}],
331 'url': 'https://dobryslownik.pl/feed.xml',
332 'user_agent': 'magpie-crawler/1.1 (U; Linux amd64; en-GB; +http://www.brandwatch.net)',
333 'username': 'guest'}],
334 'server': 'unknown',
335 'traceback': '',
336 'view_name': ''}
258 "client": "php",
259 "error": 'Nie mo\u017cna ustali\u0107 \u017c\u0105dania "feed.xml".',
260 "error_type": "",
261 "http_status": 404,
262 "language": "unknown",
263 "priority": 1,
264 "report_details": [
265 {
266 "end_time": None,
267 "group_string": None,
268 "ip": None,
269 "message": "exception 'CHttpException' with message 'Nie mo\u017cna ustali\u0107 \u017c\u0105dania \"feed.xml\".' in /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php:286\nStack trace:\n#0 /home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php(34): CWebApplication->runController('feed.xml')\n#1 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php(141): WebApplication->runController('feed.xml')\n#2 /home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php(180): CWebApplication->processRequest()\n#3 /home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php(23): CApplication->run()\n#4 {main}",
270 "occurences": 1,
271 "request": {
272 "COOKIES": [],
273 "FILES": [],
274 "GET": [],
275 "POST": [],
276 "REQUEST_METHOD": None,
277 "SERVER": {
278 "DOCUMENT_ROOT": "/home/dobryslownik/www/sites/dobryslownik/frontend/www/",
279 "GATEWAY_INTERFACE": "CGI/1.1",
280 "HTTPS": "on",
281 "HTTP_ACCEPT": "*/*",
282 "HTTP_ACCEPT_ENCODING": "gzip, deflate",
283 "HTTP_ACCEPT_LANGUAGE": "pl-PL",
284 "HTTP_CONNECTION": "close",
285 "HTTP_HOST": "dobryslownik.pl",
286 "HTTP_IF_MODIFIED_SINCE": "Wed, 30 Jul 2014 18:26:32 GMT",
287 "HTTP_IF_NONE_MATCH": '"45de3-2a3-4ff6d4b9fbe7f"',
288 "HTTP_USER_AGENT": "Apple-PubSub/28",
289 "HTTP_X_FORWARDED_FOR": "195.150.190.186",
290 "HTTP_X_FORWARDED_PROTO": "https",
291 "PATH": "/bin:/usr/bin:/usr/ucb:/usr/bsd:/usr/local/bin",
292 "PHP_SELF": "/index.php",
293 "QUERY_STRING": "",
294 "REDIRECT_HTTPS": "on",
295 "REDIRECT_STATUS": "200",
296 "REDIRECT_UNIQUE_ID": "VFAhZQoCaXIAAAkd414AAAAC",
297 "REDIRECT_URL": "/feed.xml",
298 "REMOTE_ADDR": "195.150.190.186",
299 "REMOTE_PORT": "41728",
300 "REQUEST_METHOD": "GET",
301 "REQUEST_TIME": 1414537573,
302 "REQUEST_TIME_FLOAT": 1414537573.32,
303 "REQUEST_URI": "/feed.xml",
304 "SCRIPT_FILENAME": "/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php",
305 "SCRIPT_NAME": "/index.php",
306 "SERVER_ADDR": "10.2.105.114",
307 "SERVER_ADMIN": "[no address given]",
308 "SERVER_NAME": "dobryslownik.pl",
309 "SERVER_SIGNATURE": "",
310 "SERVER_SOFTWARE": "Apache/2.2.22 (Ubuntu) PHP/5.4.17",
311 "UNIQUE_ID": "VFAg4AoCaXIAAAkd40UAAAAC",
312 },
313 "SESSION": [],
314 },
315 "request_id": "VFAg4AoCaXIAAAkd40UAAAAC",
316 "request_stats": {
317 "custom": 0,
318 "custom_calls": 0,
319 "main": 0,
320 "nosql": 0.0,
321 "nosql_calls": 0.0,
322 "remote": 0.0,
323 "remote_calls": 0.0,
324 "sql": 0.0,
325 "sql_calls": 0.0,
326 "tmpl": 0.0,
327 "tmpl_calls": 0.0,
328 "unknown": 0.0,
329 },
330 "slow_calls": [],
331 "start_time": None,
332 "frameinfo": [
333 {
334 "cline": None,
335 "file": "/home/dobryslownik/www/sites/dobryslownik/common/components/WebApplication.php",
336 "fn": "CWebApplication->runController",
337 "line": 34,
338 "vars": ["feed.xml"],
339 },
340 {
341 "cline": None,
342 "file": "/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/web/CWebApplication.php",
343 "fn": "WebApplication->runController",
344 "line": 141,
345 "vars": ["feed.xml"],
346 },
347 {
348 "cline": None,
349 "file": "/home/dobryslownik/www/sites/dobryslownik/vendor/yiisoft/yii/framework/base/CApplication.php",
350 "fn": "CWebApplication->processRequest",
351 "line": 180,
352 "vars": [],
353 },
354 {
355 "cline": None,
356 "file": "/home/dobryslownik/www/sites/dobryslownik/frontend/www/index.php",
357 "fn": "CApplication->run",
358 "line": 23,
359 "vars": [],
360 },
361 ],
362 "url": "https://dobryslownik.pl/feed.xml",
363 "user_agent": "magpie-crawler/1.1 (U; Linux amd64; en-GB; +http://www.brandwatch.net)",
364 "username": "guest",
365 }
366 ],
367 "server": "unknown",
368 "traceback": "",
369 "view_name": "",
370 }
337 371
338 372 JS_PAYLOAD = {
339 "client": "javascript", "language": "javascript",
373 "client": "javascript",
374 "language": "javascript",
340 375 "error_type": "ReferenceError: non_existant_var is not defined",
341 "occurences": 1, "priority": 5, "server": "jstest.appenlight",
342 "http_status": 500, "report_details": [{
376 "occurences": 1,
377 "priority": 5,
378 "server": "jstest.appenlight",
379 "http_status": 500,
380 "report_details": [
381 {
343 382 "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
344 383 "start_time": "2014-10-29T19:59:30.589Z",
345 384 "frameinfo": [
@@ -348,24 +387,31 b' JS_PAYLOAD = {'
348 387 "file": "http://demo.appenlight.com/#",
349 388 "fn": "HTMLAnchorElement.onclick",
350 389 "line": 79,
351 "vars": []},
390 "vars": [],
391 },
352 392 {
353 393 "cline": "",
354 394 "file": "http://demo.appenlight.com/static/js/demo.js",
355 395 "fn": "test_error",
356 396 "line": 7,
357 "vars": []},
397 "vars": [],
398 },
358 399 {
359 400 "cline": "ReferenceError: non_existant_var is not defined",
360 401 "file": "http://demo.appenlight.com/static/js/demo.js",
361 402 "fn": "something",
362 403 "line": 2,
363 "vars": []}],
404 "vars": [],
405 },
406 ],
364 407 "url": "http://demo.appenlight.com/#",
365 408 "server": "jstest.appenlight",
366 409 "username": "i_am_mario",
367 410 "ip": "127.0.0.1",
368 "request_id": "0.01984176435507834"}]}
411 "request_id": "0.01984176435507834",
412 }
413 ],
414 }
369 415
370 416 AIRBRAKE_RUBY_EXAMPLE = """
371 417 <?xml version="1.0" ?>
@@ -590,9 +636,13 b' AIRBRAKE_RUBY_EXAMPLE = """'
590 636 </server-environment>
591 637 <framework>Rails: 3.2.11</framework>
592 638 </notice>
593 """.replace('\n', '').replace(' ', '')
639 """.replace(
640 "\n", ""
641 ).replace(
642 " ", ""
643 )
594 644
595 AIRBRAKE_EXAMPLE_SHORT = '''
645 AIRBRAKE_EXAMPLE_SHORT = """
596 646 <?xml version="1.0" encoding="UTF-8"?>
597 647 <notice version="2.3">
598 648 <api-key>76fdb93ab2cf276ec080671a8b3d3866</api-key>
@@ -624,232 +674,304 b" AIRBRAKE_EXAMPLE_SHORT = '''"
624 674 <app-version>1.0.0</app-version>
625 675 </server-environment>
626 676 </notice>
627 '''.replace('\n', '').replace(' ', '')
677 """.replace(
678 "\n", ""
679 ).replace(
680 " ", ""
681 )
628 682
629 683 SENTRY_PYTHON_PAYLOAD_7 = {
630 'culprit': 'djangoapp.views in error',
631 'event_id': '9fae652c8c1c4d6a8eee09260f613a98',
632 'exception': {
633 'values': [
634 {'module': 'exceptions',
635 'stacktrace': {'frames': [{
636 'abs_path': '/home/ergo/venvs/appenlight/local/lib/python2.7/site-packages/django/core/handlers/base.py',
637 'context_line': 'response = wrapped_callback(request, *callback_args, **callback_kwargs)',
638 'filename': 'django/core/handlers/base.py',
639 'function': 'get_response',
640 'in_app': False,
641 'lineno': 111,
642 'module': 'django.core.handlers.base',
643 'post_context': [
644 ' except Exception as e:',
645 ' # If the view raised an exception, run it through exception',
646 ' # middleware, and if the exception middleware returns a',
647 ' # response, use that. Otherwise, reraise the exception.',
648 ' for middleware_method in self._exception_middleware:'],
649 'pre_context': [
650 ' break',
651 '',
652 ' if response is None:',
653 ' wrapped_callback = self.make_view_atomic(callback)',
654 ' try:'],
655 'vars': {
656 'callback': '<function error from djangoapp.views at 0x7fe7c9f2cb90>',
657 'callback_args': [],
658 'callback_kwargs': {},
659 'e': "Exception(u'test 500 \\u0142\\xf3\\u201c\\u0107\\u201c\\u0107\\u017c\\u0105',)",
660 'middleware_method': '<bound method MessageMiddleware.process_request of <django.contrib.messages.middleware.MessageMiddleware object at 0x7fe7c8b0c950>>',
661 'request': '<WSGIRequest at 0x140633490316304>',
662 'resolver': "<RegexURLResolver 'djangoapp.urls' (None:None) ^/>",
663 'resolver_match': "ResolverMatch(func=<function error at 0x7fe7c9f2cb90>, args=(), kwargs={}, url_name='error', app_name='None', namespace='')",
664 'response': None,
665 'self': '<django.core.handlers.wsgi.WSGIHandler object at 0x7fe7cf75a790>',
666 'urlconf': "'djangoapp.urls'",
667 'wrapped_callback': '<function error from djangoapp.views at 0x7fe7c9f2cb90>'}},
668 {
669 'abs_path': '/home/ergo/IdeaProjects/django_raven/djangoapp/views.py',
670 'context_line': "raise Exception(u'test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105')",
671 'filename': 'djangoapp/views.py',
672 'function': 'error',
673 'in_app': False,
674 'lineno': 84,
675 'module': 'djangoapp.views',
676 'post_context': [
677 '',
678 '',
679 'def notfound(request):',
684 "culprit": "djangoapp.views in error",
685 "event_id": "9fae652c8c1c4d6a8eee09260f613a98",
686 "exception": {
687 "values": [
688 {
689 "module": "exceptions",
690 "stacktrace": {
691 "frames": [
692 {
693 "abs_path": "/home/ergo/venvs/appenlight/local/lib/python2.7/site-packages/django/core/handlers/base.py",
694 "context_line": "response = wrapped_callback(request, *callback_args, **callback_kwargs)",
695 "filename": "django/core/handlers/base.py",
696 "function": "get_response",
697 "in_app": False,
698 "lineno": 111,
699 "module": "django.core.handlers.base",
700 "post_context": [
701 " except Exception as e:",
702 " # If the view raised an exception, run it through exception",
703 " # middleware, and if the exception middleware returns a",
704 " # response, use that. Otherwise, reraise the exception.",
705 " for middleware_method in self._exception_middleware:",
706 ],
707 "pre_context": [
708 " break",
709 "",
710 " if response is None:",
711 " wrapped_callback = self.make_view_atomic(callback)",
712 " try:",
713 ],
714 "vars": {
715 "callback": "<function error from djangoapp.views at 0x7fe7c9f2cb90>",
716 "callback_args": [],
717 "callback_kwargs": {},
718 "e": "Exception(u'test 500 \\u0142\\xf3\\u201c\\u0107\\u201c\\u0107\\u017c\\u0105',)",
719 "middleware_method": "<bound method MessageMiddleware.process_request of <django.contrib.messages.middleware.MessageMiddleware object at 0x7fe7c8b0c950>>",
720 "request": "<WSGIRequest at 0x140633490316304>",
721 "resolver": "<RegexURLResolver 'djangoapp.urls' (None:None) ^/>",
722 "resolver_match": "ResolverMatch(func=<function error at 0x7fe7c9f2cb90>, args=(), kwargs={}, url_name='error', app_name='None', namespace='')",
723 "response": None,
724 "self": "<django.core.handlers.wsgi.WSGIHandler object at 0x7fe7cf75a790>",
725 "urlconf": "'djangoapp.urls'",
726 "wrapped_callback": "<function error from djangoapp.views at 0x7fe7c9f2cb90>",
727 },
728 },
729 {
730 "abs_path": "/home/ergo/IdeaProjects/django_raven/djangoapp/views.py",
731 "context_line": "raise Exception(u'test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105')",
732 "filename": "djangoapp/views.py",
733 "function": "error",
734 "in_app": False,
735 "lineno": 84,
736 "module": "djangoapp.views",
737 "post_context": [
738 "",
739 "",
740 "def notfound(request):",
680 741 " raise Http404('404 appenlight exception test')",
681 ''],
682 'pre_context': [
683 ' c.execute("INSERT INTO stocks VALUES (\'2006-01-05\',\'BUY\',\'RHAT\',100,35.14)")',
684 ' c.execute("INSERT INTO stocks VALUES (\'2006-01-05\',\'BUY\',\'RHAT\',100,35.14)")',
685 ' conn.commit()',
686 ' c.close()',
687 " request.POST.get('DUPA')"],
688 'vars': {
689 'c': '<sqlite3.Cursor object at 0x7fe7c82af8f0>',
690 'conn': '<sqlite3.Connection object at 0x7fe7c8b23bf8>',
691 'request': '<WSGIRequest at 0x140633490316304>'}}]},
692 'type': 'Exception',
693 'value': 'test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105'}]},
694 'extra': {
695 'sys.argv': ["'manage.py'", "'runserver'"]},
696 'level': 40,
697 'message': 'Exception: test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105',
698 'modules': {'django': '1.7.1',
699 'python': '2.7.6',
700 'raven': '5.9.2'},
701 'platform': 'python',
702 'project': 'sentry',
703 'release': 'test',
704 'request': {'cookies': {
705 'appenlight': 'X'},
706 'data': None,
707 'env': {'REMOTE_ADDR': '127.0.0.1',
708 'SERVER_NAME': 'localhost',
709 'SERVER_PORT': '8000'},
710 'headers': {
711 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
712 'Accept-Encoding': 'gzip, deflate, sdch',
713 'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6',
714 'Connection': 'keep-alive',
715 'Content-Length': '',
716 'Content-Type': 'text/plain',
717 'Cookie': 'appenlight=X',
718 'Dnt': '1',
719 'Host': '127.0.0.1:8000',
720 'Upgrade-Insecure-Requests': '1',
721 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36'},
722 'method': 'GET',
723 'query_string': '',
724 'url': 'http://127.0.0.1:8000/error'},
725 'server_name': 'ergo-virtual-machine',
726 'tags': {'site': 'example.com'},
727 'time_spent': None,
728 'timestamp': now.strftime('%Y-%m-%dT%H:%M:%SZ')}
742 "",
743 ],
744 "pre_context": [
745 " c.execute(\"INSERT INTO stocks VALUES ('2006-01-05','BUY','RHAT',100,35.14)\")",
746 " c.execute(\"INSERT INTO stocks VALUES ('2006-01-05','BUY','RHAT',100,35.14)\")",
747 " conn.commit()",
748 " c.close()",
749 " request.POST.get('DUPA')",
750 ],
751 "vars": {
752 "c": "<sqlite3.Cursor object at 0x7fe7c82af8f0>",
753 "conn": "<sqlite3.Connection object at 0x7fe7c8b23bf8>",
754 "request": "<WSGIRequest at 0x140633490316304>",
755 },
756 },
757 ]
758 },
759 "type": "Exception",
760 "value": "test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105",
761 }
762 ]
763 },
764 "extra": {"sys.argv": ["'manage.py'", "'runserver'"]},
765 "level": 40,
766 "message": "Exception: test 500 \u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105",
767 "modules": {"django": "1.7.1", "python": "2.7.6", "raven": "5.9.2"},
768 "platform": "python",
769 "project": "sentry",
770 "release": "test",
771 "request": {
772 "cookies": {"appenlight": "X"},
773 "data": None,
774 "env": {
775 "REMOTE_ADDR": "127.0.0.1",
776 "SERVER_NAME": "localhost",
777 "SERVER_PORT": "8000",
778 },
779 "headers": {
780 "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
781 "Accept-Encoding": "gzip, deflate, sdch",
782 "Accept-Language": "en-US,en;q=0.8,pl;q=0.6",
783 "Connection": "keep-alive",
784 "Content-Length": "",
785 "Content-Type": "text/plain",
786 "Cookie": "appenlight=X",
787 "Dnt": "1",
788 "Host": "127.0.0.1:8000",
789 "Upgrade-Insecure-Requests": "1",
790 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36",
791 },
792 "method": "GET",
793 "query_string": "",
794 "url": "http://127.0.0.1:8000/error",
795 },
796 "server_name": "ergo-virtual-machine",
797 "tags": {"site": "example.com"},
798 "time_spent": None,
799 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%SZ"),
800 }
729 801
730 802
731 803 SENTRY_JS_PAYLOAD_7 = {
732 "project": "sentry", "logger": "javascript",
733 "platform": "javascript", "request": {"headers": {
734 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"},
735 "url": "http://localhost:6543/test/js_error#/"},
736 "exception": {"values": [{"type": "ReferenceError",
804 "project": "sentry",
805 "logger": "javascript",
806 "platform": "javascript",
807 "request": {
808 "headers": {
809 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"
810 },
811 "url": "http://localhost:6543/test/js_error#/",
812 },
813 "exception": {
814 "values": [
815 {
816 "type": "ReferenceError",
737 817 "value": "fateqtwetew is not defined",
738 "stacktrace": {"frames": [{
818 "stacktrace": {
819 "frames": [
820 {
739 821 "filename": "https://cdn.ravenjs.com/2.0.0/angular/raven.min.js",
740 822 "lineno": 1,
741 823 "colno": 4466,
742 824 "function": "c",
743 "in_app": False},
825 "in_app": False,
826 },
744 827 {
745 828 "filename": "http://localhost:6543/test/js_error",
746 829 "lineno": 47,
747 830 "colno": 19,
748 831 "function": "?",
749 "in_app": True}]}}]},
832 "in_app": True,
833 },
834 ]
835 },
836 }
837 ]
838 },
750 839 "culprit": "http://localhost:6543/test/js_error",
751 840 "message": "ReferenceError: fateqtwetew is not defined",
752 841 "extra": {"session:duration": 5009},
753 "event_id": "2bf514aaf0e94f35a8f435a0d29a888b"}
842 "event_id": "2bf514aaf0e94f35a8f435a0d29a888b",
843 }
754 844
755 845 SENTRY_JS_PAYLOAD_7_2 = {
756 "project": "sentry", "logger": "javascript",
757 "platform": "javascript", "request": {"headers": {
758 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"},
759 "url": "http://localhost:6543/#/report/927/9558"},
760 "exception": {"values": [{"type": "Error",
846 "project": "sentry",
847 "logger": "javascript",
848 "platform": "javascript",
849 "request": {
850 "headers": {
851 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"
852 },
853 "url": "http://localhost:6543/#/report/927/9558",
854 },
855 "exception": {
856 "values": [
857 {
858 "type": "Error",
761 859 "value": "[$injector:modulerr] http://errors.angularjs.org/1.5.0-rc.0/$injector/modulerr?p0=appenlight&p1=Erro…",
762 "stacktrace": {"frames": [{
860 "stacktrace": {
861 "frames": [
862 {
763 863 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
764 864 "lineno": 1647,
765 865 "colno": 112,
766 866 "function": "?",
767 "in_app": True},
867 "in_app": True,
868 },
768 869 {
769 870 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
770 871 "lineno": 1363,
771 872 "colno": 41,
772 873 "function": "be",
773 "in_app": True},
874 "in_app": True,
875 },
774 876 {
775 877 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
776 878 "lineno": 1364,
777 879 "colno": 225,
778 880 "function": "zc",
779 "in_app": True},
881 "in_app": True,
882 },
780 883 {
781 884 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
782 885 "lineno": 1363,
783 886 "colno": 421,
784 887 "function": "c",
785 "in_app": True},
888 "in_app": True,
889 },
786 890 {
787 891 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
788 892 "lineno": 1386,
789 893 "colno": 360,
790 894 "function": "fb",
791 "in_app": True},
895 "in_app": True,
896 },
792 897 {
793 898 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
794 899 "lineno": 1383,
795 900 "colno": 49,
796 901 "function": "g",
797 "in_app": True},
902 "in_app": True,
903 },
798 904 {
799 905 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
800 906 "lineno": 1351,
801 907 "colno": 344,
802 908 "function": "n",
803 "in_app": True},
909 "in_app": True,
910 },
804 911 {
805 912 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
806 913 "lineno": 1383,
807 914 "colno": 475,
808 915 "function": "?",
809 "in_app": True},
916 "in_app": True,
917 },
810 918 {
811 919 "filename": "http://localhost:6543/static/js/appenlight.js?rev=752",
812 920 "lineno": 1350,
813 921 "colno": 421,
814 922 "function": "?",
815 "in_app": True}]}}]},
923 "in_app": True,
924 },
925 ]
926 },
927 }
928 ]
929 },
816 930 "culprit": "http://localhost:6543/static/js/appenlight.js?rev=752",
817 931 "message": "Error: [$injector:modulerr] http://errors.angularjs.org/1.5.0-rc.0/$injector/modulerr?p0=appenlight&…",
818 932 "extra": {"session:duration": 330},
819 "event_id": "c50b5b6a13994f54b1d8da0c2e0e767a"}
933 "event_id": "c50b5b6a13994f54b1d8da0c2e0e767a",
934 }
820 935
821 936 SENTRY_LOG_PAYLOAD_7 = {
822 "project": "sentry", "sentry.interfaces.Message": {
823 "message": "TEST from django logging", "params": []},
937 "project": "sentry",
938 "sentry.interfaces.Message": {"message": "TEST from django logging", "params": []},
824 939 "server_name": "ergo-virtual-machine",
825 940 "culprit": "testlogger in index",
826 "extra": {"thread": 139723601139456, "process": 24645,
941 "extra": {
942 "thread": 139723601139456,
943 "process": 24645,
827 944 "sys.argv": ["'manage.py'", "'runserver'"],
828 "price": 6, "threadName": "'Thread-1'",
945 "price": 6,
946 "threadName": "'Thread-1'",
829 947 "filename": "'views.py'",
830 948 "processName": "'MainProcess'",
831 "tag": "'extra'", "dupa": True, "lineno": 22,
949 "tag": "'extra'",
950 "dupa": True,
951 "lineno": 22,
832 952 "asctime": "'2016-01-18 05:24:29,001'",
833 "pathname": "'/home/ergo/IdeaProjects/django_raven/djangoapp/views.py'"},
953 "pathname": "'/home/ergo/IdeaProjects/django_raven/djangoapp/views.py'",
954 },
834 955 "event_id": "9a6172f2e6d2444582f83a6c333d9cfb",
835 "timestamp": now.strftime('%Y-%m-%dT%H:%M:%SZ'),
956 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%SZ"),
836 957 "tags": {"site": "example.com"},
837 "modules": {"python": "2.7.6", "raven": "5.9.2",
838 "django": "1.7.1"}, "time_spent": None,
839 "platform": "python", "release": "test",
840 "logger": "testlogger", "level": 50,
841 "message": "TEST from django logging"}
958 "modules": {"python": "2.7.6", "raven": "5.9.2", "django": "1.7.1"},
959 "time_spent": None,
960 "platform": "python",
961 "release": "test",
962 "logger": "testlogger",
963 "level": 50,
964 "message": "TEST from django logging",
965 }
842 966
843 967 METRICS_PAYLOAD = {
844 968 "namespace": "some.monitor",
845 "timestamp": now.strftime('%Y-%m-%dT%H:%M:%S.0'),
969 "timestamp": now.strftime("%Y-%m-%dT%H:%M:%S.0"),
846 970 "server_name": "server.name",
847 "tags": [["usage_foo", 15.5],
848 ["usage_bar", 63],
849 ]
971 "tags": [["usage_foo", 15.5], ["usage_bar", 63]],
850 972 }
851 973
852 974
853 975 SENTRY_PYTHON_ENCODED = b'x\x9c\xedXmo\xdbF\x12\xfe+\x0b\xdd\x07I9\x89/z\x97[\x05p\x1d5\x0ej\'\x81,\xa7\xed\xd59bE\x8d$V|\xcb\xeeR\x96k\xf8\xbf\xdf\xcc.II\xb6\xdc\xf4\xd2\x16\xb8\x03\xa2 \x06\xb9\xdc\x9d\x9dy\xe6\x99y\x96\xbc\xaf\xa4"\xf9\x15|U9a\x15\t\xb1\x12w\x95\x06\xab\xc0\xd6\x87T\x05I\x8c\xc3\xf7\x95\r\x0f3\x90x\xf9\xcb}E*\xee\xaf\x95\xe0>\xe8G\x0b\xc1\xa3\xe2\xd1"\x8b\xfd|Me\t\xca\x13 \xd3$\x96@\x06\xf9Lz)W+zf\xaf\x92\x08l\x10\xcb\xc4\xde@\xbc\x91vz\xa7VI\xdc\xb2\xc3\xc4\xe7\xa1\x1d\x06\xb3b\xc4\xea\xdb2P\xd0LqO\xbe\x04i\xcf\x7f\xe51.\xf3\x13\x01\xf6\x8a\xc7\xf3\x10\x84\xb4g\\\x82\x95j\xbfS\x01\x9e\x9f\xc4\n\xb6\x14\xd0/\x15\xb6\xf7\x0b\x16\xac\xf0\x88\x05\x92\xbdMb8\xa15\xec\xd1\xefV\xf04\x85\xb9\x87\xbe\x843\xdc\x98\x8d\x98\x84paE|\r\xde&\x80[\x8f\xab$\n\xfcZ1\xa1~\xcc\n\x02y\xd4:\xfdJ7FO6\xab\t\xf8\x84X\xab\x06{Q\x0cy\\,%\xde\xef\x06\xd6\xb74tt[\x9386.\xf2\xc7\xb8d\x18\xe6G\xc2&\x91\xea\x00\x9c\xc7\xeb\xff\xc1\xce\x92(\ry\x10\x13Vj\x05\x8c\xa2EoU&b\x98k\xc4X\x8d3?\x89"\xb4\x0cB$\xa2n=\xb6\xf2Ga\xc6y\x81\x0cb\xe4S\xecC\x89e\x83\xa9\xbb\x14\xa4\xf5}\xce\xa5)\xde\xd5O\x8cw\xdf\x7f\xf7\xe19DuZb\xa4"BZ\x98\xb2<=\xe2y:\xfa\r\x17R3\x96x[)\xf1\xa9eU\x85p\xb3\xae\xe3\xb0\x9b\x9b\xccq;\xad\x9b\x9b\xed\xa2\x8d\xd7-\xc7\xf5\xf5\x90\xd3\x7fr\xe7\xb8\xfd\xfc\xae[m\xe8D\x1cd\x8b\xe0\xa5M\x11\x88$\xdc\x80\xf0"\xae|\xcd\xfdI>rI\x035\xaa\x98\x91\xe14\xd2\xc0\xa2(\xa4\xa5qm0\xb23\xaa\xd5\x1b\xccd{t\xff\xd0`\x99\x08uL\xa3bN\x9a\xea{9\xa2\xed\xf4\x15\x96\x8a\xbe\xd5N\x15\x89\xf0\x02\x89\xd5\x18\xcfA\xc0\x1c\xbdX\xf0P\x02>\x8e\x829V\x10\x9a\x07/\x02,8zV\xf9v\x96d\xf1\x9c\x99\x01v\tRb\xe5]\x963-\xec\x17\xb8\x03\xd9\xd3De\xc9\x82}kB\xb0\x88\\"\x98Y\x91Y$\xad\xdd\x06\xd6\x13C,\x99Q\xdfa\\1g\xdb_\xb4{\xdd\xb6\x03}\x7f\xe8\xbc|I\xaeS\xc9iwJ\xdbh\xa4(y\xebV.\x03\xeb\xc7\xab\xd7o\xce\xcd\xc8ScC\xd7\x1f\xf6\xba\xceK\x03\x83vU\x9b\xa3E\x93\xdcu=\xdbm\x0f\x07}\xb75p;\xbdA\xabc\x16\x14\xc9\xd4+\x8a\xb6f\x08\xcf\x16"\x89\xd8\xa3\x9c\xed\x07\xe1\xf7\x06\x83\xce@\x9by\\\xdc\x7f\xd2\\\xc1&mf\x02K\xd8^O.\nB\xb1\xea\xce\x08\xd2DVYM\x97\x1e\xfd\xa9\xb3\x7f\xdb\x07q\xe5\x1d\x84\xea\xe1a\x8f&x\x1fga\x88#h\x01\x93\xa9\x13\xf0\xd8n\x85VD\xc9<\x0bu%\x1dM\x0fud\xdao\x11\x84@\xac\xdcM|\xbeu\x87A\x0cq\x823\xdd\xce\x10o\x83\xd8\xc3-\xf7\xc8\x9aw.\x8f\xe6\x91\xbd\xcf4V\xdd\xb2\x0b\xae\x96r\xe6\xcd\xee\xbc\x1d)kh7\xe7F\x9d\xc2\xfa\x9f\x97\xb0\xfd\xdfL\x00_\xd3\x82/m\xc0\x7f\xa1\xce\x1d\x95\x97\xc73\x9f\x91\xa6\xcfk\xe4\x7f\x9d\xca#\xa0\xfc\x8d\xda\xf6U]\xbe\xaa\xcbWu\xf9\xffQ\x97\xfe_\xa0.\x7f\xea\xd8\xfeDit\xae~Gbn\x13\xb1\xd6\xa5\x97\x8b\x87\'8\xaa\x8e]Bg\x9b\xd2~^?|\x0b\xb6\xe0g\nj7\x957o\xaf\xc6\x93){\xf3v\xfa\x8eI\x95\xf8k\xc9>\x9c^\\\x8f\xafX\xad\xdar\x9c^\xd3q\x9b\xd4w\xaa\xdf]\xff\x8c\x7f\'\xe7\xa7\xd3j\xc3u\x9cF\xbbk\xb9\x9d\xfaM\xa5\x94\x81\xbf\xc9j\x12\xc7\x16\xb5\xe1@\xd5\xf6\xb6\xf2\xc3D\xc2n 
\xc7\xdbz\xff\xeejj\xa1R\xd7\xaa\xaf\xae\xdf\x9fV\xeb\xcf\xbf\xe9\xd0\xff9,X\x9c\xa8\x05\xf5\xa0"e\xf5R\x82\x04\x0f0\xb9\xe7J\xa5\x1d\xa7S\xab\xe2\x1fSE\xd8^\xb1\x94J\xe1a\xd4\xd2\xabFe\x0ez\xbf\xafKG~\nQ\xef\xdb\xd6Y&dr\xa4u\xb4\x1c\xde\x99\x7fq\xeb@p\x0ew\xc1\x010\x15\x7f\xa4\xe3\xcd\x17\xfd>.<VSe;8^I\x8fYU\xd6\xcf\xa0\xfbG\xcb\xc7\xc0y\\\x0b\x8d\x14f\x8e\x83Zh\xc4\xcfh\xdf\xc1\xb5\x96A\xa3\x82X4\x1f)\nz9<P\xd8\xcaAhe\x8etT\xfa\xb3\x05\r\x7f\xf1\xbe\xf9\xae~\x16\xa6"PG\xc0cA\xdei\x8d\xa8\x08R\xe3\x02H*\xdd\xe6&\x10*\xe3a3\xe2\xfe\x8a\xb0\xd1\xdfV\x94\xe0\x9a0\xf2NZ\xd8\x126\x9a\xa3\xd5\x88\xc7(a\x88w\x95fUE\x16\x1b\x83\xd5\x8av\x02\xb0\xe4\x95\x17h\x15u\x17\xbd\xe1\x00\x86>\xcc\x16\xb3N\xdb\x1f\xce\x16=\x07\xda\xb3\xd6\x00|\x7f\x01\xd0=\xe4\x1623I\xd6\x01\x18\x96R\x9f\xcf\x84\xfe|Sq55\xb0\xf1\xd2\xcd\n\x89\x7fb\xdbn\xabo9\xf8\xcf=\x198\x8ec\x97\xd1\xad\x80\xa3\xc2\x1b\x1bg\x94\xeeX5/ ^\x9anE3N}B\xbfy\x81\x08e\x18\x89\xc6 n^_5 \xfe\xe6\xd3\xc8\xb1\x06\x8d4\xd4\x17\xbd\xbd\xd9\xe3\xd8O\xe6A\xbc\xd4\'\xee\xdf\x82\xb4\xc1\xb0HC\xae\x90Ur\x8e\xa7\x1a\x9c\xb9\xe38MZ\x03\xa4M\x1e\x06\x1b\xd8Y1I\xde*{\xa5\xa2\xb0\x81\xd9\t\x03\x9f\xd3\x02{K#\xff\xdc>\x1e\x8d\x8c#\xc3F\x10\xa1\xa7\xf6-\xcc\xd2\xc6\x0b\xfb\x85q\x93\xec^\xa7K\x81\xf16\xdf`\x12\xfcL@3/Mi`\xc3\x19\xafbU^\x9f\'\xa6\x88\x0f\xb13\xbe\x13\xf2\xf4\xac\xc0}\xa4W\x9c!\x1f\xa0I8\x8aD\xa3\x1f\xf1m\x13]\x19\xe9U\xd7\x98\xf9\xe6\xe9\x12\xcc\x16\x97\xc9oA\x18r\xbbk9\xac\xf6\x93\xeb~\xc3.\x828\xdb\xb2\xed\xa0\xe7\xf5:uv\x8a\x91\xc1\x8f0\xfb!Pv\xb7\xdd\xb7\xda=V\xfb\xe1|zy\xd1`a\xb0\x06\xf6\x1a\xfcuRgg+A\x8a\xd2%\x07[\xbd\x9ek\r;\xec\x8a/\xb8\x08\xf2U9\xd6:\xb3\xd3\xbc\xd04\xaa\xfa\xdc\xac\xa9\x82\xef:\x9a\x00\xd8\xec?\x8c\'\xde\xdb\xd3\xcb1\xcd\xd2o=+\x02\x01\xe7\xe4\xcf\xde\xbf\x9bL\xe9Y\x81\xc4d|\xf9n:\xf6N_\xbd\x9a\x1c@\xa5\xed"\xb6\xe2\xce\x93x\xbe3L\xd0\xcd\x9a+\xbe;3\xec\x8e\x90\xaf\xc7S\xbd&\xc4\x8a \xe8:N\xd9\x03\x0c;\xcd\x9b\x17M\xc5\xb7/C6-\x984Bjci\x7fL%kW\xac!\xce\xd2\xed%\x88\xc0\x93\xa9\xc1=\xdf\x18\x83G\xc1\x10\x11\xcd\xcc-\xe73\xa5\xe2Q\xaa\xb7q\\\x14\xb8n\xd3\xe9L\xdd\xdeI\xc79i\r\xffE\x93\xe8\x15m\xee\x8b,\x9a\xc9\'\xdfQ\x91\x89\xb0L\xc4]\xd1\x9f\xc2d\xb9\x04]hE\\\xbbc\xc1\xfef\xa8\x06\xad6b\xda\x1aZ\x9dn\xbf7\xdc\x01u\xbf\xd7Y\xb0]\xe9\n\xef\xd1j\xbe4\xbd\x91\x1e6\n\xb3\'\xf8\xe6\x96\xc1\x03E=\xcf\x04\xcf\xab\xab\x04[\x1f\xa7i\xd9t|5\xdd?F2r\x94\xb2\xb4\xd7\x8d\xb1by\x16*s\xb0\xd9\x0f\xcc\xc0\xbe\x1f\xd3\x1cf\xd9\xf2wc\x1at\x9d\xfe^P\x9fu\xf0\n\xdf<\xd0\x1f\x96\x0f\xd1\x1bC\xa8\xdb\x12\xeb\xf6\xfbL%\xecH_\x1b86O\x03\xdb|\xef\xb6\xf1\xbc\x82\xa7\xc6\xa3\x01}4\x07\xd8\x10\xb8,\x95\xa4r\xb8\x7f)E\'\xec\xcbu\xc6\xa4\xc9\xb0\x84>\x17\x98\x84!:!\xd0YH\x93S\xce\xd7\x86E\xd8\x85\xf3^\xb8cs!:\x1a\xf1f\xce\xd3\x87\x87\xff\x00`\xb1k\xbd'
854 976
855 SENTRY_RUBY_ENCODED = b'eJzVVttu4zYQ/RVCfUgLRNfYsi002z5sf6DoU7cLgyJHEmOKFEjKW6+Rf++QkmM7ziJF0T7UMBzNhZzLOTPKMYI9KLcVPKoioDndLAvIH5YlrykHVpd8sSlhXfNstWTRfdSDtbQFdP4djP4o9sIKrX4xRpuKcBQ5cFIfSIa+TqC3o/2A3kWWl3G2jLPFb3lZLcuqyGePrR0wgahSo5T3kcR0ZFQtsvtoMPoJ2ItlkNQ12vR4mRnrA56Wum3BoIzPbJSDEegcOYyZmJoIRVJCHZFCAdmgiwWzB7NVtPfpg2l1vBfGjVTGPWUduvn6NB8l2Kg6RpQ5LK2nQoYgi6RISvSY1ANluxvlXsCXV8o9POn6RodRfJWvtAaYNvxGbcdh0MZd6k04XSZZ8oBiLVqESvTUK/PZpx6F5CHxB9QUQaP4VEqe5EWSn1XxqKSmPFiy4Mu0YqMxCEwcmn22AMrCekSTVeJRhj+BjY7WEuJO650Nvg/Bt6GGcupPZ8kmaFro4y+GDgMYOye78mqpayoDB7GkkL/I1yqIUxShY8zJaglBuQiFP1mtwi3rUI3UuqFdSG1qjMcuiGWy8CKyLXaHwcOLXcmuVDGnjk7dQqomWREI2gsltr77SIJivjmb9Z5oqFpi9HD7KJ0YqHHxoIPh5Kv0TrfCiJBpifX4Rgz2wE6prlE2E5/yOVUvxnOADHUPQSekvWBBkGMOA/KGOuBbSzEp8XWGODsfirnuw21CtbNt9WLrXC/jbx11Aq5D7nz/cw+Ar8JwzWazr9RTBRG28SXVFlNB+34inufGNI3KmUNsKK6fOai/wuLUsx24CaLyWhefWoDghVtdp03oUL4JDHCdAeob0cBMpaXXfhWq0TPdiujZc9YZBPuIj462dnoarc/4GMwMBj/Qfg3sqRx9Ez4dI0+UtzYfxgheaHu1Aqd1Mq0oXIVsh3EZ+Gsbg3touhYB3CK5HWaFckQICo1oE24VeSR3nXNDlablpqCroixZTetFttqs101RNIyXWVY2Bd1U7zn8nBcr3+Ukr9bZOksnBO7+UH6IFQ9/8eczkhMJfJ3Rd4TRwY0GCFUH8tIfS750gnWk8xOtCB8NMoxMGwHNRDfEdcKSWiKAIQAhOa7l7CIoxqO13Q7Udeft7ZfHqNiEQfQjDrL64Cccl7RCJFe4ENQWg0aVMyOEfeWT3XoHPPCrZ1VySpnrEG5+n2yN1i8vlQbnen4hnCI/37+BCCEcmlMPvtdz8Y/k+PzDXJb/iGaqdNvi2lY/XVgIqaEV6lvtnT4GLBuBBEpdnUUTFRYQhY9a3bkXLEKZBLy/vTpwuumEE3n8QOCm12mne0j9izBNcD5TP7qpn+EYxyRZTPLlnMZhSlMp6jTIaU0t3KA1Z3cB16Y849VQaW8BO1d6ECD538HrOoM3UAuXvMmEf43Pb4B5MUn4fj2D/h7Hw5X+n5Ybsm/eIfvlSP1zjv+/upX+x/35/Oy/fwFCRniE'
977 SENTRY_RUBY_ENCODED = b"eJzVVttu4zYQ/RVCfUgLRNfYsi002z5sf6DoU7cLgyJHEmOKFEjKW6+Rf++QkmM7ziJF0T7UMBzNhZzLOTPKMYI9KLcVPKoioDndLAvIH5YlrykHVpd8sSlhXfNstWTRfdSDtbQFdP4djP4o9sIKrX4xRpuKcBQ5cFIfSIa+TqC3o/2A3kWWl3G2jLPFb3lZLcuqyGePrR0wgahSo5T3kcR0ZFQtsvtoMPoJ2ItlkNQ12vR4mRnrA56Wum3BoIzPbJSDEegcOYyZmJoIRVJCHZFCAdmgiwWzB7NVtPfpg2l1vBfGjVTGPWUduvn6NB8l2Kg6RpQ5LK2nQoYgi6RISvSY1ANluxvlXsCXV8o9POn6RodRfJWvtAaYNvxGbcdh0MZd6k04XSZZ8oBiLVqESvTUK/PZpx6F5CHxB9QUQaP4VEqe5EWSn1XxqKSmPFiy4Mu0YqMxCEwcmn22AMrCekSTVeJRhj+BjY7WEuJO650Nvg/Bt6GGcupPZ8kmaFro4y+GDgMYOye78mqpayoDB7GkkL/I1yqIUxShY8zJaglBuQiFP1mtwi3rUI3UuqFdSG1qjMcuiGWy8CKyLXaHwcOLXcmuVDGnjk7dQqomWREI2gsltr77SIJivjmb9Z5oqFpi9HD7KJ0YqHHxoIPh5Kv0TrfCiJBpifX4Rgz2wE6prlE2E5/yOVUvxnOADHUPQSekvWBBkGMOA/KGOuBbSzEp8XWGODsfirnuw21CtbNt9WLrXC/jbx11Aq5D7nz/cw+Ar8JwzWazr9RTBRG28SXVFlNB+34inufGNI3KmUNsKK6fOai/wuLUsx24CaLyWhefWoDghVtdp03oUL4JDHCdAeob0cBMpaXXfhWq0TPdiujZc9YZBPuIj462dnoarc/4GMwMBj/Qfg3sqRx9Ez4dI0+UtzYfxgheaHu1Aqd1Mq0oXIVsh3EZ+Gsbg3touhYB3CK5HWaFckQICo1oE24VeSR3nXNDlablpqCroixZTetFttqs101RNIyXWVY2Bd1U7zn8nBcr3+Ukr9bZOksnBO7+UH6IFQ9/8eczkhMJfJ3Rd4TRwY0GCFUH8tIfS750gnWk8xOtCB8NMoxMGwHNRDfEdcKSWiKAIQAhOa7l7CIoxqO13Q7Udeft7ZfHqNiEQfQjDrL64Cccl7RCJFe4ENQWg0aVMyOEfeWT3XoHPPCrZ1VySpnrEG5+n2yN1i8vlQbnen4hnCI/37+BCCEcmlMPvtdz8Y/k+PzDXJb/iGaqdNvi2lY/XVgIqaEV6lvtnT4GLBuBBEpdnUUTFRYQhY9a3bkXLEKZBLy/vTpwuumEE3n8QOCm12mne0j9izBNcD5TP7qpn+EYxyRZTPLlnMZhSlMp6jTIaU0t3KA1Z3cB16Y849VQaW8BO1d6ECD538HrOoM3UAuXvMmEf43Pb4B5MUn4fj2D/h7Hw5X+n5Ybsm/eIfvlSP1zjv+/upX+x/35/Oy/fwFCRniE"
@@ -18,93 +18,101 b' import json'
18 18 from webtest import TestApp
19 19
20 20
21 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
22 'default_application')
21 @pytest.mark.usefixtures(
22 "base_app", "with_migrations", "clean_tables", "default_application"
23 )
23 24 class TestAPIReportsView(object):
24 25 def test_no_json_payload(self, base_app):
25 26 app = TestApp(base_app)
26 url_path = '/api/reports'
27 headers = {'x-appenlight-api-key': 'xxxx'}
28 res = app.post(url_path, {}, status=400,
29 headers=headers)
27 url_path = "/api/reports"
28 headers = {"x-appenlight-api-key": "xxxx"}
29 res = app.post(url_path, {}, status=400, headers=headers)
30 30
31 31 def test_wrong_json_payload(self, base_app):
32 32 app = TestApp(base_app)
33 url_path = '/api/reports'
34 headers = {'x-appenlight-api-key': 'xxxx'}
33 url_path = "/api/reports"
34 headers = {"x-appenlight-api-key": "xxxx"}
35 35 res = app.post(url_path, {}, status=400, headers=headers)
36 36
37 37 def test_correct_json_payload(self, base_app):
38 38 import appenlight.tests.payload_examples as payload_examples
39
39 40 app = TestApp(base_app)
40 url_path = '/api/reports'
41 headers = {'x-appenlight-api-key': 'xxxx'}
42 res = app.post_json(url_path, [payload_examples.PYTHON_PAYLOAD_0_5],
43 headers=headers)
41 url_path = "/api/reports"
42 headers = {"x-appenlight-api-key": "xxxx"}
43 res = app.post_json(
44 url_path, [payload_examples.PYTHON_PAYLOAD_0_5], headers=headers
45 )
44 46
45 47 def test_json_payload_wrong_key(self, base_app):
46 48 import appenlight.tests.payload_examples as payload_examples
49
47 50 app = TestApp(base_app)
48 url_path = '/api/reports'
49 res = app.post(url_path,
50 json.dumps([payload_examples.PYTHON_PAYLOAD_0_5]),
51 status=403)
51 url_path = "/api/reports"
52 res = app.post(
53 url_path, json.dumps([payload_examples.PYTHON_PAYLOAD_0_5]), status=403
54 )
52 55
53 56
54 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
55 'default_data', 'default_application')
57 @pytest.mark.usefixtures(
58 "base_app", "with_migrations", "clean_tables", "default_data", "default_application"
59 )
56 60 class TestRegistrationView(object):
57 61 def test_register_empty(self, base_app):
58 url_path = '/register'
62 url_path = "/register"
59 63 app = TestApp(base_app)
60 resp = app.get('/')
61 cookies = resp.headers.getall('Set-Cookie')
64 resp = app.get("/")
65 cookies = resp.headers.getall("Set-Cookie")
62 66 cookie = None
63 for name, value in [c.split('=', 1) for c in cookies]:
64 if name == 'XSRF-TOKEN':
65 cookie = value.split(';')[0]
66 headers = {'X-XSRF-TOKEN': cookie}
67 res = app.post(url_path,
68 params={'user_name': '',
69 'user_password': '',
70 'email': ''},
71 headers=headers)
72 assert 'This field is required.' in res
67 for name, value in [c.split("=", 1) for c in cookies]:
68 if name == "XSRF-TOKEN":
69 cookie = value.split(";")[0]
70 headers = {"X-XSRF-TOKEN": cookie}
71 res = app.post(
72 url_path,
73 params={"user_name": "", "user_password": "", "email": ""},
74 headers=headers,
75 )
76 assert "This field is required." in res
73 77
74 78 def test_register_proper(self, base_app):
75 url_path = '/register'
79 url_path = "/register"
76 80 app = TestApp(base_app)
77 resp = app.get('/')
78 cookies = resp.headers.getall('Set-Cookie')
81 resp = app.get("/")
82 cookies = resp.headers.getall("Set-Cookie")
79 83 cookie = None
80 for name, value in [c.split('=', 1) for c in cookies]:
81 if name == 'XSRF-TOKEN':
82 cookie = value.split(';')[0]
83 headers = {'X-XSRF-TOKEN': cookie}
84 res = app.post(url_path,
85 params={'user_name': 'user_foo',
86 'user_password': 'passbar',
87 'email': 'foobar@blablabla.com'},
84 for name, value in [c.split("=", 1) for c in cookies]:
85 if name == "XSRF-TOKEN":
86 cookie = value.split(";")[0]
87 headers = {"X-XSRF-TOKEN": cookie}
88 res = app.post(
89 url_path,
90 params={
91 "user_name": "user_foo",
92 "user_password": "passbar",
93 "email": "foobar@blablabla.com",
94 },
88 95 headers=headers,
89 status=302)
96 status=302,
97 )
90 98
91 99
92 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
93 'default_data', 'default_application')
100 @pytest.mark.usefixtures(
101 "base_app", "with_migrations", "clean_tables", "default_data", "default_application"
102 )
94 103 class TestRegistrationAuthTokenView(object):
95
96 104 def test_create_application_bad(self, base_app):
97 url_path = '/applications'
105 url_path = "/applications"
98 106 app = TestApp(base_app)
99 headers = {'x-appenlight-auth-token': ''}
100 app.post_json(url_path,
101 params={'resource_name': 'user_foo'},
102 headers=headers, status=403)
107 headers = {"x-appenlight-auth-token": ""}
108 app.post_json(
109 url_path, params={"resource_name": "user_foo"}, headers=headers, status=403
110 )
103 111
104 112 def test_create_application_proper(self, base_app):
105 url_path = '/applications'
113 url_path = "/applications"
106 114 app = TestApp(base_app)
107 headers = {'x-appenlight-auth-token': '1234'}
108 app.post_json(url_path,
109 params={'resource_name': 'user_foo'},
110 headers=headers, status=200)
115 headers = {"x-appenlight-auth-token": "1234"}
116 app.post_json(
117 url_path, params={"resource_name": "user_foo"}, headers=headers, status=200
118 )
@@ -37,12 +37,12 b' class DummyContext(object):'
37 37 pass
38 38
39 39
40 @pytest.mark.usefixtures('base_app')
40 @pytest.mark.usefixtures("base_app")
41 41 class BasicTest(object):
42 42 pass
43 43
44 44
45 @pytest.mark.usefixtures('base_app')
45 @pytest.mark.usefixtures("base_app")
46 46 class TestMigration(object):
47 47 def test_migration(self):
48 48 assert 1 == 1
@@ -53,108 +53,125 b' class TestSentryProto_7(object):'
53 53 import appenlight.tests.payload_examples as payload_examples
54 54 from appenlight.lib.enums import ParsedSentryEventType
55 55 from appenlight.lib.utils.sentry import parse_sentry_event
56
56 57 event_dict, event_type = parse_sentry_event(
57 payload_examples.SENTRY_LOG_PAYLOAD_7)
58 payload_examples.SENTRY_LOG_PAYLOAD_7
59 )
58 60 assert ParsedSentryEventType.LOG == event_type
59 assert event_dict['log_level'] == 'CRITICAL'
60 assert event_dict['message'] == 'TEST from django logging'
61 assert event_dict['namespace'] == 'testlogger'
62 assert event_dict['request_id'] == '9a6172f2e6d2444582f83a6c333d9cfb'
63 assert event_dict['server'] == 'ergo-virtual-machine'
64 assert event_dict['date'] == datetime.utcnow().date().strftime(
65 '%Y-%m-%dT%H:%M:%SZ')
66 tags = [('site', 'example.com'),
67 ('sys.argv', ["'manage.py'", "'runserver'"]),
68 ('price', 6),
69 ('tag', "'extra'"),
70 ('dupa', True),
71 ('project', 'sentry'),
72 ('sentry_culprit', 'testlogger in index'),
73 ('sentry_language', 'python'),
74 ('sentry_release', 'test')]
75 assert sorted(event_dict['tags']) == sorted(tags)
61 assert event_dict["log_level"] == "CRITICAL"
62 assert event_dict["message"] == "TEST from django logging"
63 assert event_dict["namespace"] == "testlogger"
64 assert event_dict["request_id"] == "9a6172f2e6d2444582f83a6c333d9cfb"
65 assert event_dict["server"] == "ergo-virtual-machine"
66 assert event_dict["date"] == datetime.utcnow().date().strftime(
67 "%Y-%m-%dT%H:%M:%SZ"
68 )
69 tags = [
70 ("site", "example.com"),
71 ("sys.argv", ["'manage.py'", "'runserver'"]),
72 ("price", 6),
73 ("tag", "'extra'"),
74 ("dupa", True),
75 ("project", "sentry"),
76 ("sentry_culprit", "testlogger in index"),
77 ("sentry_language", "python"),
78 ("sentry_release", "test"),
79 ]
80 assert sorted(event_dict["tags"]) == sorted(tags)
76 81
77 82 def test_report_payload(self):
78 83 import appenlight.tests.payload_examples as payload_examples
79 84 from appenlight.lib.enums import ParsedSentryEventType
80 85 from appenlight.lib.utils.sentry import parse_sentry_event
81 utcnow = datetime.utcnow().date().strftime('%Y-%m-%dT%H:%M:%SZ')
86
87 utcnow = datetime.utcnow().date().strftime("%Y-%m-%dT%H:%M:%SZ")
82 88 event_dict, event_type = parse_sentry_event(
83 payload_examples.SENTRY_PYTHON_PAYLOAD_7)
89 payload_examples.SENTRY_PYTHON_PAYLOAD_7
90 )
84 91 assert ParsedSentryEventType.ERROR_REPORT == event_type
85 assert event_dict['client'] == 'sentry'
86 assert event_dict[
87 'error'] == 'Exception: test 500 ' \
88 '\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105'
89 assert event_dict['language'] == 'python'
90 assert event_dict['ip'] == '127.0.0.1'
91 assert event_dict['request_id'] == '9fae652c8c1c4d6a8eee09260f613a98'
92 assert event_dict['server'] == 'ergo-virtual-machine'
93 assert event_dict['start_time'] == utcnow
94 assert event_dict['url'] == 'http://127.0.0.1:8000/error'
95 assert event_dict['user_agent'] == 'Mozilla/5.0 (X11; Linux x86_64) ' \
96 'AppleWebKit/537.36 (KHTML, ' \
97 'like Gecko) Chrome/47.0.2526.106 ' \
98 'Safari/537.36'
99 assert event_dict['view_name'] == 'djangoapp.views in error'
100 tags = [('site', 'example.com'), ('sentry_release', 'test')]
101 assert sorted(event_dict['tags']) == sorted(tags)
102 extra = [('sys.argv', ["'manage.py'", "'runserver'"]),
103 ('project', 'sentry')]
104 assert sorted(event_dict['extra']) == sorted(extra)
105 request = event_dict['request']
106 assert request['url'] == 'http://127.0.0.1:8000/error'
107 assert request['cookies'] == {'appenlight': 'X'}
108 assert request['data'] is None
109 assert request['method'] == 'GET'
110 assert request['query_string'] == ''
111 assert request['env'] == {'REMOTE_ADDR': '127.0.0.1',
112 'SERVER_NAME': 'localhost',
113 'SERVER_PORT': '8000'}
114 assert request['headers'] == {
115 'Accept': 'text/html,application/xhtml+xml,'
116 'application/xml;q=0.9,image/webp,*/*;q=0.8',
117 'Accept-Encoding': 'gzip, deflate, sdch',
118 'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6',
119 'Connection': 'keep-alive',
120 'Content-Length': '',
121 'Content-Type': 'text/plain',
122 'Cookie': 'appenlight=X',
123 'Dnt': '1',
124 'Host': '127.0.0.1:8000',
125 'Upgrade-Insecure-Requests': '1',
126 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) '
127 'AppleWebKit/537.36 (KHTML, like Gecko) '
128 'Chrome/47.0.2526.106 Safari/537.36'}
129 traceback = event_dict['traceback']
130 assert traceback[0]['cline'] == 'response = wrapped_callback(request, ' \
131 '*callback_args, **callback_kwargs)'
132 assert traceback[0]['file'] == 'django/core/handlers/base.py'
133 assert traceback[0]['fn'] == 'get_response'
134 assert traceback[0]['line'] == 111
135 assert traceback[0]['module'] == 'django.core.handlers.base'
136
137 assert traceback[1]['cline'] == "raise Exception(u'test 500 " \
138 "\u0142\xf3\u201c\u0107\u201c\u0107" \
92 assert event_dict["client"] == "sentry"
93 assert (
94 event_dict["error"] == "Exception: test 500 "
95 "\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105"
96 )
97 assert event_dict["language"] == "python"
98 assert event_dict["ip"] == "127.0.0.1"
99 assert event_dict["request_id"] == "9fae652c8c1c4d6a8eee09260f613a98"
100 assert event_dict["server"] == "ergo-virtual-machine"
101 assert event_dict["start_time"] == utcnow
102 assert event_dict["url"] == "http://127.0.0.1:8000/error"
103 assert (
104 event_dict["user_agent"] == "Mozilla/5.0 (X11; Linux x86_64) "
105 "AppleWebKit/537.36 (KHTML, "
106 "like Gecko) Chrome/47.0.2526.106 "
107 "Safari/537.36"
108 )
109 assert event_dict["view_name"] == "djangoapp.views in error"
110 tags = [("site", "example.com"), ("sentry_release", "test")]
111 assert sorted(event_dict["tags"]) == sorted(tags)
112 extra = [("sys.argv", ["'manage.py'", "'runserver'"]), ("project", "sentry")]
113 assert sorted(event_dict["extra"]) == sorted(extra)
114 request = event_dict["request"]
115 assert request["url"] == "http://127.0.0.1:8000/error"
116 assert request["cookies"] == {"appenlight": "X"}
117 assert request["data"] is None
118 assert request["method"] == "GET"
119 assert request["query_string"] == ""
120 assert request["env"] == {
121 "REMOTE_ADDR": "127.0.0.1",
122 "SERVER_NAME": "localhost",
123 "SERVER_PORT": "8000",
124 }
125 assert request["headers"] == {
126 "Accept": "text/html,application/xhtml+xml,"
127 "application/xml;q=0.9,image/webp,*/*;q=0.8",
128 "Accept-Encoding": "gzip, deflate, sdch",
129 "Accept-Language": "en-US,en;q=0.8,pl;q=0.6",
130 "Connection": "keep-alive",
131 "Content-Length": "",
132 "Content-Type": "text/plain",
133 "Cookie": "appenlight=X",
134 "Dnt": "1",
135 "Host": "127.0.0.1:8000",
136 "Upgrade-Insecure-Requests": "1",
137 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) "
138 "AppleWebKit/537.36 (KHTML, like Gecko) "
139 "Chrome/47.0.2526.106 Safari/537.36",
140 }
141 traceback = event_dict["traceback"]
142 assert (
143 traceback[0]["cline"] == "response = wrapped_callback(request, "
144 "*callback_args, **callback_kwargs)"
145 )
146 assert traceback[0]["file"] == "django/core/handlers/base.py"
147 assert traceback[0]["fn"] == "get_response"
148 assert traceback[0]["line"] == 111
149 assert traceback[0]["module"] == "django.core.handlers.base"
150
151 assert (
152 traceback[1]["cline"] == "raise Exception(u'test 500 "
153 "\u0142\xf3\u201c\u0107\u201c\u0107"
139 154 "\u017c\u0105')"
140 assert traceback[1]['file'] == 'djangoapp/views.py'
141 assert traceback[1]['fn'] == 'error'
142 assert traceback[1]['line'] == 84
143 assert traceback[1]['module'] == 'djangoapp.views'
144 assert sorted(traceback[1]['vars']) == sorted([
145 ('c',
146 '<sqlite3.Cursor object at 0x7fe7c82af8f0>'),
147 ('request',
148 '<WSGIRequest at 0x140633490316304>'),
149 ('conn',
150 '<sqlite3.Connection object at 0x7fe7c8b23bf8>')])
155 )
156 assert traceback[1]["file"] == "djangoapp/views.py"
157 assert traceback[1]["fn"] == "error"
158 assert traceback[1]["line"] == 84
159 assert traceback[1]["module"] == "djangoapp.views"
160 assert sorted(traceback[1]["vars"]) == sorted(
161 [
162 ("c", "<sqlite3.Cursor object at 0x7fe7c82af8f0>"),
163 ("request", "<WSGIRequest at 0x140633490316304>"),
164 ("conn", "<sqlite3.Connection object at 0x7fe7c8b23bf8>"),
165 ]
166 )
151 167
152 168
153 169 class TestAPIReports_0_5_Validation(object):
154 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
170 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
155 171 def test_no_payload(self, dummy_json):
156 172 import colander
157 173 from appenlight.validators import ReportListSchema_0_5
174
158 175 utcnow = datetime.utcnow()
159 176 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
160 177 with pytest.raises(colander.Invalid):
@@ -164,43 +181,46 b' class TestAPIReports_0_5_Validation(object):'
164 181 dummy_json = [{}]
165 182 import colander
166 183 from appenlight.validators import ReportListSchema_0_5
184
167 185 utcnow = datetime.utcnow()
168 186 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
169 187 with pytest.raises(colander.Invalid):
170 188 schema.deserialize(dummy_json)
171 189
172 190 def test_minimal_payload(self):
173 dummy_json = [{'report_details': [{}]}]
191 dummy_json = [{"report_details": [{}]}]
174 192 from appenlight.validators import ReportListSchema_0_5
193
175 194 utcnow = datetime.utcnow()
176 195 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
177 196
178 197 deserialized = schema.deserialize(dummy_json)
179 198
180 199 expected_deserialization = [
181 {'language': 'unknown',
182 'server': 'unknown',
183 'occurences': 1,
184 'priority': 5,
185 'view_name': '',
186 'client': 'unknown',
187 'http_status': 200,
188 'error': '',
189 'tags': None,
190 'username': '',
191 'traceback': None,
192 'extra': None,
193 'url': '',
194 'ip': None,
195 'start_time': utcnow,
196 'group_string': None,
197 'request': {},
198 'request_stats': None,
199 'end_time': None,
200 'request_id': '',
201 'message': '',
202 'slow_calls': [],
203 'user_agent': ''
200 {
201 "language": "unknown",
202 "server": "unknown",
203 "occurences": 1,
204 "priority": 5,
205 "view_name": "",
206 "client": "unknown",
207 "http_status": 200,
208 "error": "",
209 "tags": None,
210 "username": "",
211 "traceback": None,
212 "extra": None,
213 "url": "",
214 "ip": None,
215 "start_time": utcnow,
216 "group_string": None,
217 "request": {},
218 "request_stats": None,
219 "end_time": None,
220 "request_id": "",
221 "message": "",
222 "slow_calls": [],
223 "user_agent": "",
204 224 }
205 225 ]
206 226 assert deserialized == expected_deserialization
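The fully populated expected_deserialization above is produced by colander's missing= defaults: every field absent from the minimal payload is filled in during deserialize(). A minimal sketch of that mechanism (ReportSketch is a hypothetical schema for illustration, not the real ReportListSchema_0_5):

    import colander

    class ReportSketch(colander.MappingSchema):
        # missing= is what deserialize() returns when the key is absent
        http_status = colander.SchemaNode(colander.Int(), missing=200)
        error = colander.SchemaNode(colander.String(), missing="")

    assert ReportSketch().deserialize({}) == {"http_status": 200, "error": ""}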
@@ -208,60 +228,81 b' class TestAPIReports_0_5_Validation(object):'
208 228 def test_full_payload(self):
209 229 import appenlight.tests.payload_examples as payload_examples
210 230 from appenlight.validators import ReportListSchema_0_5
231
211 232 PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5)
212 233 utcnow = datetime.utcnow()
213 234 schema = ReportListSchema_0_5().bind(utcnow=utcnow)
214 PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1),
215 ("date",
216 utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]
235 PYTHON_PAYLOAD["tags"] = [
236 ("foo", 1),
237 ("action", "test"),
238 ("baz", 1.1),
239 ("date", utcnow.strftime("%Y-%m-%dT%H:%M:%S.0")),
240 ]
217 241 dummy_json = [PYTHON_PAYLOAD]
218 242 deserialized = schema.deserialize(dummy_json)[0]
219 assert deserialized['error'] == PYTHON_PAYLOAD['error']
220 assert deserialized['language'] == PYTHON_PAYLOAD['language']
221 assert deserialized['server'] == PYTHON_PAYLOAD['server']
222 assert deserialized['priority'] == PYTHON_PAYLOAD['priority']
223 assert deserialized['view_name'] == PYTHON_PAYLOAD['view_name']
224 assert deserialized['client'] == PYTHON_PAYLOAD['client']
225 assert deserialized['http_status'] == PYTHON_PAYLOAD['http_status']
226 assert deserialized['error'] == PYTHON_PAYLOAD['error']
227 assert deserialized['occurences'] == PYTHON_PAYLOAD['occurences']
228 assert deserialized['username'] == PYTHON_PAYLOAD['username']
229 assert deserialized['traceback'] == PYTHON_PAYLOAD['traceback']
230 assert deserialized['url'] == PYTHON_PAYLOAD['url']
231 assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
232 assert deserialized['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
233 PYTHON_PAYLOAD['start_time']
234 assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
235 assert deserialized['group_string'] is None
236 assert deserialized['request_stats'] == PYTHON_PAYLOAD['request_stats']
237 assert deserialized['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
238 PYTHON_PAYLOAD['end_time']
239 assert deserialized['request_id'] == PYTHON_PAYLOAD['request_id']
240 assert deserialized['message'] == PYTHON_PAYLOAD['message']
241 assert deserialized['user_agent'] == PYTHON_PAYLOAD['user_agent']
242 assert deserialized['slow_calls'][0]['start'].strftime(
243 '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
244 'start']
245 assert deserialized['slow_calls'][0]['end'].strftime(
246 '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
247 'end']
248 assert deserialized['slow_calls'][0]['statement'] == \
249 PYTHON_PAYLOAD['slow_calls'][0]['statement']
250 assert deserialized['slow_calls'][0]['parameters'] == \
251 PYTHON_PAYLOAD['slow_calls'][0]['parameters']
252 assert deserialized['slow_calls'][0]['type'] == \
253 PYTHON_PAYLOAD['slow_calls'][0]['type']
254 assert deserialized['slow_calls'][0]['subtype'] == \
255 PYTHON_PAYLOAD['slow_calls'][0]['subtype']
256 assert deserialized['slow_calls'][0]['location'] == ''
257 assert deserialized['tags'] == [
258 ('foo', 1), ('action', 'test'),
259 ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]
260
261
262 @pytest.mark.usefixtures('log_schema')
243 assert deserialized["error"] == PYTHON_PAYLOAD["error"]
244 assert deserialized["language"] == PYTHON_PAYLOAD["language"]
245 assert deserialized["server"] == PYTHON_PAYLOAD["server"]
246 assert deserialized["priority"] == PYTHON_PAYLOAD["priority"]
247 assert deserialized["view_name"] == PYTHON_PAYLOAD["view_name"]
248 assert deserialized["client"] == PYTHON_PAYLOAD["client"]
249 assert deserialized["http_status"] == PYTHON_PAYLOAD["http_status"]
250 assert deserialized["error"] == PYTHON_PAYLOAD["error"]
251 assert deserialized["occurences"] == PYTHON_PAYLOAD["occurences"]
252 assert deserialized["username"] == PYTHON_PAYLOAD["username"]
253 assert deserialized["traceback"] == PYTHON_PAYLOAD["traceback"]
254 assert deserialized["url"] == PYTHON_PAYLOAD["url"]
255 assert deserialized["ip"] == PYTHON_PAYLOAD["ip"]
256 assert (
257 deserialized["start_time"].strftime("%Y-%m-%dT%H:%M:%S.0")
258 == PYTHON_PAYLOAD["start_time"]
259 )
260 assert deserialized["ip"] == PYTHON_PAYLOAD["ip"]
261 assert deserialized["group_string"] is None
262 assert deserialized["request_stats"] == PYTHON_PAYLOAD["request_stats"]
263 assert (
264 deserialized["end_time"].strftime("%Y-%m-%dT%H:%M:%S.0")
265 == PYTHON_PAYLOAD["end_time"]
266 )
267 assert deserialized["request_id"] == PYTHON_PAYLOAD["request_id"]
268 assert deserialized["message"] == PYTHON_PAYLOAD["message"]
269 assert deserialized["user_agent"] == PYTHON_PAYLOAD["user_agent"]
270 assert (
271 deserialized["slow_calls"][0]["start"].strftime("%Y-%m-%dT%H:%M:%S.0")
272 == PYTHON_PAYLOAD["slow_calls"][0]["start"]
273 )
274 assert (
275 deserialized["slow_calls"][0]["end"].strftime("%Y-%m-%dT%H:%M:%S.0")
276 == PYTHON_PAYLOAD["slow_calls"][0]["end"]
277 )
278 assert (
279 deserialized["slow_calls"][0]["statement"]
280 == PYTHON_PAYLOAD["slow_calls"][0]["statement"]
281 )
282 assert (
283 deserialized["slow_calls"][0]["parameters"]
284 == PYTHON_PAYLOAD["slow_calls"][0]["parameters"]
285 )
286 assert (
287 deserialized["slow_calls"][0]["type"]
288 == PYTHON_PAYLOAD["slow_calls"][0]["type"]
289 )
290 assert (
291 deserialized["slow_calls"][0]["subtype"]
292 == PYTHON_PAYLOAD["slow_calls"][0]["subtype"]
293 )
294 assert deserialized["slow_calls"][0]["location"] == ""
295 assert deserialized["tags"] == [
296 ("foo", 1),
297 ("action", "test"),
298 ("baz", 1.1),
299 ("date", utcnow.strftime("%Y-%m-%dT%H:%M:%S.0")),
300 ]
301
302
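A note on the "%Y-%m-%dT%H:%M:%S.0" stamps used in the assertions above: the example payloads appear to carry sub-second precision only as a literal ".0", so the tests compare formatted strings instead of datetime objects. The round trip, in plain stdlib terms:

    from datetime import datetime

    now = datetime.utcnow()
    stamp = now.strftime("%Y-%m-%dT%H:%M:%S.0")   # microseconds dropped, ".0" appended
    parsed = datetime.strptime(stamp, "%Y-%m-%dT%H:%M:%S.%f")
    assert parsed == now.replace(microsecond=0)   # equal only after truncation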
303 @pytest.mark.usefixtures("log_schema")
263 304 class TestAPILogsValidation(object):
264 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
305 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
265 306 def test_no_payload(self, dummy_json, log_schema):
266 307 import colander
267 308
@@ -271,74 +312,81 b' class TestAPILogsValidation(object):'
271 312 def test_minimal_payload(self, log_schema):
272 313 dummy_json = [{}]
273 314 deserialized = log_schema.deserialize(dummy_json)[0]
274 expected = {'log_level': 'UNKNOWN',
275 'namespace': '',
276 'server': 'unknown',
277 'request_id': '',
278 'primary_key': None,
279 'date': datetime.utcnow(),
280 'message': '',
281 'tags': None}
282 assert deserialized['log_level'] == expected['log_level']
283 assert deserialized['message'] == expected['message']
284 assert deserialized['namespace'] == expected['namespace']
285 assert deserialized['request_id'] == expected['request_id']
286 assert deserialized['server'] == expected['server']
287 assert deserialized['tags'] == expected['tags']
288 assert deserialized['primary_key'] == expected['primary_key']
315 expected = {
316 "log_level": "UNKNOWN",
317 "namespace": "",
318 "server": "unknown",
319 "request_id": "",
320 "primary_key": None,
321 "date": datetime.utcnow(),
322 "message": "",
323 "tags": None,
324 }
325 assert deserialized["log_level"] == expected["log_level"]
326 assert deserialized["message"] == expected["message"]
327 assert deserialized["namespace"] == expected["namespace"]
328 assert deserialized["request_id"] == expected["request_id"]
329 assert deserialized["server"] == expected["server"]
330 assert deserialized["tags"] == expected["tags"]
331 assert deserialized["primary_key"] == expected["primary_key"]
289 332
290 333 def test_normal_payload(self, log_schema):
291 334 import appenlight.tests.payload_examples as payload_examples
335
292 336 deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0]
293 337 expected = payload_examples.LOG_EXAMPLES[0]
294 assert deserialized['log_level'] == expected['log_level']
295 assert deserialized['message'] == expected['message']
296 assert deserialized['namespace'] == expected['namespace']
297 assert deserialized['request_id'] == expected['request_id']
298 assert deserialized['server'] == expected['server']
299 assert deserialized['date'].strftime('%Y-%m-%dT%H:%M:%S.%f') == \
300 expected['date']
301 assert deserialized['tags'][0][0] == "tag_name"
302 assert deserialized['tags'][0][1] == "tag_value"
303 assert deserialized['tags'][1][0] == "tag_name2"
304 assert deserialized['tags'][1][1] == 2
338 assert deserialized["log_level"] == expected["log_level"]
339 assert deserialized["message"] == expected["message"]
340 assert deserialized["namespace"] == expected["namespace"]
341 assert deserialized["request_id"] == expected["request_id"]
342 assert deserialized["server"] == expected["server"]
343 assert deserialized["date"].strftime("%Y-%m-%dT%H:%M:%S.%f") == expected["date"]
344 assert deserialized["tags"][0][0] == "tag_name"
345 assert deserialized["tags"][0][1] == "tag_value"
346 assert deserialized["tags"][1][0] == "tag_name2"
347 assert deserialized["tags"][1][1] == 2
305 348
306 349 def test_normal_payload_date_without_microseconds(self, log_schema):
307 350 import appenlight.tests.payload_examples as payload_examples
351
308 352 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
309 LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime(
310 '%Y-%m-%dT%H:%M:%S')
353 LOG_EXAMPLE[0]["date"] = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
311 354 deserialized = log_schema.deserialize(LOG_EXAMPLE)
312 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M:%S') == \
313 LOG_EXAMPLE[0]['date']
355 assert (
356 deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M:%S")
357 == LOG_EXAMPLE[0]["date"]
358 )
314 359
315 360 def test_normal_payload_date_without_seconds(self, log_schema):
316 361 import appenlight.tests.payload_examples as payload_examples
362
317 363 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
318 LOG_EXAMPLE[0]['date'] = datetime.utcnow().date().strftime(
319 '%Y-%m-%dT%H:%M')
364 LOG_EXAMPLE[0]["date"] = datetime.utcnow().date().strftime("%Y-%m-%dT%H:%M")
320 365 deserialized = log_schema.deserialize(LOG_EXAMPLE)
321 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') == \
322 LOG_EXAMPLE[0]['date']
366 assert (
367 deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M") == LOG_EXAMPLE[0]["date"]
368 )
323 369
324 370 def test_payload_empty_date(self, log_schema):
325 371 import appenlight.tests.payload_examples as payload_examples
372
326 373 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
327 LOG_EXAMPLE[0]['date'] = None
374 LOG_EXAMPLE[0]["date"] = None
328 375 deserialized = log_schema.deserialize(LOG_EXAMPLE)
329 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None
376 assert deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M") is not None
330 377
331 378 def test_payload_no_date(self, log_schema):
332 379 import appenlight.tests.payload_examples as payload_examples
380
333 381 LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
334 LOG_EXAMPLE[0].pop('date', None)
382 LOG_EXAMPLE[0].pop("date", None)
335 383 deserialized = log_schema.deserialize(LOG_EXAMPLE)
336 assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None
384 assert deserialized[0]["date"].strftime("%Y-%m-%dT%H:%M") is not None
337 385
338 386
339 @pytest.mark.usefixtures('general_metrics_schema')
387 @pytest.mark.usefixtures("general_metrics_schema")
340 388 class TestAPIGeneralMetricsValidation(object):
341 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
389 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
342 390 def test_no_payload(self, dummy_json, general_metrics_schema):
343 391 import colander
344 392
@@ -346,32 +394,37 b' class TestAPIGeneralMetricsValidation(object):'
346 394 general_metrics_schema.deserialize(dummy_json)
347 395
348 396 def test_minimal_payload(self, general_metrics_schema):
349 dummy_json = [{'tags': [['counter_a', 15.5], ['counter_b', 63]]}]
397 dummy_json = [{"tags": [["counter_a", 15.5], ["counter_b", 63]]}]
350 398 deserialized = general_metrics_schema.deserialize(dummy_json)[0]
351 expected = {'namespace': '',
352 'server_name': 'unknown',
353 'tags': [('counter_a', 15.5), ('counter_b', 63)],
354 'timestamp': datetime.utcnow()}
355 assert deserialized['namespace'] == expected['namespace']
356 assert deserialized['server_name'] == expected['server_name']
357 assert deserialized['tags'] == expected['tags']
399 expected = {
400 "namespace": "",
401 "server_name": "unknown",
402 "tags": [("counter_a", 15.5), ("counter_b", 63)],
403 "timestamp": datetime.utcnow(),
404 }
405 assert deserialized["namespace"] == expected["namespace"]
406 assert deserialized["server_name"] == expected["server_name"]
407 assert deserialized["tags"] == expected["tags"]
358 408
359 409 def test_normal_payload(self, general_metrics_schema):
360 410 import appenlight.tests.payload_examples as payload_examples
411
361 412 dummy_json = [payload_examples.METRICS_PAYLOAD]
362 413 deserialized = general_metrics_schema.deserialize(dummy_json)[0]
363 expected = {'namespace': 'some.monitor',
364 'server_name': 'server.name',
365 'tags': [('usage_foo', 15.5), ('usage_bar', 63)],
366 'timestamp': datetime.utcnow()}
367 assert deserialized['namespace'] == expected['namespace']
368 assert deserialized['server_name'] == expected['server_name']
369 assert deserialized['tags'] == expected['tags']
414 expected = {
415 "namespace": "some.monitor",
416 "server_name": "server.name",
417 "tags": [("usage_foo", 15.5), ("usage_bar", 63)],
418 "timestamp": datetime.utcnow(),
419 }
420 assert deserialized["namespace"] == expected["namespace"]
421 assert deserialized["server_name"] == expected["server_name"]
422 assert deserialized["tags"] == expected["tags"]
370 423
371 424
372 @pytest.mark.usefixtures('request_metrics_schema')
425 @pytest.mark.usefixtures("request_metrics_schema")
373 426 class TestAPIRequestMetricsValidation(object):
374 @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
427 @pytest.mark.parametrize("dummy_json", ["", {}, [], None])
375 428 def test_no_payload(self, dummy_json, request_metrics_schema):
376 429 import colander
377 430
@@ -380,45 +433,58 b' class TestAPIRequestMetricsValidation(object):'
380 433
381 434 def test_normal_payload(self, request_metrics_schema):
382 435 import appenlight.tests.payload_examples as payload_examples
436
383 437 dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES
384 438 deserialized = request_metrics_schema.deserialize(dummy_json)[0]
385 expected = {'metrics': [('dir/module:func',
386 {'custom': 0.0,
387 'custom_calls': 0.0,
388 'main': 0.01664,
389 'nosql': 0.00061,
390 'nosql_calls': 23.0,
391 'remote': 0.0,
392 'remote_calls': 0.0,
393 'requests': 1,
394 'sql': 0.00105,
395 'sql_calls': 2.0,
396 'tmpl': 0.0,
397 'tmpl_calls': 0.0}),
398 ('SomeView.function',
399 {'custom': 0.0,
400 'custom_calls': 0.0,
401 'main': 0.647261,
402 'nosql': 0.306554,
403 'nosql_calls': 140.0,
404 'remote': 0.0,
405 'remote_calls': 0.0,
406 'requests': 28,
407 'sql': 0.0,
408 'sql_calls': 0.0,
409 'tmpl': 0.0,
410 'tmpl_calls': 0.0})],
411 'server': 'some.server.hostname',
412 'timestamp': datetime.utcnow()}
413 assert deserialized['server'] == expected['server']
414 metric = deserialized['metrics'][0]
415 expected_metric = expected['metrics'][0]
439 expected = {
440 "metrics": [
441 (
442 "dir/module:func",
443 {
444 "custom": 0.0,
445 "custom_calls": 0.0,
446 "main": 0.01664,
447 "nosql": 0.00061,
448 "nosql_calls": 23.0,
449 "remote": 0.0,
450 "remote_calls": 0.0,
451 "requests": 1,
452 "sql": 0.00105,
453 "sql_calls": 2.0,
454 "tmpl": 0.0,
455 "tmpl_calls": 0.0,
456 },
457 ),
458 (
459 "SomeView.function",
460 {
461 "custom": 0.0,
462 "custom_calls": 0.0,
463 "main": 0.647261,
464 "nosql": 0.306554,
465 "nosql_calls": 140.0,
466 "remote": 0.0,
467 "remote_calls": 0.0,
468 "requests": 28,
469 "sql": 0.0,
470 "sql_calls": 0.0,
471 "tmpl": 0.0,
472 "tmpl_calls": 0.0,
473 },
474 ),
475 ],
476 "server": "some.server.hostname",
477 "timestamp": datetime.utcnow(),
478 }
479 assert deserialized["server"] == expected["server"]
480 metric = deserialized["metrics"][0]
481 expected_metric = expected["metrics"][0]
416 482 assert metric[0] == expected_metric[0]
417 483 assert sorted(metric[1].items()) == sorted(expected_metric[1].items())
418 484
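The sorted(...items()) comparison above is a Python-2-era habit; dict equality is already order-insensitive, so under Python 3 the same check could be a plain ==. For illustration:

    a = {"sql": 0.00105, "requests": 1}
    b = {"requests": 1, "sql": 0.00105}
    assert a == b                                   # order-insensitive already
    assert sorted(a.items()) == sorted(b.items())   # equivalent, just noisier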
419 485
420 @pytest.mark.usefixtures('default_application')
421 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
486 @pytest.mark.usefixtures("default_application")
487 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
422 488 class TestAPIReportsView(object):
423 489 def test_no_json_payload(self, default_application):
424 490 import colander
@@ -427,12 +493,11 b' class TestAPIReportsView(object):'
427 493
428 494 context = DummyContext()
429 495 context.resource = ApplicationService.by_id(1)
430 request = testing.DummyRequest(
431 headers={'Content-Type': 'application/json'})
432 request.unsafe_json_body = ''
496 request = testing.DummyRequest(headers={"Content-Type": "application/json"})
497 request.unsafe_json_body = ""
433 498 request.context = context
434 499 route = mock.Mock()
435 route.name = 'api_reports'
500 route.name = "api_reports"
436 501 request.matched_route = route
437 502 with pytest.raises(colander.Invalid):
438 503 response = reports_create(request)
@@ -442,8 +507,9 b' class TestAPIReportsView(object):'
442 507 from appenlight.views.api import reports_create
443 508 from appenlight.models.services.application import ApplicationService
444 509 from appenlight.models.report_group import ReportGroup
510
445 511 route = mock.Mock()
446 route.name = 'api_reports'
512 route.name = "api_reports"
447 513 request = pyramid.threadlocal.get_current_request()
448 514 context = DummyContext()
449 515 context.resource = ApplicationService.by_id(1)
@@ -462,16 +528,19 b' class TestAPIReportsView(object):'
462 528 from appenlight.views.api import reports_create
463 529 from appenlight.models.services.application import ApplicationService
464 530 from appenlight.models.report_group import ReportGroup
531
465 532 route = mock.Mock()
466 route.name = 'api_reports'
533 route.name = "api_reports"
467 534 request = pyramid.threadlocal.get_current_request()
468 535 context = DummyContext()
469 536 context.resource = ApplicationService.by_id(1)
470 537 request.context = context
471 538 request.matched_route = route
472 539 PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
473 request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
474 copy.deepcopy(PYTHON_PAYLOAD)]
540 request.unsafe_json_body = [
541 copy.deepcopy(PYTHON_PAYLOAD),
542 copy.deepcopy(PYTHON_PAYLOAD),
543 ]
475 544 reports_create(request)
476 545 query = DBSession.query(ReportGroup)
477 546 report = query.first()
@@ -483,8 +552,9 b' class TestAPIReportsView(object):'
483 552 from appenlight.views.api import reports_create
484 553 from appenlight.models.services.application import ApplicationService
485 554 from appenlight.models.report_group import ReportGroup
555
486 556 route = mock.Mock()
487 route.name = 'api_reports'
557 route.name = "api_reports"
488 558 request = pyramid.threadlocal.get_current_request()
489 559 context = DummyContext()
490 560 context.resource = ApplicationService.by_id(1)
@@ -492,8 +562,10 b' class TestAPIReportsView(object):'
492 562 request.matched_route = route
493 563 PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
494 564 PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404
495 request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
496 copy.deepcopy(PARSED_REPORT_404)]
565 request.unsafe_json_body = [
566 copy.deepcopy(PYTHON_PAYLOAD),
567 copy.deepcopy(PARSED_REPORT_404),
568 ]
497 569 reports_create(request)
498 570 query = DBSession.query(ReportGroup)
499 571 report = query.first()
@@ -501,10 +573,9 b' class TestAPIReportsView(object):'
501 573 assert report.total_reports == 1
502 574
503 575
504 @pytest.mark.usefixtures('default_application')
505 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
576 @pytest.mark.usefixtures("default_application")
577 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
506 578 class TestAirbrakeXMLView(object):
507
508 579 def test_normal_payload_parsing(self):
509 580 import datetime
510 581 import defusedxml.ElementTree as ElementTree
@@ -513,8 +584,7 b' class TestAirbrakeXMLView(object):'
513 584 from appenlight.validators import ReportListSchema_0_5
514 585
515 586 context = DummyContext()
516 request = testing.DummyRequest(
517 headers={'Content-Type': 'application/xml'})
587 request = testing.DummyRequest(headers={"Content-Type": "application/xml"})
518 588 request.context = context
519 589 request.context.possibly_public = False
520 590 root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
@@ -522,37 +592,45 b' class TestAirbrakeXMLView(object):'
522 592 error_dict = parse_airbrake_xml(request)
523 593 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
524 594 deserialized_report = schema.deserialize([error_dict])[0]
525 assert deserialized_report['client'] == 'Airbrake Notifier'
526 assert deserialized_report['error'] == 'NameError: undefined local variable or method `sdfdfdf\' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>'
527 assert deserialized_report['http_status'] == 500
528 assert deserialized_report['language'] == 'unknown'
529 assert deserialized_report['message'] == ''
530 assert deserialized_report['occurences'] == 1
531 assert deserialized_report['priority'] == 5
532 d_request = deserialized_report['request']
533 assert d_request['GET'] == {'test': '1234'}
534 assert d_request['action_dispatch.request.parameters'] == {
535 'action': 'index',
536 'controller': 'welcome',
537 'test': '1234'}
538 assert deserialized_report['request_id'] == 'c11b2267f3ad8b00a1768cae35559fa1'
539 assert deserialized_report['server'] == 'ergo-desktop'
540 assert deserialized_report['traceback'][0] == {
541 'cline': 'block in start_thread',
542 'file': '/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb',
543 'fn': 'block in start_thread',
544 'line': '191',
545 'module': '',
546 'vars': {}}
547 assert deserialized_report['traceback'][-1] == {
548 'cline': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
549 'file': '[PROJECT_ROOT]/app/views/welcome/index.html.erb',
550 'fn': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
551 'line': '3',
552 'module': '',
553 'vars': {}}
554 assert deserialized_report['url'] == 'http://0.0.0.0:3000/welcome/index?test=1234'
555 assert deserialized_report['view_name'] == 'welcome:index'
595 assert deserialized_report["client"] == "Airbrake Notifier"
596 assert (
597 deserialized_report["error"]
598 == "NameError: undefined local variable or method `sdfdfdf' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>"
599 )
600 assert deserialized_report["http_status"] == 500
601 assert deserialized_report["language"] == "unknown"
602 assert deserialized_report["message"] == ""
603 assert deserialized_report["occurences"] == 1
604 assert deserialized_report["priority"] == 5
605 d_request = deserialized_report["request"]
606 assert d_request["GET"] == {"test": "1234"}
607 assert d_request["action_dispatch.request.parameters"] == {
608 "action": "index",
609 "controller": "welcome",
610 "test": "1234",
611 }
612 assert deserialized_report["request_id"] == "c11b2267f3ad8b00a1768cae35559fa1"
613 assert deserialized_report["server"] == "ergo-desktop"
614 assert deserialized_report["traceback"][0] == {
615 "cline": "block in start_thread",
616 "file": "/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb",
617 "fn": "block in start_thread",
618 "line": "191",
619 "module": "",
620 "vars": {},
621 }
622 assert deserialized_report["traceback"][-1] == {
623 "cline": "_app_views_welcome_index_html_erb___2570061166873166679_31748940",
624 "file": "[PROJECT_ROOT]/app/views/welcome/index.html.erb",
625 "fn": "_app_views_welcome_index_html_erb___2570061166873166679_31748940",
626 "line": "3",
627 "module": "",
628 "vars": {},
629 }
630 assert (
631 deserialized_report["url"] == "http://0.0.0.0:3000/welcome/index?test=1234"
632 )
633 assert deserialized_report["view_name"] == "welcome:index"
556 634
557 635 def test_normal_payload_view(self):
558 636 import defusedxml.ElementTree as ElementTree
@@ -563,21 +641,20 b' class TestAirbrakeXMLView(object):'
563 641
564 642 context = DummyContext()
565 643 context.resource = ApplicationService.by_id(1)
566 request = testing.DummyRequest(
567 headers={'Content-Type': 'application/xml'})
644 request = testing.DummyRequest(headers={"Content-Type": "application/xml"})
568 645 request.context = context
569 646 request.context.possibly_public = False
570 647 root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
571 648 request.context.airbrake_xml_etree = root
572 649 route = mock.Mock()
573 route.name = 'api_airbrake'
650 route.name = "api_airbrake"
574 651 request.matched_route = route
575 652 result = airbrake_xml_compat(request)
576 assert '<notice><id>' in result
653 assert "<notice><id>" in result
577 654
578 655
579 @pytest.mark.usefixtures('default_application')
580 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
656 @pytest.mark.usefixtures("default_application")
657 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
581 658 class TestAPILogView(object):
582 659 def test_no_json_payload(self, base_app):
583 660 import colander
@@ -586,13 +663,12 b' class TestAPILogView(object):'
586 663
587 664 context = DummyContext()
588 665 context.resource = ApplicationService.by_id(1)
589 request = testing.DummyRequest(
590 headers={'Content-Type': 'application/json'})
666 request = testing.DummyRequest(headers={"Content-Type": "application/json"})
591 667 request.context = context
592 668 request.registry = base_app.registry
593 request.unsafe_json_body = ''
669 request.unsafe_json_body = ""
594 670 route = mock.Mock()
595 route.name = 'api_logs'
671 route.name = "api_logs"
596 672 request.matched_route = route
597 673 with pytest.raises(colander.Invalid):
598 674 response = logs_create(request)
@@ -602,15 +678,15 b' class TestAPILogView(object):'
602 678 from appenlight.models.log import Log
603 679 from appenlight.views.api import logs_create
604 680 from appenlight.models.services.application import ApplicationService
681
605 682 route = mock.Mock()
606 route.name = 'api_logs'
683 route.name = "api_logs"
607 684 request = pyramid.threadlocal.get_current_request()
608 685 context = DummyContext()
609 686 context.resource = ApplicationService.by_id(1)
610 687 request.context = context
611 688 request.matched_route = route
612 request.unsafe_json_body = [copy.deepcopy(
613 payload_examples.LOG_EXAMPLES[0])]
689 request.unsafe_json_body = [copy.deepcopy(payload_examples.LOG_EXAMPLES[0])]
614 690 logs_create(request)
615 691 query = DBSession.query(Log)
616 692 log = query.first()
@@ -622,8 +698,9 b' class TestAPILogView(object):'
622 698 from appenlight.models.log import Log
623 699 from appenlight.views.api import logs_create
624 700 from appenlight.models.services.application import ApplicationService
701
625 702 route = mock.Mock()
626 route.name = 'api_logs'
703 route.name = "api_logs"
627 704 request = pyramid.threadlocal.get_current_request()
628 705 context = DummyContext()
629 706 context.resource = ApplicationService.by_id(1)
@@ -643,8 +720,9 b' class TestAPILogView(object):'
643 720 from appenlight.models.log import Log
644 721 from appenlight.views.api import logs_create
645 722 from appenlight.models.services.application import ApplicationService
723
646 724 route = mock.Mock()
647 route.name = 'api_logs'
725 route.name = "api_logs"
648 726 request = pyramid.threadlocal.get_current_request()
649 727 context = DummyContext()
650 728 context.resource = ApplicationService.by_id(1)
@@ -653,8 +731,8 b' class TestAPILogView(object):'
653 731
654 732 LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0])
655 733 LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1])
656 LOG_PAYLOAD['primary_key'] = 'X2'
657 LOG_PAYLOAD2['primary_key'] = 'X2'
734 LOG_PAYLOAD["primary_key"] = "X2"
735 LOG_PAYLOAD2["primary_key"] = "X2"
658 736 request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2]
659 737 logs_create(request)
660 738
@@ -662,22 +740,23 b' class TestAPILogView(object):'
662 740 assert query.count() == 1
663 741 assert query[0].message == "OMG ValueError happened2"
664 742
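The expectations just above (query.count() == 1, and the second message wins) indicate that logs sharing a primary_key are upserted rather than appended, i.e. last write wins per key. A toy model of that behaviour, not the actual storage code:

    logs = {}  # keyed by primary_key

    def store(log):
        # a repeated primary_key replaces the earlier payload
        logs[log["primary_key"]] = log

    store({"primary_key": "X2", "message": "OMG ValueError happened"})
    store({"primary_key": "X2", "message": "OMG ValueError happened2"})
    assert len(logs) == 1
    assert logs["X2"]["message"] == "OMG ValueError happened2"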
665 @pytest.mark.usefixtures('default_application')
666 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
743
744 @pytest.mark.usefixtures("default_application")
745 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
667 746 class TestAPIGeneralMetricsView(object):
668 747 def test_no_json_payload(self, base_app):
669 748 import colander
670 749 from appenlight.models.services.application import ApplicationService
671 750 from appenlight.views.api import general_metrics_create
751
672 752 route = mock.Mock()
673 route.name = 'api_general_metrics'
753 route.name = "api_general_metrics"
674 754 context = DummyContext()
675 755 context.resource = ApplicationService.by_id(1)
676 request = testing.DummyRequest(
677 headers={'Content-Type': 'application/json'})
756 request = testing.DummyRequest(headers={"Content-Type": "application/json"})
678 757 request.context = context
679 758 request.registry = base_app.registry
680 request.unsafe_json_body = ''
759 request.unsafe_json_body = ""
681 760 request.matched_route = route
682 761 with pytest.raises(colander.Invalid):
683 762 general_metrics_create(request)
@@ -687,8 +766,9 b' class TestAPIGeneralMetricsView(object):'
687 766 from appenlight.models.metric import Metric
688 767 from appenlight.views.api import general_metrics_create
689 768 from appenlight.models.services.application import ApplicationService
769
690 770 route = mock.Mock()
691 route.name = 'api_general_metric'
771 route.name = "api_general_metric"
692 772 request = pyramid.threadlocal.get_current_request()
693 773 request.matched_route = route
694 774 context = DummyContext()
@@ -699,15 +779,16 b' class TestAPIGeneralMetricsView(object):'
699 779 query = DBSession.query(Metric)
700 780 metric = query.first()
701 781 assert query.count() == 1
702 assert metric.namespace == 'some.monitor'
782 assert metric.namespace == "some.monitor"
703 783
704 784 def test_multiple_json_payload(self):
705 785 import appenlight.tests.payload_examples as payload_examples
706 786 from appenlight.models.metric import Metric
707 787 from appenlight.views.api import general_metrics_create
708 788 from appenlight.models.services.application import ApplicationService
789
709 790 route = mock.Mock()
710 route.name = 'api_general_metrics'
791 route.name = "api_general_metrics"
711 792 request = pyramid.threadlocal.get_current_request()
712 793 request.matched_route = route
713 794 context = DummyContext()
@@ -721,70 +802,49 b' class TestAPIGeneralMetricsView(object):'
721 802 query = DBSession.query(Metric)
722 803 metric = query.first()
723 804 assert query.count() == 2
724 assert metric.namespace == 'some.monitor'
805 assert metric.namespace == "some.monitor"
725 806
726 807
727 808 class TestGroupingMessageReplacements(object):
728 809 def replace_default_repr_python(self):
729 test_str = '''
810 test_str = """
730 811 ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)'))
731 '''
732 regex = r'<(.*?) object at (.*?)>'
812 """
813 regex = r"<(.*?) object at (.*?)>"
733 814
734 815
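replace_default_repr_python above is a stub: it builds a sample message and a regex for default object reprs but never applies them, and without a test_ prefix pytest will not collect it. Assuming re.sub is the intended mechanism, the normalization it gestures at would look roughly like:

    import re

    regex = r"<(.*?) object at (.*?)>"

    def normalize_reprs(message):
        # collapse volatile memory addresses so equivalent errors group together
        return re.sub(regex, r"<\1 object>", message)

    msg = "ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, ...))"
    assert normalize_reprs(msg) == "ConnectionError((<urllib3.connection.HTTPConnection object>, ...))"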
735 816 class TestRulesKeyGetter(object):
736 817 def test_default_dict_getter_top_key(self):
737 818 from appenlight.lib.rule import Rule
738 struct = {
739 "a": {
740 "b": 'b',
741 "c": {
742 "d": 'd',
743 "g": {
744 "h": 'h'
745 }
746 },
747 "e": 'e'
748 },
749 "f": 'f'
750 }
819
820 struct = {"a": {"b": "b", "c": {"d": "d", "g": {"h": "h"}}, "e": "e"}, "f": "f"}
751 821 result = Rule.default_dict_struct_getter(struct, "a")
752 assert result == struct['a']
822 assert result == struct["a"]
753 823
754 824 def test_default_dict_getter_sub_key(self):
755 825 from appenlight.lib.rule import Rule
756 struct = {
757 "a": {
758 "b": 'b',
759 "c": {
760 "d": 'd',
761 "g": {
762 "h": 'h'
763 }
764 },
765 "e": 'e'
766 },
767 "f": 'f'
768 }
769 result = Rule.default_dict_struct_getter(struct, 'a:b')
770 assert result == struct['a']['b']
771 result = Rule.default_dict_struct_getter(struct, 'a:c:d')
772 assert result == struct['a']['c']['d']
826
827 struct = {"a": {"b": "b", "c": {"d": "d", "g": {"h": "h"}}, "e": "e"}, "f": "f"}
828 result = Rule.default_dict_struct_getter(struct, "a:b")
829 assert result == struct["a"]["b"]
830 result = Rule.default_dict_struct_getter(struct, "a:c:d")
831 assert result == struct["a"]["c"]["d"]
773 832
774 833 def test_default_obj_getter_top_key(self):
775 834 from appenlight.lib.rule import Rule
835
776 836 class TestStruct(object):
777 837 def __init__(self, a, b):
778 838 self.a = a
779 839 self.b = b
780 840
781 struct = TestStruct(a='a',
782 b=TestStruct(a='x', b='y'))
841 struct = TestStruct(a="a", b=TestStruct(a="x", b="y"))
783 842 result = Rule.default_obj_struct_getter(struct, "a")
784 843 assert result == struct.a
785 844
786 845 def test_default_obj_getter_sub_key(self):
787 846 from appenlight.lib.rule import Rule
847
788 848 class TestStruct(object):
789 849 def __init__(self, name, a, b):
790 850 self.name = name
@@ -792,121 +852,122 b' class TestRulesKeyGetter(object):'
792 852 self.b = b
793 853
794 854 def __repr__(self):
795 return '<obj {}>'.format(self.name)
855 return "<obj {}>".format(self.name)
796 856
797 c = TestStruct('c', a=5, b='z')
798 b = TestStruct('b', a=c, b='y')
799 struct = TestStruct('a', a='a', b=b)
800 result = Rule.default_obj_struct_getter(struct, 'b:b')
857 c = TestStruct("c", a=5, b="z")
858 b = TestStruct("b", a=c, b="y")
859 struct = TestStruct("a", a="a", b=b)
860 result = Rule.default_obj_struct_getter(struct, "b:b")
801 861 assert result == struct.b.b
802 result = Rule.default_obj_struct_getter(struct, 'b:a:b')
862 result = Rule.default_obj_struct_getter(struct, "b:a:b")
803 863 assert result == struct.b.a.b
804 864
805 865
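The getters exercised above share one convention: a colon-separated path descends one level per segment, through dict keys in one case and attributes in the other. A minimal re-implementation consistent with the assertions (the real versions live on appenlight.lib.rule.Rule):

    def dict_struct_getter(struct, field_path):
        # "a:c:d" -> struct["a"]["c"]["d"]
        value = struct
        for key in field_path.split(":"):
            value = value[key]
        return value

    def obj_struct_getter(struct, field_path):
        # "b:a:b" -> struct.b.a.b
        value = struct
        for key in field_path.split(":"):
            value = getattr(value, key)
        return value

    assert dict_struct_getter({"a": {"c": {"d": "d"}}}, "a:c:d") == "d"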
806 @pytest.mark.usefixtures('report_type_matrix')
807 class TestRulesParsing():
808 @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
809 ('eq', 500, 500, True),
810 ('eq', 600, 500, False),
811 ('eq', 300, 500, False),
812 ('eq', "300", 500, False),
813 ('eq', "600", 500, False),
814 ('eq', "500", 500, True),
815 ('ne', 500, 500, False),
816 ('ne', 600, 500, True),
817 ('ne', 300, 500, True),
818 ('ne', "300", 500, True),
819 ('ne', "600", 500, True),
820 ('ne', "500", 500, False),
821 ('ge', 500, 500, True),
822 ('ge', 600, 500, True),
823 ('ge', 499, 500, False),
824 ('gt', 499, 500, False),
825 ('gt', 500, 500, False),
826 ('gt', 501, 500, True),
827 ('le', 499, 500, True),
828 ('le', 500, 500, True),
829 ('le', 501, 500, False),
830 ('lt', 499, 500, True),
831 ('lt', 500, 500, False),
832 ('lt', 501, 500, False),
833 ])
834 def test_single_op_int(self, op, struct_value, test_value, match_result,
835 report_type_matrix):
866 @pytest.mark.usefixtures("report_type_matrix")
867 class TestRulesParsing:
868 @pytest.mark.parametrize(
869 "op, struct_value, test_value, match_result",
870 [
871 ("eq", 500, 500, True),
872 ("eq", 600, 500, False),
873 ("eq", 300, 500, False),
874 ("eq", "300", 500, False),
875 ("eq", "600", 500, False),
876 ("eq", "500", 500, True),
877 ("ne", 500, 500, False),
878 ("ne", 600, 500, True),
879 ("ne", 300, 500, True),
880 ("ne", "300", 500, True),
881 ("ne", "600", 500, True),
882 ("ne", "500", 500, False),
883 ("ge", 500, 500, True),
884 ("ge", 600, 500, True),
885 ("ge", 499, 500, False),
886 ("gt", 499, 500, False),
887 ("gt", 500, 500, False),
888 ("gt", 501, 500, True),
889 ("le", 499, 500, True),
890 ("le", 500, 500, True),
891 ("le", 501, 500, False),
892 ("lt", 499, 500, True),
893 ("lt", 500, 500, False),
894 ("lt", 501, 500, False),
895 ],
896 )
897 def test_single_op_int(
898 self, op, struct_value, test_value, match_result, report_type_matrix
899 ):
836 900 from appenlight.lib.rule import Rule
837 rule_config = {
838 "op": op,
839 "field": "http_status",
840 "value": test_value
841 }
901
902 rule_config = {"op": op, "field": "http_status", "value": test_value}
842 903 rule = Rule(rule_config, report_type_matrix)
843 904
844 data = {
845 "http_status": struct_value
846 }
905 data = {"http_status": struct_value}
847 906 assert rule.match(data) is match_result
848 907
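The eq/ne rows above make the coercion rule visible: "500" matches 500 while "300" does not, because both sides are normalized to the field's declared type (http_status is an int in the report_type_matrix) before the operator runs. A sketch of that dispatch, as an illustration rather than Rule's actual code:

    import operator

    OPS = {"eq": operator.eq, "ne": operator.ne,
           "ge": operator.ge, "gt": operator.gt,
           "le": operator.le, "lt": operator.lt}

    def match_http_status(op, struct_value, test_value):
        # normalize both sides to the field type (int), then compare
        return OPS[op](int(struct_value), int(test_value))

    assert match_http_status("eq", "500", 500) is True
    assert match_http_status("eq", "300", 500) is False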
849 @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
850 ('ge', "500.01", 500, True),
851 ('ge', "500.01", 500.02, False),
852 ('le', "500.01", 500.02, True)
853 ])
854 def test_single_op_float(self, op, struct_value, test_value, match_result,
855 report_type_matrix):
908 @pytest.mark.parametrize(
909 "op, struct_value, test_value, match_result",
910 [
911 ("ge", "500.01", 500, True),
912 ("ge", "500.01", 500.02, False),
913 ("le", "500.01", 500.02, True),
914 ],
915 )
916 def test_single_op_float(
917 self, op, struct_value, test_value, match_result, report_type_matrix
918 ):
856 919 from appenlight.lib.rule import Rule
857 rule_config = {
858 "op": op,
859 "field": "duration",
860 "value": test_value
861 }
920
921 rule_config = {"op": op, "field": "duration", "value": test_value}
862 922 rule = Rule(rule_config, report_type_matrix)
863 923
864 data = {
865 "duration": struct_value
866 }
924 data = {"duration": struct_value}
867 925 assert rule.match(data) is match_result
868 926
869 @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
870 ('contains', 'foo bar baz', 'foo', True),
871 ('contains', 'foo bar baz', 'bar', True),
872 ('contains', 'foo bar baz', 'dupa', False),
873 ('startswith', 'foo bar baz', 'foo', True),
874 ('startswith', 'foo bar baz', 'bar', False),
875 ('endswith', 'foo bar baz', 'baz', True),
876 ('endswith', 'foo bar baz', 'bar', False),
877 ])
878 def test_single_op_string(self, op, struct_value, test_value,
879 match_result, report_type_matrix):
927 @pytest.mark.parametrize(
928 "op, struct_value, test_value, match_result",
929 [
930 ("contains", "foo bar baz", "foo", True),
931 ("contains", "foo bar baz", "bar", True),
932 ("contains", "foo bar baz", "dupa", False),
933 ("startswith", "foo bar baz", "foo", True),
934 ("startswith", "foo bar baz", "bar", False),
935 ("endswith", "foo bar baz", "baz", True),
936 ("endswith", "foo bar baz", "bar", False),
937 ],
938 )
939 def test_single_op_string(
940 self, op, struct_value, test_value, match_result, report_type_matrix
941 ):
880 942 from appenlight.lib.rule import Rule
881 rule_config = {
882 "op": op,
883 "field": "error",
884 "value": test_value
885 }
943
944 rule_config = {"op": op, "field": "error", "value": test_value}
886 945 rule = Rule(rule_config, report_type_matrix)
887 946
888 data = {
889 "error": struct_value
890 }
947 data = {"error": struct_value}
891 948 assert rule.match(data) is match_result
892 949
893 @pytest.mark.parametrize("field, value, s_type", [
894 ('field_unicode', 500, str),
895 ('field_unicode', 500.0, str),
896 ('field_unicode', "500", str),
897 ('field_int', "500", int),
898 ('field_int', 500, int),
899 ('field_int', 500.0, int),
900 ('field_float', "500", float),
901 ('field_float', 500, float),
902 ('field_float', 500.0, float),
903 ])
950 @pytest.mark.parametrize(
951 "field, value, s_type",
952 [
953 ("field_unicode", 500, str),
954 ("field_unicode", 500.0, str),
955 ("field_unicode", "500", str),
956 ("field_int", "500", int),
957 ("field_int", 500, int),
958 ("field_int", 500.0, int),
959 ("field_float", "500", float),
960 ("field_float", 500, float),
961 ("field_float", 500.0, float),
962 ],
963 )
904 964 def test_type_normalization(self, field, value, s_type):
905 965 from appenlight.lib.rule import Rule
966
906 967 type_matrix = {
907 'field_unicode': {"type": 'unicode'},
908 'field_float': {"type": 'float'},
909 'field_int': {"type": 'int'},
968 "field_unicode": {"type": "unicode"},
969 "field_float": {"type": "float"},
970 "field_int": {"type": "int"},
910 971 }
911 972
912 973 rule = Rule({}, type_matrix)
@@ -914,280 +975,275 b' class TestRulesParsing():'
914 975 assert isinstance(n_value, s_type) is True
915 976
916 977
917 @pytest.mark.usefixtures('report_type_matrix')
918 class TestNestedRuleParsing():
919
920 @pytest.mark.parametrize("data, result", [
921 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
922 False),
923 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
924 False),
925 ({"http_status": 500, "group": {"priority": 1, "occurences": 11}},
926 False),
927 ({"http_status": 101, "group": {"priority": 3, "occurences": 5}},
928 True),
929 ])
978 @pytest.mark.usefixtures("report_type_matrix")
979 class TestNestedRuleParsing:
980 @pytest.mark.parametrize(
981 "data, result",
982 [
983 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, False),
984 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, False),
985 ({"http_status": 500, "group": {"priority": 1, "occurences": 11}}, False),
986 ({"http_status": 101, "group": {"priority": 3, "occurences": 5}}, True),
987 ],
988 )
930 989 def test_NOT_rule(self, data, result, report_type_matrix):
931 990 from appenlight.lib.rule import Rule
991
932 992 rule_config = {
933 993 "field": "__NOT__",
934 994 "rules": [
935 {
936 "op": "ge",
937 "field": "group:occurences",
938 "value": "10"
939 },
940 {
941 "op": "ge",
942 "field": "group:priority",
943 "value": "4"
944 }
945 ]
995 {"op": "ge", "field": "group:occurences", "value": "10"},
996 {"op": "ge", "field": "group:priority", "value": "4"},
997 ],
946 998 }
947 999
948 1000 rule = Rule(rule_config, report_type_matrix)
949 1001 assert rule.match(data) is result
950 1002
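Worth spelling out what the truth table for test_NOT_rule pins down: row three (occurences 11 passes, priority 1 fails) still expects False, so __NOT__ matches only when none of its subrules match, i.e. NOR semantics (not any) rather than NAND (not all). The combinator family these tests imply, sketched without reference to Rule's internals:

    def evaluate(node, data, leaf_match):
        # leaf_match(node, data) decides plain op/field/value leaves
        field = node.get("field")
        subrules = node.get("rules", [])
        if field == "__AND__":
            return all(evaluate(r, data, leaf_match) for r in subrules)
        if field == "__OR__":
            return any(evaluate(r, data, leaf_match) for r in subrules)
        if field == "__NOT__":
            return not any(evaluate(r, data, leaf_match) for r in subrules)
        return leaf_match(node, data)

    always = lambda node, data: True
    never = lambda node, data: False
    assert evaluate({"field": "__NOT__", "rules": [{}, {}]}, {}, always) is False
    assert evaluate({"field": "__NOT__", "rules": [{}, {}]}, {}, never) is True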
951 @pytest.mark.parametrize("data, result", [
952 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
953 True),
954 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
955 True),
956 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
957 True),
958 ({"http_status": 101, "group": {"priority": 3, "occurences": 11}},
959 False),
960 ])
1003 @pytest.mark.parametrize(
1004 "data, result",
1005 [
1006 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, True),
1007 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, True),
1008 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, True),
1009 ({"http_status": 101, "group": {"priority": 3, "occurences": 11}}, False),
1010 ],
1011 )
961 1012 def test_nested_OR_AND_rule(self, data, result, report_type_matrix):
962 1013 from appenlight.lib.rule import Rule
1014
963 1015 rule_config = {
964 1016 "field": "__OR__",
965 1017 "rules": [
966 1018 {
967 1019 "field": "__AND__",
968 1020 "rules": [
969 {
970 "op": "ge",
971 "field": "group:occurences",
972 "value": "10"
1021 {"op": "ge", "field": "group:occurences", "value": "10"},
1022 {"op": "ge", "field": "group:priority", "value": "4"},
1023 ],
973 1024 },
974 {
975 "op": "ge",
976 "field": "group:priority",
977 "value": "4"
978 }
979 ]
980 },
981 {
982 "op": "eq",
983 "field": "http_status",
984 "value": "500"
985 }
986 ]
1025 {"op": "eq", "field": "http_status", "value": "500"},
1026 ],
987 1027 }
988 1028
989 1029 rule = Rule(rule_config, report_type_matrix)
990 1030 assert rule.match(data) is result
991 1031
992 @pytest.mark.parametrize("data, result", [
993 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
994 True),
995 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
996 True),
997 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
998 True),
999 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
1000 False),
1001 ])
1032 @pytest.mark.parametrize(
1033 "data, result",
1034 [
1035 ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, True),
1036 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, True),
1037 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, True),
1038 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, False),
1039 ],
1040 )
1002 1041 def test_nested_OR_OR_rule(self, data, result, report_type_matrix):
1003 1042 from appenlight.lib.rule import Rule
1043
1004 1044 rule_config = {
1005 1045 "field": "__OR__",
1006 1046 "rules": [
1007 {"field": "__OR__",
1047 {
1048 "field": "__OR__",
1008 1049 "rules": [
1009 {"op": "ge",
1010 "field": "group:occurences",
1011 "value": "10"
1050 {"op": "ge", "field": "group:occurences", "value": "10"},
1051 {"op": "ge", "field": "group:priority", "value": "4"},
1052 ],
1012 1053 },
1013 {"op": "ge",
1014 "field": "group:priority",
1015 "value": "4"
1016 }
1017 ]
1018 },
1019 {"op": "eq",
1020 "field": "http_status",
1021 "value": "500"
1022 }
1023 ]
1054 {"op": "eq", "field": "http_status", "value": "500"},
1055 ],
1024 1056 }
1025 1057
1026 1058 rule = Rule(rule_config, report_type_matrix)
1027 1059 assert rule.match(data) is result
1028 1060
1029 @pytest.mark.parametrize("data, result", [
1030 ({"http_status": 500, "group": {"priority": 7, "occurences": 11}},
1031 True),
1032 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
1033 False),
1034 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
1035 False),
1036 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
1037 False),
1038 ])
1061 @pytest.mark.parametrize(
1062 "data, result",
1063 [
1064 ({"http_status": 500, "group": {"priority": 7, "occurences": 11}}, True),
1065 ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, False),
1066 ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, False),
1067 ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, False),
1068 ],
1069 )
1039 1070 def test_nested_AND_AND_rule(self, data, result, report_type_matrix):
1040 1071 from appenlight.lib.rule import Rule
1072
1041 1073 rule_config = {
1042 1074 "field": "__AND__",
1043 1075 "rules": [
1044 {"field": "__AND__",
1076 {
1077 "field": "__AND__",
1045 1078 "rules": [
1046 {"op": "ge",
1047 "field": "group:occurences",
1048 "value": "10"
1049 },
1050 {"op": "ge",
1051 "field": "group:priority",
1052 "value": "4"
1053 }]
1079 {"op": "ge", "field": "group:occurences", "value": "10"},
1080 {"op": "ge", "field": "group:priority", "value": "4"},
1081 ],
1054 1082 },
1055 {"op": "eq",
1056 "field": "http_status",
1057 "value": "500"
1058 }
1059 ]
1083 {"op": "eq", "field": "http_status", "value": "500"},
1084 ],
1060 1085 }
1061 1086
1062 1087 rule = Rule(rule_config, report_type_matrix)
1063 1088 assert rule.match(data) is result
1064 1089
1065 @pytest.mark.parametrize("data, result", [
1066 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1067 "url_path": '/test/register', "error": "foo test bar"}, True),
1068 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1069 "url_path": '/test/register', "error": "foo INVALID bar"}, False),
1070 ])
1090 @pytest.mark.parametrize(
1091 "data, result",
1092 [
1093 (
1094 {
1095 "http_status": 500,
1096 "group": {"priority": 7, "occurences": 11},
1097 "url_path": "/test/register",
1098 "error": "foo test bar",
1099 },
1100 True,
1101 ),
1102 (
1103 {
1104 "http_status": 500,
1105 "group": {"priority": 7, "occurences": 11},
1106 "url_path": "/test/register",
1107 "error": "foo INVALID bar",
1108 },
1109 False,
1110 ),
1111 ],
1112 )
1071 1113 def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix):
1072 1114 from appenlight.lib.rule import Rule
1115
1073 1116 rule_config = {
1074 1117 "field": "__AND__",
1075 1118 "rules": [
1076 {"field": "__AND__",
1119 {
1120 "field": "__AND__",
1077 1121 "rules": [
1078 {"op": "ge",
1079 "field": "group:occurences",
1080 "value": "10"
1081 },
1082 {"field": "__AND__",
1122 {"op": "ge", "field": "group:occurences", "value": "10"},
1123 {
1124 "field": "__AND__",
1083 1125 "rules": [
1084 {"op": "endswith",
1126 {
1127 "op": "endswith",
1085 1128 "field": "url_path",
1086 "value": "register"},
1087 {"op": "contains",
1088 "field": "error",
1089 "value": "test"}]}]
1129 "value": "register",
1090 1130 },
1091 {"op": "eq",
1092 "field": "http_status",
1093 "value": "500"
1094 }
1095 ]
1131 {"op": "contains", "field": "error", "value": "test"},
1132 ],
1133 },
1134 ],
1135 },
1136 {"op": "eq", "field": "http_status", "value": "500"},
1137 ],
1096 1138 }
1097 1139
1098 1140 rule = Rule(rule_config, report_type_matrix)
1099 1141 assert rule.match(data) is result
1100 1142
1101 @pytest.mark.parametrize("data, result", [
1102 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1103 "url_path": 6, "error": 3}, False),
1104 ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
1105 "url_path": '/test/register', "error": "foo INVALID bar"}, True),
1106 ])
1143 @pytest.mark.parametrize(
1144 "data, result",
1145 [
1146 (
1147 {
1148 "http_status": 500,
1149 "group": {"priority": 7, "occurences": 11},
1150 "url_path": 6,
1151 "error": 3,
1152 },
1153 False,
1154 ),
1155 (
1156 {
1157 "http_status": 500,
1158 "group": {"priority": 7, "occurences": 11},
1159 "url_path": "/test/register",
1160 "error": "foo INVALID bar",
1161 },
1162 True,
1163 ),
1164 ],
1165 )
1107 1166 def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix):
1108 1167 from appenlight.lib.rule import Rule
1168
1109 1169 rule_config = {
1110 1170 "field": "__AND__",
1111 1171 "rules": [
1112 {"field": "__AND__",
1172 {
1173 "field": "__AND__",
1113 1174 "rules": [
1114 {"op": "ge",
1115 "field": "group:occurences",
1116 "value": "10"
1117 },
1118 {"field": "__OR__",
1175 {"op": "ge", "field": "group:occurences", "value": "10"},
1176 {
1177 "field": "__OR__",
1119 1178 "rules": [
1120 {"op": "endswith",
1179 {
1180 "op": "endswith",
1121 1181 "field": "url_path",
1122 "value": "register"
1182 "value": "register",
1123 1183 },
1124 {"op": "contains",
1125 "field": "error",
1126 "value": "test"
1127 }]}]
1184 {"op": "contains", "field": "error", "value": "test"},
1185 ],
1128 1186 },
1129 {"op": "eq",
1130 "field": "http_status",
1131 "value": "500"
1132 }
1133 ]
1187 ],
1188 },
1189 {"op": "eq", "field": "http_status", "value": "500"},
1190 ],
1134 1191 }
1135 1192
1136 1193 rule = Rule(rule_config, report_type_matrix)
1137 1194 assert rule.match(data) is result
1138 1195
1139 @pytest.mark.parametrize("op, field, value, should_fail", [
1140 ('eq', 'http_status', "1", False),
1141 ('ne', 'http_status', "1", False),
1142 ('ne', 'http_status', "foo", True),
1143 ('startswith', 'http_status', "1", True),
1144 ('eq', 'group:priority', "1", False),
1145 ('ne', 'group:priority', "1", False),
1146 ('ge', 'group:priority', "1", False),
1147 ('le', 'group:priority', "1", False),
1148 ('startswith', 'group:priority', "1", True),
1149 ('eq', 'url_domain', "1", False),
1150 ('ne', 'url_domain', "1", False),
1151 ('startswith', 'url_domain', "1", False),
1152 ('endswith', 'url_domain', "1", False),
1153 ('contains', 'url_domain', "1", False),
1154 ('ge', 'url_domain', "1", True),
1155 ('eq', 'url_path', "1", False),
1156 ('ne', 'url_path', "1", False),
1157 ('startswith', 'url_path', "1", False),
1158 ('endswith', 'url_path', "1", False),
1159 ('contains', 'url_path', "1", False),
1160 ('ge', 'url_path', "1", True),
1161 ('eq', 'error', "1", False),
1162 ('ne', 'error', "1", False),
1163 ('startswith', 'error', "1", False),
1164 ('endswith', 'error', "1", False),
1165 ('contains', 'error', "1", False),
1166 ('ge', 'error', "1", True),
1167 ('ge', 'url_path', "1", True),
1168 ('eq', 'tags:server_name', "1", False),
1169 ('ne', 'tags:server_name', "1", False),
1170 ('startswith', 'tags:server_name', "1", False),
1171 ('endswith', 'tags:server_name', "1", False),
1172 ('contains', 'tags:server_name', "1", False),
1173 ('ge', 'tags:server_name', "1", True),
1174 ('contains', 'traceback', "1", False),
1175 ('ge', 'traceback', "1", True),
1176 ('eq', 'group:occurences', "1", False),
1177 ('ne', 'group:occurences', "1", False),
1178 ('ge', 'group:occurences', "1", False),
1179 ('le', 'group:occurences', "1", False),
1180 ('contains', 'group:occurences', "1", True),
1181 ])
1182 def test_rule_validation(self, op, field, value, should_fail,
1183 report_type_matrix):
1196 @pytest.mark.parametrize(
1197 "op, field, value, should_fail",
1198 [
1199 ("eq", "http_status", "1", False),
1200 ("ne", "http_status", "1", False),
1201 ("ne", "http_status", "foo", True),
1202 ("startswith", "http_status", "1", True),
1203 ("eq", "group:priority", "1", False),
1204 ("ne", "group:priority", "1", False),
1205 ("ge", "group:priority", "1", False),
1206 ("le", "group:priority", "1", False),
1207 ("startswith", "group:priority", "1", True),
1208 ("eq", "url_domain", "1", False),
1209 ("ne", "url_domain", "1", False),
1210 ("startswith", "url_domain", "1", False),
1211 ("endswith", "url_domain", "1", False),
1212 ("contains", "url_domain", "1", False),
1213 ("ge", "url_domain", "1", True),
1214 ("eq", "url_path", "1", False),
1215 ("ne", "url_path", "1", False),
1216 ("startswith", "url_path", "1", False),
1217 ("endswith", "url_path", "1", False),
1218 ("contains", "url_path", "1", False),
1219 ("ge", "url_path", "1", True),
1220 ("eq", "error", "1", False),
1221 ("ne", "error", "1", False),
1222 ("startswith", "error", "1", False),
1223 ("endswith", "error", "1", False),
1224 ("contains", "error", "1", False),
1225 ("ge", "error", "1", True),
1226 ("ge", "url_path", "1", True),
1227 ("eq", "tags:server_name", "1", False),
1228 ("ne", "tags:server_name", "1", False),
1229 ("startswith", "tags:server_name", "1", False),
1230 ("endswith", "tags:server_name", "1", False),
1231 ("contains", "tags:server_name", "1", False),
1232 ("ge", "tags:server_name", "1", True),
1233 ("contains", "traceback", "1", False),
1234 ("ge", "traceback", "1", True),
1235 ("eq", "group:occurences", "1", False),
1236 ("ne", "group:occurences", "1", False),
1237 ("ge", "group:occurences", "1", False),
1238 ("le", "group:occurences", "1", False),
1239 ("contains", "group:occurences", "1", True),
1240 ],
1241 )
1242 def test_rule_validation(self, op, field, value, should_fail, report_type_matrix):
1184 1243 import colander
1185 1244 from appenlight.validators import build_rule_schema
1186 rule_config = {
1187 "op": op,
1188 "field": field,
1189 "value": value
1190 }
1245
1246 rule_config = {"op": op, "field": field, "value": value}
1191 1247
1192 1248 schema = build_rule_schema(rule_config, report_type_matrix)
1193 1249 if should_fail:
@@ -1198,40 +1254,29 b' class TestNestedRuleParsing():'
1198 1254
1199 1255 def test_nested_proper_rule_validation(self, report_type_matrix):
1200 1256 from appenlight.validators import build_rule_schema
1257
1201 1258 rule_config = {
1202 1259 "field": "__AND__",
1203 1260 "rules": [
1204 1261 {
1205 1262 "field": "__AND__",
1206 1263 "rules": [
1207 {
1208 "op": "ge",
1209 "field": "group:occurences",
1210 "value": "10"
1211 },
1264 {"op": "ge", "field": "group:occurences", "value": "10"},
1212 1265 {
1213 1266 "field": "__OR__",
1214 1267 "rules": [
1215 1268 {
1216 1269 "op": "endswith",
1217 1270 "field": "url_path",
1218 "value": "register"
1271 "value": "register",
1219 1272 },
1220 {
1221 "op": "contains",
1222 "field": "error",
1223 "value": "test"
1224 }
1225 ]
1226 }
1227 ]
1273 {"op": "contains", "field": "error", "value": "test"},
1274 ],
1228 1275 },
1229 {
1230 "op": "eq",
1231 "field": "http_status",
1232 "value": "500"
1233 }
1234 ]
1276 ],
1277 },
1278 {"op": "eq", "field": "http_status", "value": "500"},
1279 ],
1235 1280 }
1236 1281
1237 1282 schema = build_rule_schema(rule_config, report_type_matrix)
@@ -1240,40 +1285,25 b' class TestNestedRuleParsing():'
1240 1285 def test_nested_bad_rule_validation(self, report_type_matrix):
1241 1286 import colander
1242 1287 from appenlight.validators import build_rule_schema
1288
1243 1289 rule_config = {
1244 1290 "field": "__AND__",
1245 1291 "rules": [
1246 1292 {
1247 1293 "field": "__AND__",
1248 1294 "rules": [
1249 {
1250 "op": "ge",
1251 "field": "group:occurences",
1252 "value": "10"
1253 },
1295 {"op": "ge", "field": "group:occurences", "value": "10"},
1254 1296 {
1255 1297 "field": "__OR__",
1256 1298 "rules": [
1257 {
1258 "op": "gt",
1259 "field": "url_path",
1260 "value": "register"
1299 {"op": "gt", "field": "url_path", "value": "register"},
1300 {"op": "contains", "field": "error", "value": "test"},
1301 ],
1261 1302 },
1262 {
1263 "op": "contains",
1264 "field": "error",
1265 "value": "test"
1266 }
1267 ]
1268 }
1269 ]
1303 ],
1270 1304 },
1271 {
1272 "op": "eq",
1273 "field": "http_status",
1274 "value": "500"
1275 }
1276 ]
1305 {"op": "eq", "field": "http_status", "value": "500"},
1306 ],
1277 1307 }
1278 1308
1279 1309 schema = build_rule_schema(rule_config, report_type_matrix)
@@ -1282,97 +1312,72 b' class TestNestedRuleParsing():'
1282 1312
1283 1313 def test_config_manipulator(self):
1284 1314 from appenlight.lib.rule import Rule
1315
1285 1316 type_matrix = {
1286 'a': {"type": 'int',
1287 "ops": ('eq', 'ne', 'ge', 'le',)},
1288 'b': {"type": 'int',
1289 "ops": ('eq', 'ne', 'ge', 'le',)},
1317 "a": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
1318 "b": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
1290 1319 }
1291 1320 rule_config = {
1292 1321 "field": "__OR__",
1293 1322 "rules": [
1294 1323 {
1295 1324 "field": "__OR__",
1296 "rules": [
1297 {
1298 "op": "ge",
1299 "field": "a",
1300 "value": "10"
1301 }
1302 ]
1325 "rules": [{"op": "ge", "field": "a", "value": "10"}],
1303 1326 },
1304 {
1305 "op": "eq",
1306 "field": "b",
1307 "value": "500"
1308 }
1309 ]
1327 {"op": "eq", "field": "b", "value": "500"},
1328 ],
1310 1329 }
1311 1330
1312 1331 def rule_manipulator(rule):
1313 if 'value' in rule.config:
1314 rule.config['value'] = "1"
1332 if "value" in rule.config:
1333 rule.config["value"] = "1"
1315 1334
1316 rule = Rule(rule_config, type_matrix,
1317 config_manipulator=rule_manipulator)
1318 rule.match({"a": 1,
1319 "b": "2"})
1320 assert rule.config['rules'][0]['rules'][0]['value'] == "1"
1321 assert rule.config['rules'][1]['value'] == "1"
1322 assert rule.type_matrix["b"]['type'] == "int"
1335 rule = Rule(rule_config, type_matrix, config_manipulator=rule_manipulator)
1336 rule.match({"a": 1, "b": "2"})
1337 assert rule.config["rules"][0]["rules"][0]["value"] == "1"
1338 assert rule.config["rules"][1]["value"] == "1"
1339 assert rule.type_matrix["b"]["type"] == "int"
1323 1340
1324 1341 def test_dynamic_config_manipulator(self):
1325 1342 from appenlight.lib.rule import Rule
1343
1326 1344 rule_config = {
1327 1345 "field": "__OR__",
1328 1346 "rules": [
1329 1347 {
1330 1348 "field": "__OR__",
1331 "rules": [
1332 {
1333 "op": "ge",
1334 "field": "a",
1335 "value": "10"
1336 }
1337 ]
1349 "rules": [{"op": "ge", "field": "a", "value": "10"}],
1338 1350 },
1339 {
1340 "op": "eq",
1341 "field": "b",
1342 "value": "500"
1343 }
1344 ]
1351 {"op": "eq", "field": "b", "value": "500"},
1352 ],
1345 1353 }
1346 1354
1347 1355 def rule_manipulator(rule):
1348 1356 rule.type_matrix = {
1349 'a': {"type": 'int',
1350 "ops": ('eq', 'ne', 'ge', 'le',)},
1351 'b': {"type": 'unicode',
1352 "ops": ('eq', 'ne', 'ge', 'le',)},
1357 "a": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
1358 "b": {"type": "unicode", "ops": ("eq", "ne", "ge", "le")},
1353 1359 }
1354 1360
1355 if 'value' in rule.config:
1356 if rule.config['field'] == 'a':
1357 rule.config['value'] = "1"
1358 elif rule.config['field'] == 'b':
1359 rule.config['value'] = "2"
1361 if "value" in rule.config:
1362 if rule.config["field"] == "a":
1363 rule.config["value"] = "1"
1364 elif rule.config["field"] == "b":
1365 rule.config["value"] = "2"
1360 1366
1361 rule = Rule(rule_config, {},
1362 config_manipulator=rule_manipulator)
1363 rule.match({"a": 11,
1364 "b": "55"})
1365 assert rule.config['rules'][0]['rules'][0]['value'] == "1"
1366 assert rule.config['rules'][1]['value'] == "2"
1367 assert rule.type_matrix["b"]['type'] == "unicode"
1367 rule = Rule(rule_config, {}, config_manipulator=rule_manipulator)
1368 rule.match({"a": 11, "b": "55"})
1369 assert rule.config["rules"][0]["rules"][0]["value"] == "1"
1370 assert rule.config["rules"][1]["value"] == "2"
1371 assert rule.type_matrix["b"]["type"] == "unicode"
1368 1372
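The two manipulator tests above also pin down the config_manipulator contract: the callable is handed every nested Rule node when match() runs, so it can rewrite "value" entries or swap the type_matrix wholesale before comparison. A minimal sketch mirroring them (the matrix contents are illustrative):

    from appenlight.lib.rule import Rule

    type_matrix = {"a": {"type": "int", "ops": ("eq", "ne", "ge", "le")}}

    def force_value(rule):
        # invoked for every rule node before it is evaluated
        if "value" in rule.config:
            rule.config["value"] = "1"

    config = {"field": "__OR__", "rules": [{"op": "eq", "field": "a", "value": "999"}]}
    rule = Rule(config, type_matrix, config_manipulator=force_value)
    rule.match({"a": 1})
    assert rule.config["rules"][0]["value"] == "1"  # rewritten before matching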
1369 1373
1370 @pytest.mark.usefixtures('base_app', 'with_migrations')
1374 @pytest.mark.usefixtures("base_app", "with_migrations")
1371 1375 class TestViewsWithForms(object):
1372 1376 def test_bad_csrf(self):
1373 1377 from appenlight.forms import CSRFException
1374 1378 from appenlight.views.index import register
1375 post_data = {'dupa': 'dupa'}
1379
1380 post_data = {"dupa": "dupa"}
1376 1381 request = testing.DummyRequest(post=post_data)
1377 1382 request.POST = webob.multidict.MultiDict(request.POST)
1378 1383 with pytest.raises(CSRFException):
@@ -1380,58 +1385,63 b' class TestViewsWithForms(object):'
1380 1385
1381 1386 def test_proper_csrf(self):
1382 1387 from appenlight.views.index import register
1388
1383 1389 request = pyramid.threadlocal.get_current_request()
1384 post_data = {'dupa': 'dupa',
1385 'csrf_token': request.session.get_csrf_token()}
1390 post_data = {"dupa": "dupa", "csrf_token": request.session.get_csrf_token()}
1386 1391 request = testing.DummyRequest(post=post_data)
1387 1392 request.POST = webob.multidict.MultiDict(request.POST)
1388 1393 result = register(request)
1389 assert result['form'].errors['email'][0] == 'This field is required.'
1394 assert result["form"].errors["email"][0] == "This field is required."
1390 1395
1391 1396
1392 @pytest.mark.usefixtures('base_app', 'with_migrations', 'default_data')
1397 @pytest.mark.usefixtures("base_app", "with_migrations", "default_data")
1393 1398 class TestRegistration(object):
1394 1399 def test_invalid_form(self):
1395 1400 from appenlight.views.index import register
1401
1396 1402 request = pyramid.threadlocal.get_current_request()
1397 post_data = {'user_name': '',
1398 'user_password': '',
1399 'email': '',
1400 'csrf_token': request.session.get_csrf_token()}
1403 post_data = {
1404 "user_name": "",
1405 "user_password": "",
1406 "email": "",
1407 "csrf_token": request.session.get_csrf_token(),
1408 }
1401 1409 request = testing.DummyRequest(post=post_data)
1402 1410 request.POST = webob.multidict.MultiDict(request.POST)
1403 1411 result = register(request)
1404 assert result['form'].errors['user_name'][0] == \
1405 'This field is required.'
1412 assert result["form"].errors["user_name"][0] == "This field is required."
1406 1413
1407 1414 def test_valid_form(self):
1408 1415 from appenlight.views.index import register
1409 1416 from ziggurat_foundations.models.services.user import UserService
1417
1410 1418 request = pyramid.threadlocal.get_current_request()
1411 post_data = {'user_name': 'foo',
1412 'user_password': 'barr',
1413 'email': 'test@test.foo',
1414 'csrf_token': request.session.get_csrf_token()}
1419 post_data = {
1420 "user_name": "foo",
1421 "user_password": "barr",
1422 "email": "test@test.foo",
1423 "csrf_token": request.session.get_csrf_token(),
1424 }
1415 1425 request = testing.DummyRequest(post=post_data)
1416 1426 request.add_flash_to_headers = mock.Mock()
1417 1427 request.POST = webob.multidict.MultiDict(request.POST)
1418 assert UserService.by_user_name('foo') is None
1428 assert UserService.by_user_name("foo") is None
1419 1429 register(request)
1420 user = UserService.by_user_name('foo')
1421 assert user.user_name == 'foo'
1430 user = UserService.by_user_name("foo")
1431 assert user.user_name == "foo"
1422 1432 assert len(user.user_password) >= 60
1423 1433
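(The final length check matches a bcrypt-style hash, which is 60 characters long; presumably that is what ziggurat_foundations produces for user_password by default.)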
1424 1434
1425 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
1426 'default_user')
1435 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables", "default_user")
1427 1436 class TestApplicationCreation(object):
1428 1437 def test_wrong_data(self):
1429 1438 import appenlight.views.applications as applications
1430 1439 from ziggurat_foundations.models.services.user import UserService
1440
1431 1441 request = pyramid.threadlocal.get_current_request()
1432 request.user = UserService.by_user_name('testuser')
1442 request.user = UserService.by_user_name("testuser")
1433 1443 request.unsafe_json_body = {}
1434 request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token()
1444 request.headers["X-XSRF-TOKEN"] = request.session.get_csrf_token()
1435 1445 response = applications.application_create(request)
1436 1446 assert response.code == 422
1437 1447
@@ -1440,31 +1450,30 b' class TestApplicationCreation(object):'
1440 1450 from ziggurat_foundations.models.services.user import UserService
1441 1451
1442 1452 request = pyramid.threadlocal.get_current_request()
1443 request.user = UserService.by_user_name('testuser')
1444 request.unsafe_json_body = {"resource_name": "app name",
1445 "domains": "foo"}
1446 request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token()
1453 request.user = UserService.by_user_name("testuser")
1454 request.unsafe_json_body = {"resource_name": "app name", "domains": "foo"}
1455 request.headers["X-XSRF-TOKEN"] = request.session.get_csrf_token()
1447 1456 app_dict = applications.application_create(request)
1448 assert app_dict['public_key'] is not None
1449 assert app_dict['api_key'] is not None
1450 assert app_dict['resource_name'] == 'app name'
1451 assert app_dict['owner_group_id'] is None
1452 assert app_dict['resource_id'] is not None
1453 assert app_dict['default_grouping'] == 'url_traceback'
1454 assert app_dict['possible_permissions'] == ('view', 'update_reports')
1455 assert app_dict['slow_report_threshold'] == 10
1456 assert app_dict['owner_user_name'] == 'testuser'
1457 assert app_dict['owner_user_id'] == request.user.id
1458 assert app_dict['domains'] is 'foo'
1459 assert app_dict['postprocessing_rules'] == []
1460 assert app_dict['error_report_threshold'] == 10
1461 assert app_dict['allow_permanent_storage'] is False
1462 assert app_dict['resource_type'] == 'application'
1463 assert app_dict['current_permissions'] == []
1464
1465
1466 @pytest.mark.usefixtures('default_application')
1467 @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
1457 assert app_dict["public_key"] is not None
1458 assert app_dict["api_key"] is not None
1459 assert app_dict["resource_name"] == "app name"
1460 assert app_dict["owner_group_id"] is None
1461 assert app_dict["resource_id"] is not None
1462 assert app_dict["default_grouping"] == "url_traceback"
1463 assert app_dict["possible_permissions"] == ("view", "update_reports")
1464 assert app_dict["slow_report_threshold"] == 10
1465 assert app_dict["owner_user_name"] == "testuser"
1466 assert app_dict["owner_user_id"] == request.user.id
1467 assert app_dict["domains"] == "foo"
1468 assert app_dict["postprocessing_rules"] == []
1469 assert app_dict["error_report_threshold"] == 10
1470 assert app_dict["allow_permanent_storage"] is False
1471 assert app_dict["resource_type"] == "application"
1472 assert app_dict["current_permissions"] == []
1473
1474
1475 @pytest.mark.usefixtures("default_application")
1476 @pytest.mark.usefixtures("base_app", "with_migrations", "clean_tables")
1468 1477 class TestAPISentryView(object):
1469 1478 def test_no_payload(self, default_application):
1470 1479 import colander
@@ -1474,12 +1483,11 b' class TestAPISentryView(object):'
1474 1483
1475 1484 context = DummyContext()
1476 1485 context.resource = ApplicationService.by_id(1)
1477 request = testing.DummyRequest(
1478 headers={'Content-Type': 'application/json'})
1479 request.unsafe_json_body = ''
1486 request = testing.DummyRequest(headers={"Content-Type": "application/json"})
1487 request.unsafe_json_body = ""
1480 1488 request.context = context
1481 1489 route = mock.Mock()
1482 route.name = 'api_sentry'
1490 route.name = "api_sentry"
1483 1491 request.matched_route = route
1484 1492 with pytest.raises(JSONException):
1485 1493 sentry_compat(request)
@@ -1488,28 +1496,31 b' class TestAPISentryView(object):'
1488 1496 from appenlight.views.api import sentry_compat
1489 1497 from appenlight.models.services.application import ApplicationService
1490 1498 from appenlight.models.report_group import ReportGroup
1499
1491 1500 route = mock.Mock()
1492 route.name = 'api_sentry'
1501 route.name = "api_sentry"
1493 1502 request = pyramid.threadlocal.get_current_request()
1494 1503 context = DummyContext()
1495 1504 context.resource = ApplicationService.by_id(1)
1496 1505 context.resource.allow_permanent_storage = True
1497 1506 request.context = context
1498 1507 request.matched_route = route
1499 request.body = b'eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki' \
1500 b'RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87' \
1501 b'JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa' \
1502 b'fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b' \
1503 b'oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz' \
1504 b'm1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5' \
1505 b'JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+' \
1506 b'lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs' \
1507 b'3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN' \
1508 b'Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/' \
1509 b'IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P' \
1510 b'MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0' \
1511 b'Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb' \
1512 b'w7CtfWmP85SdCs8OvA53fUV19cg=='
1508 request.body = (
1509 b"eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki"
1510 b"RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87"
1511 b"JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa"
1512 b"fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b"
1513 b"oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz"
1514 b"m1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5"
1515 b"JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+"
1516 b"lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs"
1517 b"3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN"
1518 b"Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/"
1519 b"IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P"
1520 b"MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0"
1521 b"Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb"
1522 b"w7CtfWmP85SdCs8OvA53fUV19cg=="
1523 )
1513 1524 sentry_compat(request)
1514 1525 query = DBSession.query(ReportGroup)
1515 1526 report = query.first()
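The request body above is the legacy Sentry wire format: zlib-deflated JSON wrapped in base64 (hence the leading "eJy", the base64 encoding of a zlib header). A standard-library sketch for unpacking such a blob for inspection:

    import base64
    import json
    import zlib

    def decode_sentry_body(body: bytes) -> dict:
        # base64 -> zlib inflate -> JSON document
        return json.loads(zlib.decompress(base64.b64decode(body)))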
@@ -1521,16 +1532,19 b' class TestAPISentryView(object):'
1521 1532 from appenlight.models.services.application import ApplicationService
1522 1533 from appenlight.models.report_group import ReportGroup
1523 1534 from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED
1535
1524 1536 route = mock.Mock()
1525 route.name = 'api_sentry'
1537 route.name = "api_sentry"
1526 1538 request = testing.DummyRequest(
1527 headers={'Content-Type': 'application/octet-stream',
1528 'User-Agent': 'sentry-ruby/1.0.0',
1529 'X-Sentry-Auth': 'Sentry sentry_version=5, '
1530 'sentry_client=raven-ruby/1.0.0, '
1531 'sentry_timestamp=1462378483, '
1532 'sentry_key=xxx, sentry_secret=xxx'
1533 })
1539 headers={
1540 "Content-Type": "application/octet-stream",
1541 "User-Agent": "sentry-ruby/1.0.0",
1542 "X-Sentry-Auth": "Sentry sentry_version=5, "
1543 "sentry_client=raven-ruby/1.0.0, "
1544 "sentry_timestamp=1462378483, "
1545 "sentry_key=xxx, sentry_secret=xxx",
1546 }
1547 )
1534 1548 context = DummyContext()
1535 1549 context.resource = ApplicationService.by_id(1)
1536 1550 context.resource.allow_permanent_storage = True
@@ -1548,15 +1562,16 b' class TestAPISentryView(object):'
1548 1562 from appenlight.models.services.application import ApplicationService
1549 1563 from appenlight.models.report_group import ReportGroup
1550 1564 from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7
1565
1551 1566 route = mock.Mock()
1552 route.name = 'api_sentry'
1567 route.name = "api_sentry"
1553 1568 request = pyramid.threadlocal.get_current_request()
1554 1569 context = DummyContext()
1555 1570 context.resource = ApplicationService.by_id(1)
1556 1571 context.resource.allow_permanent_storage = True
1557 1572 request.context = context
1558 1573 request.matched_route = route
1559 request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode('utf8')
1574 request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode("utf8")
1560 1575 sentry_compat(request)
1561 1576 query = DBSession.query(ReportGroup)
1562 1577 report = query.first()
@@ -1568,17 +1583,20 b' class TestAPISentryView(object):'
1568 1583 from appenlight.models.services.application import ApplicationService
1569 1584 from appenlight.models.report_group import ReportGroup
1570 1585 from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED
1586
1571 1587 route = mock.Mock()
1572 route.name = 'api_sentry'
1588 route.name = "api_sentry"
1573 1589 request = testing.DummyRequest(
1574 headers={'Content-Type': 'application/octet-stream',
1575 'Content-Encoding': 'deflate',
1576 'User-Agent': 'sentry-ruby/1.0.0',
1577 'X-Sentry-Auth': 'Sentry sentry_version=5, '
1578 'sentry_client=raven-ruby/1.0.0, '
1579 'sentry_timestamp=1462378483, '
1580 'sentry_key=xxx, sentry_secret=xxx'
1581 })
1590 headers={
1591 "Content-Type": "application/octet-stream",
1592 "Content-Encoding": "deflate",
1593 "User-Agent": "sentry-ruby/1.0.0",
1594 "X-Sentry-Auth": "Sentry sentry_version=5, "
1595 "sentry_client=raven-ruby/1.0.0, "
1596 "sentry_timestamp=1462378483, "
1597 "sentry_key=xxx, sentry_secret=xxx",
1598 }
1599 )
1582 1600 context = DummyContext()
1583 1601 context.resource = ApplicationService.by_id(1)
1584 1602 context.resource.allow_permanent_storage = True
@@ -21,33 +21,35 b' from colander import null'
21 21
22 22 # those keywords are here so we can distinguish between searching for tags and
23 23 # normal properties of reports/logs
24 accepted_search_params = ['resource',
25 'request_id',
26 'start_date',
27 'end_date',
28 'page',
29 'min_occurences',
30 'http_status',
31 'priority',
32 'error',
33 'url_path',
34 'url_domain',
35 'report_status',
36 'min_duration',
37 'max_duration',
38 'message',
39 'level',
40 'namespace']
24 accepted_search_params = [
25 "resource",
26 "request_id",
27 "start_date",
28 "end_date",
29 "page",
30 "min_occurences",
31 "http_status",
32 "priority",
33 "error",
34 "url_path",
35 "url_domain",
36 "report_status",
37 "min_duration",
38 "max_duration",
39 "message",
40 "level",
41 "namespace",
42 ]
41 43
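As the comment above the list notes, any search key outside accepted_search_params is treated as a tag lookup rather than a report/log property. A hypothetical helper (not part of AppEnlight) illustrating that split:

    from appenlight.validators import accepted_search_params

    def split_search_params(params):
        # property filters vs. tag filters, keyed off the whitelist above
        regular = {k: v for k, v in params.items() if k in accepted_search_params}
        tags = {k: v for k, v in params.items() if k not in accepted_search_params}
        return regular, tags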
42 44
43 45 @colander.deferred
44 46 def deferred_utcnow(node, kw):
45 return kw['utcnow']
47 return kw["utcnow"]
46 48
47 49
48 50 @colander.deferred
49 51 def optional_limited_date(node, kw):
50 if not kw.get('allow_permanent_storage'):
52 if not kw.get("allow_permanent_storage"):
51 53 return limited_date
52 54
53 55
@@ -123,21 +125,21 b' class UnknownType(object):'
123 125
124 126 # SLOW REPORT SCHEMA
125 127
128
126 129 def rewrite_type(input_data):
127 130 """
128 131 Fix for legacy appenlight clients
129 132 """
130 if input_data == 'remote_call':
131 return 'remote'
133 if input_data == "remote_call":
134 return "remote"
132 135 return input_data
133 136
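rewrite_type is wired in as a colander preparer on SlowCallSchema below, so the legacy value is normalized before the OneOf validator runs:

    from appenlight.validators import rewrite_type

    assert rewrite_type("remote_call") == "remote"  # legacy client spelling
    assert rewrite_type("sql") == "sql"             # everything else passes through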
134 137
135 138 class ExtraTupleSchema(colander.TupleSchema):
136 name = colander.SchemaNode(colander.String(),
137 validator=colander.Length(1, 64))
138 value = colander.SchemaNode(UnknownType(),
139 preparer=shortener_factory(512),
140 missing=None)
139 name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 64))
140 value = colander.SchemaNode(
141 UnknownType(), preparer=shortener_factory(512), missing=None
142 )
141 143
142 144
143 145 class ExtraSchemaList(colander.SequenceSchema):
@@ -146,11 +148,10 b' class ExtraSchemaList(colander.SequenceSchema):'
146 148
147 149
148 150 class TagsTupleSchema(colander.TupleSchema):
149 name = colander.SchemaNode(colander.String(),
150 validator=colander.Length(1, 128))
151 value = colander.SchemaNode(UnknownType(),
152 preparer=shortener_factory(128),
153 missing=None)
151 name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 128))
152 value = colander.SchemaNode(
153 UnknownType(), preparer=shortener_factory(128), missing=None
154 )
154 155
155 156
156 157 class TagSchemaList(colander.SequenceSchema):
@@ -159,8 +160,7 b' class TagSchemaList(colander.SequenceSchema):'
159 160
160 161
161 162 class NumericTagsTupleSchema(colander.TupleSchema):
162 name = colander.SchemaNode(colander.String(),
163 validator=colander.Length(1, 128))
163 name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 128))
164 164 value = colander.SchemaNode(colander.Float(), missing=0)
165 165
166 166
@@ -173,41 +173,46 b' class SlowCallSchema(colander.MappingSchema):'
173 173 """
174 174 Validates slow call format in slow call list
175 175 """
176
176 177 start = colander.SchemaNode(NonTZDate())
177 178 end = colander.SchemaNode(NonTZDate())
178 statement = colander.SchemaNode(colander.String(), missing='')
179 statement = colander.SchemaNode(colander.String(), missing="")
179 180 parameters = colander.SchemaNode(UnknownType(), missing=None)
180 181 type = colander.SchemaNode(
181 182 colander.String(),
182 183 preparer=rewrite_type,
183 184 validator=colander.OneOf(
184 ['tmpl', 'sql', 'nosql', 'remote', 'unknown', 'custom']),
185 missing='unknown')
186 subtype = colander.SchemaNode(colander.String(),
187 validator=colander.Length(1, 16),
188 missing='unknown')
189 location = colander.SchemaNode(colander.String(),
190 validator=colander.Length(1, 255),
191 missing='')
185 ["tmpl", "sql", "nosql", "remote", "unknown", "custom"]
186 ),
187 missing="unknown",
188 )
189 subtype = colander.SchemaNode(
190 colander.String(), validator=colander.Length(1, 16), missing="unknown"
191 )
192 location = colander.SchemaNode(
193 colander.String(), validator=colander.Length(1, 255), missing=""
194 )
192 195
193 196
194 197 def limited_date(node, value):
195 198 """ checks to make sure that the value is not older than 72h or newer than 2h """
196 199 past_hours = 72
197 200 future_hours = 2
198 min_time = datetime.datetime.utcnow() - datetime.timedelta(
199 hours=past_hours)
200 max_time = datetime.datetime.utcnow() + datetime.timedelta(
201 hours=future_hours)
201 min_time = datetime.datetime.utcnow() - datetime.timedelta(hours=past_hours)
202 max_time = datetime.datetime.utcnow() + datetime.timedelta(hours=future_hours)
202 203 if min_time > value:
203 msg = '%r is older from current UTC time by ' + str(past_hours)
204 msg += ' hours. Ask administrator to enable permanent logging for ' \
205 'your application to store logs with dates in past.'
204 msg = "%r is older than current UTC time by " + str(past_hours)
205 msg += (
206 " hours. Ask an administrator to enable permanent logging for "
207 "your application to store logs with dates in the past."
208 )
206 209 raise colander.Invalid(node, msg % value)
207 210 if max_time < value:
208 msg = '%r is newer from current UTC time by ' + str(future_hours)
209 msg += ' hours. Ask administrator to enable permanent logging for ' \
210 'your application to store logs with dates in future.'
211 msg = "%r is newer than current UTC time by " + str(future_hours)
212 msg += (
213 " hours. Ask an administrator to enable permanent logging for "
214 "your application to store logs with dates in the future."
215 )
211 216 raise colander.Invalid(node, msg % value)
212 217
213 218
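The window is asymmetric: 72 hours into the past, 2 hours into the future. A quick sketch of the validator rejecting a stale timestamp:

    import datetime
    import colander
    from appenlight.validators import limited_date

    node = colander.SchemaNode(colander.DateTime())
    stale = datetime.datetime.utcnow() - datetime.timedelta(hours=73)
    try:
        limited_date(node, stale)
    except colander.Invalid:
        pass  # older than 72h; only permanent storage accepts such dates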
@@ -215,6 +220,7 b' class SlowCallListSchema(colander.SequenceSchema):'
215 220 """
216 221 Validates list of individual slow calls
217 222 """
223
218 224 slow_call = SlowCallSchema()
219 225
220 226
@@ -222,52 +228,54 b' class RequestStatsSchema(colander.MappingSchema):'
222 228 """
223 229 Validates format of requests statistics dictionary
224 230 """
225 main = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
226 missing=0)
227 sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
228 missing=0)
229 nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
230 missing=0)
231 remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
232 missing=0)
233 tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
234 missing=0)
235 custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
236 missing=0)
237 sql_calls = colander.SchemaNode(colander.Float(),
238 validator=colander.Range(0),
239 missing=0)
240 nosql_calls = colander.SchemaNode(colander.Float(),
241 validator=colander.Range(0),
242 missing=0)
243 remote_calls = colander.SchemaNode(colander.Float(),
244 validator=colander.Range(0),
245 missing=0)
246 tmpl_calls = colander.SchemaNode(colander.Float(),
247 validator=colander.Range(0),
248 missing=0)
249 custom_calls = colander.SchemaNode(colander.Float(),
250 validator=colander.Range(0),
251 missing=0)
231
232 main = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)
233 sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)
234 nosql = colander.SchemaNode(
235 colander.Float(), validator=colander.Range(0), missing=0
236 )
237 remote = colander.SchemaNode(
238 colander.Float(), validator=colander.Range(0), missing=0
239 )
240 tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0), missing=0)
241 custom = colander.SchemaNode(
242 colander.Float(), validator=colander.Range(0), missing=0
243 )
244 sql_calls = colander.SchemaNode(
245 colander.Float(), validator=colander.Range(0), missing=0
246 )
247 nosql_calls = colander.SchemaNode(
248 colander.Float(), validator=colander.Range(0), missing=0
249 )
250 remote_calls = colander.SchemaNode(
251 colander.Float(), validator=colander.Range(0), missing=0
252 )
253 tmpl_calls = colander.SchemaNode(
254 colander.Float(), validator=colander.Range(0), missing=0
255 )
256 custom_calls = colander.SchemaNode(
257 colander.Float(), validator=colander.Range(0), missing=0
258 )
252 259
253 260
254 261 class FrameInfoVarSchema(colander.SequenceSchema):
255 262 """
256 263 Validates format of frame variables of a traceback
257 264 """
258 vars = colander.SchemaNode(UnknownType(),
259 validator=colander.Length(2, 2))
265
266 vars = colander.SchemaNode(UnknownType(), validator=colander.Length(2, 2))
260 267
261 268
262 269 class FrameInfoSchema(colander.MappingSchema):
263 270 """
264 271 Validates format of a traceback line
265 272 """
266 cline = colander.SchemaNode(colander.String(), missing='')
267 module = colander.SchemaNode(colander.String(), missing='')
268 line = colander.SchemaNode(colander.String(), missing='')
269 file = colander.SchemaNode(colander.String(), missing='')
270 fn = colander.SchemaNode(colander.String(), missing='')
273
274 cline = colander.SchemaNode(colander.String(), missing="")
275 module = colander.SchemaNode(colander.String(), missing="")
276 line = colander.SchemaNode(colander.String(), missing="")
277 file = colander.SchemaNode(colander.String(), missing="")
278 fn = colander.SchemaNode(colander.String(), missing="")
271 279 vars = FrameInfoVarSchema()
272 280
273 281
@@ -275,6 +283,7 b' class FrameInfoListSchema(colander.SequenceSchema):'
275 283 """
276 284 Validates format of list of traceback lines
277 285 """
286
278 287 frame = colander.SchemaNode(UnknownType())
279 288
280 289
@@ -282,36 +291,40 b' class ReportDetailBaseSchema(colander.MappingSchema):'
282 291 """
283 292 Validates format of a report, i.e. request parameters and stats for a request in a report group
284 293 """
285 username = colander.SchemaNode(colander.String(),
286 preparer=[shortener_factory(255),
287 lambda x: x or ''],
288 missing='')
289 request_id = colander.SchemaNode(colander.String(),
290 preparer=shortener_factory(40),
291 missing='')
292 url = colander.SchemaNode(colander.String(),
293 preparer=shortener_factory(1024), missing='')
294 ip = colander.SchemaNode(colander.String(), preparer=shortener_factory(39),
295 missing=None)
296 start_time = colander.SchemaNode(NonTZDate(),
297 validator=optional_limited_date,
298 missing=deferred_utcnow)
299 end_time = colander.SchemaNode(NonTZDate(),
300 validator=optional_limited_date,
301 missing=None)
302 user_agent = colander.SchemaNode(colander.String(),
303 preparer=[shortener_factory(512),
304 lambda x: x or ''],
305 missing='')
306 message = colander.SchemaNode(colander.String(),
307 preparer=shortener_factory(2048),
308 missing='')
309 group_string = colander.SchemaNode(colander.String(),
310 preparer=shortener_factory(512),
311 missing=None)
294
295 username = colander.SchemaNode(
296 colander.String(),
297 preparer=[shortener_factory(255), lambda x: x or ""],
298 missing="",
299 )
300 request_id = colander.SchemaNode(
301 colander.String(), preparer=shortener_factory(40), missing=""
302 )
303 url = colander.SchemaNode(
304 colander.String(), preparer=shortener_factory(1024), missing=""
305 )
306 ip = colander.SchemaNode(
307 colander.String(), preparer=shortener_factory(39), missing=None
308 )
309 start_time = colander.SchemaNode(
310 NonTZDate(), validator=optional_limited_date, missing=deferred_utcnow
311 )
312 end_time = colander.SchemaNode(
313 NonTZDate(), validator=optional_limited_date, missing=None
314 )
315 user_agent = colander.SchemaNode(
316 colander.String(),
317 preparer=[shortener_factory(512), lambda x: x or ""],
318 missing="",
319 )
320 message = colander.SchemaNode(
321 colander.String(), preparer=shortener_factory(2048), missing=""
322 )
323 group_string = colander.SchemaNode(
324 colander.String(), preparer=shortener_factory(512), missing=None
325 )
312 326 request_stats = RequestStatsSchema(missing=None)
313 request = colander.SchemaNode(colander.Mapping(unknown='preserve'),
314 missing={})
327 request = colander.SchemaNode(colander.Mapping(unknown="preserve"), missing={})
315 328 traceback = FrameInfoListSchema(missing=None)
316 329 slow_calls = SlowCallListSchema(missing=[])
317 330 extra = ExtraSchemaList()
@@ -330,32 +343,35 b' class ReportSchemaBase(colander.MappingSchema):'
330 343 """
331 344 Validates format of report group
332 345 """
333 client = colander.SchemaNode(colander.String(),
334 preparer=lambda x: x or 'unknown')
346
347 client = colander.SchemaNode(colander.String(), preparer=lambda x: x or "unknown")
335 348 server = colander.SchemaNode(
336 349 colander.String(),
337 preparer=[
338 lambda x: x.lower() if x else 'unknown', shortener_factory(128)],
339 missing='unknown')
340 priority = colander.SchemaNode(colander.Int(),
350 preparer=[lambda x: x.lower() if x else "unknown", shortener_factory(128)],
351 missing="unknown",
352 )
353 priority = colander.SchemaNode(
354 colander.Int(),
341 355 preparer=[lambda x: x or 5],
342 356 validator=colander.Range(1, 10),
343 missing=5)
344 language = colander.SchemaNode(colander.String(), missing='unknown')
345 error = colander.SchemaNode(colander.String(),
346 preparer=shortener_factory(512),
347 missing='')
348 view_name = colander.SchemaNode(colander.String(),
349 preparer=[shortener_factory(128),
350 lambda x: x or ''],
351 missing='')
352 http_status = colander.SchemaNode(colander.Int(),
353 preparer=[lambda x: x or 200],
354 validator=colander.Range(1))
355
356 occurences = colander.SchemaNode(colander.Int(),
357 validator=colander.Range(1, 99999999999),
358 missing=1)
357 missing=5,
358 )
359 language = colander.SchemaNode(colander.String(), missing="unknown")
360 error = colander.SchemaNode(
361 colander.String(), preparer=shortener_factory(512), missing=""
362 )
363 view_name = colander.SchemaNode(
364 colander.String(),
365 preparer=[shortener_factory(128), lambda x: x or ""],
366 missing="",
367 )
368 http_status = colander.SchemaNode(
369 colander.Int(), preparer=[lambda x: x or 200], validator=colander.Range(1)
370 )
371
372 occurences = colander.SchemaNode(
373 colander.Int(), validator=colander.Range(1, 99999999999), missing=1
374 )
359 375 tags = TagSchemaList()
360 376
361 377
@@ -363,8 +379,9 b' class ReportSchema_0_5(ReportSchemaBase, ReportDetailSchema_0_5):'
363 379 pass
364 380
365 381
366 class ReportSchemaPermissiveDate_0_5(ReportSchemaBase,
367 ReportDetailSchemaPermissiveDate_0_5):
382 class ReportSchemaPermissiveDate_0_5(
383 ReportSchemaBase, ReportDetailSchemaPermissiveDate_0_5
384 ):
368 385 pass
369 386
370 387
@@ -372,6 +389,7 b' class ReportListSchema_0_5(colander.SequenceSchema):'
372 389 """
373 390 Validates format of list of report groups
374 391 """
392
375 393 report = ReportSchema_0_5()
376 394 validator = colander.Length(1)
377 395
@@ -380,6 +398,7 b' class ReportListPermissiveDateSchema_0_5(colander.SequenceSchema):'
380 398 """
381 399 Validates format of list of report groups
382 400 """
401
383 402 report = ReportSchemaPermissiveDate_0_5()
384 403 validator = colander.Length(1)
385 404
@@ -388,34 +407,35 b' class LogSchema(colander.MappingSchema):'
388 407 """
389 408 Validates format of individual log entry
390 409 """
391 primary_key = colander.SchemaNode(UnknownType(),
392 preparer=[cast_to_unicode_or_null,
393 shortener_factory(128)],
394 missing=None)
395 log_level = colander.SchemaNode(colander.String(),
396 preparer=shortener_factory(10),
397 missing='UNKNOWN')
398 message = colander.SchemaNode(colander.String(),
399 preparer=shortener_factory(4096),
400 missing='')
401 namespace = colander.SchemaNode(colander.String(),
402 preparer=shortener_factory(128),
403 missing='')
404 request_id = colander.SchemaNode(colander.String(),
405 preparer=shortener_factory(40),
406 missing='')
407 server = colander.SchemaNode(colander.String(),
408 preparer=shortener_factory(128),
409 missing='unknown')
410 date = colander.SchemaNode(NonTZDate(),
411 validator=limited_date,
412 missing=deferred_utcnow)
410
411 primary_key = colander.SchemaNode(
412 UnknownType(),
413 preparer=[cast_to_unicode_or_null, shortener_factory(128)],
414 missing=None,
415 )
416 log_level = colander.SchemaNode(
417 colander.String(), preparer=shortener_factory(10), missing="UNKNOWN"
418 )
419 message = colander.SchemaNode(
420 colander.String(), preparer=shortener_factory(4096), missing=""
421 )
422 namespace = colander.SchemaNode(
423 colander.String(), preparer=shortener_factory(128), missing=""
424 )
425 request_id = colander.SchemaNode(
426 colander.String(), preparer=shortener_factory(40), missing=""
427 )
428 server = colander.SchemaNode(
429 colander.String(), preparer=shortener_factory(128), missing="unknown"
430 )
431 date = colander.SchemaNode(
432 NonTZDate(), validator=limited_date, missing=deferred_utcnow
433 )
413 434 tags = TagSchemaList()
414 435
415 436
416 437 class LogSchemaPermanent(LogSchema):
417 date = colander.SchemaNode(NonTZDate(),
418 missing=deferred_utcnow)
438 date = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow)
419 439 permanent = colander.SchemaNode(colander.Boolean(), missing=False)
420 440
421 441
@@ -423,6 +443,7 b' class LogListSchema(colander.SequenceSchema):'
423 443 """
424 444 Validates format of list of log entries
425 445 """
446
426 447 log = LogSchema()
427 448 validator = colander.Length(1)
428 449
@@ -431,14 +452,15 b' class LogListPermanentSchema(colander.SequenceSchema):'
431 452 """
432 453 Validates format of list of log entries
433 454 """
455
434 456 log = LogSchemaPermanent()
435 457 validator = colander.Length(1)
436 458
437 459
438 460 class ViewRequestStatsSchema(RequestStatsSchema):
439 requests = colander.SchemaNode(colander.Integer(),
440 validator=colander.Range(0),
441 missing=0)
461 requests = colander.SchemaNode(
462 colander.Integer(), validator=colander.Range(0), missing=0
463 )
442 464
443 465
444 466 class ViewMetricTupleSchema(colander.TupleSchema):
@@ -446,10 +468,12 b' class ViewMetricTupleSchema(colander.TupleSchema):'
446 468 Validates list of views and their corresponding request stats objects, e.g.:
447 469 ["dir/module:func",{"custom": 0.0..}]
448 470 """
449 view_name = colander.SchemaNode(colander.String(),
450 preparer=[shortener_factory(128),
451 lambda x: x or 'unknown'],
452 missing='unknown')
471
472 view_name = colander.SchemaNode(
473 colander.String(),
474 preparer=[shortener_factory(128), lambda x: x or "unknown"],
475 missing="unknown",
476 )
453 477 metrics = ViewRequestStatsSchema()
454 478
455 479
@@ -458,6 +482,7 b' class ViewMetricListSchema(colander.SequenceSchema):'
458 482 Validates view breakdown stats objects list
459 483 {metrics key of server/time object}
460 484 """
485
461 486 view_tuple = ViewMetricTupleSchema()
462 487 validator = colander.Length(1)
463 488
@@ -468,13 +493,13 b' class ViewMetricSchema(colander.MappingSchema):'
468 493 {server/time object}
469 494
470 495 """
471 timestamp = colander.SchemaNode(NonTZDate(),
472 validator=limited_date,
473 missing=None)
474 server = colander.SchemaNode(colander.String(),
475 preparer=[shortener_factory(128),
476 lambda x: x or 'unknown'],
477 missing='unknown')
496
497 timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date, missing=None)
498 server = colander.SchemaNode(
499 colander.String(),
500 preparer=[shortener_factory(128), lambda x: x or "unknown"],
501 missing="unknown",
502 )
478 503 metrics = ViewMetricListSchema()
479 504
480 505
@@ -483,15 +508,19 b' class GeneralMetricSchema(colander.MappingSchema):'
483 508 Validates universal metric schema
484 509
485 510 """
486 namespace = colander.SchemaNode(colander.String(), missing='',
487 preparer=shortener_factory(128))
488 511
489 server_name = colander.SchemaNode(colander.String(),
490 preparer=[shortener_factory(128),
491 lambda x: x or 'unknown'],
492 missing='unknown')
493 timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date,
494 missing=deferred_utcnow)
512 namespace = colander.SchemaNode(
513 colander.String(), missing="", preparer=shortener_factory(128)
514 )
515
516 server_name = colander.SchemaNode(
517 colander.String(),
518 preparer=[shortener_factory(128), lambda x: x or "unknown"],
519 missing="unknown",
520 )
521 timestamp = colander.SchemaNode(
522 NonTZDate(), validator=limited_date, missing=deferred_utcnow
523 )
495 524 tags = TagSchemaList(missing=colander.required)
496 525
497 526
@@ -500,6 +529,7 b' class GeneralMetricPermanentSchema(GeneralMetricSchema):'
500 529 Validates universal metric schema
501 530
502 531 """
532
503 533 timestamp = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow)
504 534
505 535
@@ -520,6 +550,7 b' class MetricsListSchema(colander.SequenceSchema):'
520 550
521 551
522 552 """
553
523 554 metric = ViewMetricSchema()
524 555 validator = colander.Length(1)
525 556
@@ -540,7 +571,7 b' class StringToAppList(object):'
540 571 if cstruct is null:
541 572 return null
542 573
543 apps = set([int(a) for a in node.bindings['resources']])
574 apps = set([int(a) for a in node.bindings["resources"]])
544 575
545 576 if isinstance(cstruct, str):
546 577 cstruct = [cstruct]
@@ -558,41 +589,41 b' class StringToAppList(object):'
558 589
559 590 @colander.deferred
560 591 def possible_applications_validator(node, kw):
561 possible_apps = [int(a) for a in kw['resources']]
562 return colander.All(colander.ContainsOnly(possible_apps),
563 colander.Length(1))
592 possible_apps = [int(a) for a in kw["resources"]]
593 return colander.All(colander.ContainsOnly(possible_apps), colander.Length(1))
564 594
565 595
566 596 @colander.deferred
567 597 def possible_applications(node, kw):
568 return [int(a) for a in kw['resources']]
598 return [int(a) for a in kw["resources"]]
569 599
570 600
571 601 @colander.deferred
572 602 def today_start(node, kw):
573 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
574 minute=0,
575 hour=0)
603 return datetime.datetime.utcnow().replace(second=0, microsecond=0, minute=0, hour=0)
576 604
577 605
578 606 @colander.deferred
579 607 def today_end(node, kw):
580 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
581 minute=59, hour=23)
608 return datetime.datetime.utcnow().replace(
609 second=0, microsecond=0, minute=59, hour=23
610 )
582 611
583 612
584 613 @colander.deferred
585 614 def old_start(node, kw):
586 615 t_delta = datetime.timedelta(days=90)
587 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
588 minute=0,
589 hour=0) - t_delta
616 return (
617 datetime.datetime.utcnow().replace(second=0, microsecond=0, minute=0, hour=0)
618 - t_delta
619 )
590 620
591 621
592 622 @colander.deferred
593 623 def today_end(node, kw):
594 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
595 minute=59, hour=23)
624 return datetime.datetime.utcnow().replace(
625 second=0, microsecond=0, minute=59, hour=23
626 )
596 627
597 628
598 629 class PermissiveDate(colander.DateTime):
@@ -604,7 +635,8 b' class PermissiveDate(colander.DateTime):'
604 635
605 636 try:
606 637 result = colander.iso8601.parse_date(
607 cstruct, default_timezone=self.default_tzinfo)
638 cstruct, default_timezone=self.default_tzinfo
639 )
608 640 except colander.iso8601.ParseError:
609 641 return null
610 642 return result.replace(tzinfo=None)
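Unlike the strict colander.DateTime, PermissiveDate swallows unparseable input and returns null, which the search schemas below pair with missing=None. A sketch, assuming colander's usual deserialize(node, cstruct) type signature:

    import colander
    from appenlight.validators import PermissiveDate

    node = colander.SchemaNode(colander.DateTime())
    assert PermissiveDate().deserialize(node, "not-a-date") is colander.null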
@@ -612,99 +644,126 b' class PermissiveDate(colander.DateTime):'
612 644
613 645 class LogSearchSchema(colander.MappingSchema):
614 646 def schema_type(self, **kw):
615 return colander.Mapping(unknown='preserve')
647 return colander.Mapping(unknown="preserve")
616 648
617 resource = colander.SchemaNode(StringToAppList(),
649 resource = colander.SchemaNode(
650 StringToAppList(),
618 651 validator=possible_applications_validator,
619 missing=possible_applications)
652 missing=possible_applications,
653 )
620 654
621 message = colander.SchemaNode(colander.Sequence(accept_scalar=True),
655 message = colander.SchemaNode(
656 colander.Sequence(accept_scalar=True),
622 657 colander.SchemaNode(colander.String()),
623 missing=None)
624 level = colander.SchemaNode(colander.Sequence(accept_scalar=True),
658 missing=None,
659 )
660 level = colander.SchemaNode(
661 colander.Sequence(accept_scalar=True),
625 662 colander.SchemaNode(colander.String()),
626 663 preparer=lowercase_preparer,
627 missing=None)
628 namespace = colander.SchemaNode(colander.Sequence(accept_scalar=True),
664 missing=None,
665 )
666 namespace = colander.SchemaNode(
667 colander.Sequence(accept_scalar=True),
629 668 colander.SchemaNode(colander.String()),
630 669 preparer=lowercase_preparer,
631 missing=None)
632 request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
670 missing=None,
671 )
672 request_id = colander.SchemaNode(
673 colander.Sequence(accept_scalar=True),
633 674 colander.SchemaNode(colander.String()),
634 675 preparer=lowercase_preparer,
635 missing=None)
636 start_date = colander.SchemaNode(PermissiveDate(),
637 missing=None)
638 end_date = colander.SchemaNode(PermissiveDate(),
639 missing=None)
640 page = colander.SchemaNode(colander.Integer(),
641 validator=colander.Range(min=1),
642 missing=1)
676 missing=None,
677 )
678 start_date = colander.SchemaNode(PermissiveDate(), missing=None)
679 end_date = colander.SchemaNode(PermissiveDate(), missing=None)
680 page = colander.SchemaNode(
681 colander.Integer(), validator=colander.Range(min=1), missing=1
682 )
643 683
644 684
645 685 class ReportSearchSchema(colander.MappingSchema):
646 686 def schema_type(self, **kw):
647 return colander.Mapping(unknown='preserve')
687 return colander.Mapping(unknown="preserve")
648 688
649 resource = colander.SchemaNode(StringToAppList(),
689 resource = colander.SchemaNode(
690 StringToAppList(),
650 691 validator=possible_applications_validator,
651 missing=possible_applications)
652 request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
692 missing=possible_applications,
693 )
694 request_id = colander.SchemaNode(
695 colander.Sequence(accept_scalar=True),
653 696 colander.SchemaNode(colander.String()),
654 missing=None)
655 start_date = colander.SchemaNode(PermissiveDate(),
656 missing=None)
657 end_date = colander.SchemaNode(PermissiveDate(),
658 missing=None)
659 page = colander.SchemaNode(colander.Integer(),
660 validator=colander.Range(min=1),
661 missing=1)
697 missing=None,
698 )
699 start_date = colander.SchemaNode(PermissiveDate(), missing=None)
700 end_date = colander.SchemaNode(PermissiveDate(), missing=None)
701 page = colander.SchemaNode(
702 colander.Integer(), validator=colander.Range(min=1), missing=1
703 )
662 704
663 705 min_occurences = colander.SchemaNode(
664 706 colander.Sequence(accept_scalar=True),
665 707 colander.SchemaNode(colander.Integer()),
666 missing=None)
708 missing=None,
709 )
667 710
668 http_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
711 http_status = colander.SchemaNode(
712 colander.Sequence(accept_scalar=True),
669 713 colander.SchemaNode(colander.Integer()),
670 missing=None)
671 priority = colander.SchemaNode(colander.Sequence(accept_scalar=True),
714 missing=None,
715 )
716 priority = colander.SchemaNode(
717 colander.Sequence(accept_scalar=True),
672 718 colander.SchemaNode(colander.Integer()),
673 missing=None)
674 error = colander.SchemaNode(colander.Sequence(accept_scalar=True),
719 missing=None,
720 )
721 error = colander.SchemaNode(
722 colander.Sequence(accept_scalar=True),
675 723 colander.SchemaNode(colander.String()),
676 missing=None)
677 url_path = colander.SchemaNode(colander.Sequence(accept_scalar=True),
724 missing=None,
725 )
726 url_path = colander.SchemaNode(
727 colander.Sequence(accept_scalar=True),
678 728 colander.SchemaNode(colander.String()),
679 missing=None)
680 url_domain = colander.SchemaNode(colander.Sequence(accept_scalar=True),
729 missing=None,
730 )
731 url_domain = colander.SchemaNode(
732 colander.Sequence(accept_scalar=True),
681 733 colander.SchemaNode(colander.String()),
682 missing=None)
683 report_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
734 missing=None,
735 )
736 report_status = colander.SchemaNode(
737 colander.Sequence(accept_scalar=True),
684 738 colander.SchemaNode(colander.String()),
685 missing=None)
686 min_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
739 missing=None,
740 )
741 min_duration = colander.SchemaNode(
742 colander.Sequence(accept_scalar=True),
687 743 colander.SchemaNode(colander.Float()),
688 missing=None)
689 max_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
744 missing=None,
745 )
746 max_duration = colander.SchemaNode(
747 colander.Sequence(accept_scalar=True),
690 748 colander.SchemaNode(colander.Float()),
691 missing=None)
749 missing=None,
750 )
692 751
693 752
694 753 class TagSchema(colander.MappingSchema):
695 754 """
696 755 Used in log search
697 756 """
698 name = colander.SchemaNode(colander.String(),
699 validator=colander.Length(1, 32))
700 value = colander.SchemaNode(colander.Sequence(accept_scalar=True),
701 colander.SchemaNode(colander.String(),
702 validator=colander.Length(
703 1, 128)),
704 missing=None)
705 op = colander.SchemaNode(colander.String(),
706 validator=colander.Length(1, 128),
707 missing=None)
757
758 name = colander.SchemaNode(colander.String(), validator=colander.Length(1, 32))
759 value = colander.SchemaNode(
760 colander.Sequence(accept_scalar=True),
761 colander.SchemaNode(colander.String(), validator=colander.Length(1, 128)),
762 missing=None,
763 )
764 op = colander.SchemaNode(
765 colander.String(), validator=colander.Length(1, 128), missing=None
766 )
708 767
709 768
710 769 class TagListSchema(colander.SequenceSchema):
@@ -720,16 +779,16 b' class RuleFieldType(object):'
720 779
721 780 def __call__(self, node, value):
722 781 try:
723 if self.cast_to == 'int':
782 if self.cast_to == "int":
724 783 int(value)
725 elif self.cast_to == 'float':
784 elif self.cast_to == "float":
726 785 float(value)
727 elif self.cast_to == 'unicode':
786 elif self.cast_to == "unicode":
728 787 str(value)
729 788 except:
730 raise colander.Invalid(node,
731 "Can't cast {} to {}".format(
732 value, self.cast_to))
789 raise colander.Invalid(
790 node, "Can't cast {} to {}".format(value, self.cast_to)
791 )
733 792
734 793
735 794 def build_rule_schema(ruleset, check_matrix):
@@ -739,23 +798,27 b' def build_rule_schema(ruleset, check_matrix):'
739 798 """
740 799
741 800 schema = colander.SchemaNode(colander.Mapping())
742 schema.add(colander.SchemaNode(colander.String(), name='field'))
801 schema.add(colander.SchemaNode(colander.String(), name="field"))
743 802
744 if ruleset['field'] in ['__AND__', '__OR__', '__NOT__']:
745 subrules = colander.SchemaNode(colander.Tuple(), name='rules')
746 for rule in ruleset['rules']:
803 if ruleset["field"] in ["__AND__", "__OR__", "__NOT__"]:
804 subrules = colander.SchemaNode(colander.Tuple(), name="rules")
805 for rule in ruleset["rules"]:
747 806 subrules.add(build_rule_schema(rule, check_matrix))
748 807 schema.add(subrules)
749 808 else:
750 op_choices = check_matrix[ruleset['field']]['ops']
751 cast_to = check_matrix[ruleset['field']]['type']
752 schema.add(colander.SchemaNode(colander.String(),
753 validator=colander.OneOf(op_choices),
754 name='op'))
755
756 schema.add(colander.SchemaNode(colander.String(),
757 name='value',
758 validator=RuleFieldType(cast_to)))
809 op_choices = check_matrix[ruleset["field"]]["ops"]
810 cast_to = check_matrix[ruleset["field"]]["type"]
811 schema.add(
812 colander.SchemaNode(
813 colander.String(), validator=colander.OneOf(op_choices), name="op"
814 )
815 )
816
817 schema.add(
818 colander.SchemaNode(
819 colander.String(), name="value", validator=RuleFieldType(cast_to)
820 )
821 )
759 822 return schema
760 823
761 824
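For composite rulesets the function recurses: a field of __AND__, __OR__ or __NOT__ yields a "rules" tuple of sub-schemas instead of op/value nodes. A short sketch under the same illustrative matrix as earlier:

    from appenlight.validators import build_rule_schema

    matrix = {"http_status": {"type": "int", "ops": ("eq", "ne", "ge", "le")}}
    nested = {
        "field": "__AND__",
        "rules": [
            {"op": "ge", "field": "http_status", "value": "500"},
            {"op": "ne", "field": "http_status", "value": "502"},
        ],
    }
    schema = build_rule_schema(nested, matrix)
    schema.deserialize(nested)  # each sub-rule is validated by its own sub-schema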
@@ -28,148 +28,206 b' def includeme(config):'
28 28 """Add the application's view handlers.
29 29 """
30 30
31 config.add_route('/', '/')
32 config.add_route('angular_app_ui_ix', '/ui')
33 config.add_route('angular_app_ui', '/ui/*remainder')
31 config.add_route("/", "/")
32 config.add_route("angular_app_ui_ix", "/ui")
33 config.add_route("angular_app_ui", "/ui/*remainder")
34 34
35 35 # applications API
36 config.add_route('applications_no_id', '/applications')
37 config.add_route('applications', '/applications/{resource_id}',
38 factory='appenlight.security.ResourceFactory')
39 config.add_route('applications_property',
40 '/applications/{resource_id}/{key}',
41 factory='appenlight.security.ResourceFactory')
36 config.add_route("applications_no_id", "/applications")
42 37 config.add_route(
43 'integrations_id',
44 '/applications/{resource_id}/integrations/{integration}/{action}',
45 factory='appenlight.security.ResourceFactory')
38 "applications",
39 "/applications/{resource_id}",
40 factory="appenlight.security.ResourceFactory",
41 )
42 config.add_route(
43 "applications_property",
44 "/applications/{resource_id}/{key}",
45 factory="appenlight.security.ResourceFactory",
46 )
47 config.add_route(
48 "integrations_id",
49 "/applications/{resource_id}/integrations/{integration}/{action}",
50 factory="appenlight.security.ResourceFactory",
51 )
46 52
47 53 # users API
48 config.add_route('users_self', '/users/self')
49 config.add_route('users_self_property', '/users/self/{key}')
50 config.add_route('users_no_id', '/users')
51 config.add_route('users', '/users/{user_id}')
52 config.add_route('users_property', '/users/{user_id}/{key}')
54 config.add_route("users_self", "/users/self")
55 config.add_route("users_self_property", "/users/self/{key}")
56 config.add_route("users_no_id", "/users")
57 config.add_route("users", "/users/{user_id}")
58 config.add_route("users_property", "/users/{user_id}/{key}")
53 59
54 60 # events
55 config.add_route('events_no_id', '/events')
56 config.add_route('events', '/events/{event_id}')
57 config.add_route('events_property', '/events/{event_id}/{key}')
61 config.add_route("events_no_id", "/events")
62 config.add_route("events", "/events/{event_id}")
63 config.add_route("events_property", "/events/{event_id}/{key}")
58 64
59 65 # groups
60 config.add_route('groups_no_id', '/groups')
61 config.add_route('groups', '/groups/{group_id}')
62 config.add_route('groups_property', '/groups/{group_id}/{key}')
66 config.add_route("groups_no_id", "/groups")
67 config.add_route("groups", "/groups/{group_id}")
68 config.add_route("groups_property", "/groups/{group_id}/{key}")
63 69
64 70 # reports API
65 config.add_route('reports', '/reports')
66 config.add_route('slow_reports', '/slow_reports')
67 config.add_route('report_groups', '/report_groups/{group_id}',
68 factory='appenlight.security.ResourceReportFactory')
69 config.add_route('report_groups_property',
70 '/report_groups/{group_id}/{key}',
71 factory='appenlight.security.ResourceReportFactory')
71 config.add_route("reports", "/reports")
72 config.add_route("slow_reports", "/slow_reports")
73 config.add_route(
74 "report_groups",
75 "/report_groups/{group_id}",
76 factory="appenlight.security.ResourceReportFactory",
77 )
78 config.add_route(
79 "report_groups_property",
80 "/report_groups/{group_id}/{key}",
81 factory="appenlight.security.ResourceReportFactory",
82 )
72 83
73 84 # generic resource API
74 config.add_route('resources_property',
75 '/resources/{resource_id}/{key}',
76 factory='appenlight.security.ResourceFactory')
85 config.add_route(
86 "resources_property",
87 "/resources/{resource_id}/{key}",
88 factory="appenlight.security.ResourceFactory",
89 )
77 90
78 91 # plugin configs API
79 config.add_route('plugin_configs', '/plugin_configs/{plugin_name}',
80 factory='appenlight.security.ResourcePluginMixedFactory')
81 config.add_route('plugin_config', '/plugin_configs/{plugin_name}/{id}',
82 factory='appenlight.security.ResourcePluginConfigFactory')
92 config.add_route(
93 "plugin_configs",
94 "/plugin_configs/{plugin_name}",
95 factory="appenlight.security.ResourcePluginMixedFactory",
96 )
97 config.add_route(
98 "plugin_config",
99 "/plugin_configs/{plugin_name}/{id}",
100 factory="appenlight.security.ResourcePluginConfigFactory",
101 )
83 102
84 103 # client endpoints API
85 config.add_route('api_reports', '/api/reports',
86 factory='appenlight.security.APIFactory')
87 config.add_route('api_report', '/api/report',
88 factory='appenlight.security.APIFactory')
89 config.add_route('api_logs', '/api/logs',
90 factory='appenlight.security.APIFactory')
91 config.add_route('api_log', '/api/log',
92 factory='appenlight.security.APIFactory')
93 config.add_route('api_slow_reports', '/api/slow_reports',
94 factory='appenlight.security.APIFactory')
95 config.add_route('api_request_stats', '/api/request_stats',
96 factory='appenlight.security.APIFactory')
97 config.add_route('api_metrics', '/api/metrics',
98 factory='appenlight.security.APIFactory')
99 config.add_route('api_general_metrics', '/api/general_metrics',
100 factory='appenlight.security.APIFactory')
101 config.add_route('api_general_metric', '/api/general_metric',
102 factory='appenlight.security.APIFactory')
103 config.add_route('api_airbrake', '/notifier_api/v2/{action}',
104 factory='appenlight.security.AirbrakeV2APIFactory')
105 config.add_route('api_sentry', '/api/{project}/store',
106 factory='appenlight.security.SentryAPIFactory')
107 config.add_route('api_sentry_slash', '/api/{project}/store/',
108 factory='appenlight.security.SentryAPIFactory')
104 config.add_route(
105 "api_reports", "/api/reports", factory="appenlight.security.APIFactory"
106 )
107 config.add_route(
108 "api_report", "/api/report", factory="appenlight.security.APIFactory"
109 )
110 config.add_route("api_logs", "/api/logs", factory="appenlight.security.APIFactory")
111 config.add_route("api_log", "/api/log", factory="appenlight.security.APIFactory")
112 config.add_route(
113 "api_slow_reports",
114 "/api/slow_reports",
115 factory="appenlight.security.APIFactory",
116 )
117 config.add_route(
118 "api_request_stats",
119 "/api/request_stats",
120 factory="appenlight.security.APIFactory",
121 )
122 config.add_route(
123 "api_metrics", "/api/metrics", factory="appenlight.security.APIFactory"
124 )
125 config.add_route(
126 "api_general_metrics",
127 "/api/general_metrics",
128 factory="appenlight.security.APIFactory",
129 )
130 config.add_route(
131 "api_general_metric",
132 "/api/general_metric",
133 factory="appenlight.security.APIFactory",
134 )
135 config.add_route(
136 "api_airbrake",
137 "/notifier_api/v2/{action}",
138 factory="appenlight.security.AirbrakeV2APIFactory",
139 )
140 config.add_route(
141 "api_sentry",
142 "/api/{project}/store",
143 factory="appenlight.security.SentryAPIFactory",
144 )
145 config.add_route(
146 "api_sentry_slash",
147 "/api/{project}/store/",
148 factory="appenlight.security.SentryAPIFactory",
149 )
109 150
110 151 # other
111 config.add_route('register', '/register')
112 config.add_route('register_ajax', '/register_ajax')
113 config.add_route('lost_password', '/lost_password')
114 config.add_route('lost_password_generate', '/lost_password_generate')
115 config.add_route('logs_no_id', '/logs')
116 config.add_route('forbidden', '/forbidden')
117 config.add_route('test', '/test/{action}')
118 config.add_route('section_view', '/sections/{section}/{view}')
119
120 config.add_view('appenlight.views.forbidden_view',
121 context='pyramid.exceptions.Forbidden',
122 renderer='appenlight:templates/forbidden.jinja2',
123 permission='__no_permission_required__')
124 config.add_view('appenlight.views.not_found_view',
125 context='pyramid.exceptions.NotFound',
126 renderer='appenlight:templates/not_found.jinja2',
127 permission='__no_permission_required__')
128 config.add_view('appenlight.views.csrf_view',
129 context='appenlight.lib.request.CSRFException',
130 renderer='appenlight:templates/forbidden.jinja2',
131 permission='__no_permission_required__')
132 config.add_view('appenlight.views.csrf_view',
133 context='appenlight.forms.CSRFException',
134 renderer='appenlight:templates/forbidden.jinja2',
135 permission='__no_permission_required__')
136 config.add_view('appenlight.views.colander_invalid_view',
137 context='colander.Invalid',
138 renderer='json',
139 permission='__no_permission_required__')
140 config.add_view('appenlight.views.bad_json_view',
141 context='appenlight.lib.request.JSONException',
142 renderer='json',
143 permission='__no_permission_required__')
152 config.add_route("register", "/register")
153 config.add_route("register_ajax", "/register_ajax")
154 config.add_route("lost_password", "/lost_password")
155 config.add_route("lost_password_generate", "/lost_password_generate")
156 config.add_route("logs_no_id", "/logs")
157 config.add_route("forbidden", "/forbidden")
158 config.add_route("test", "/test/{action}")
159 config.add_route("section_view", "/sections/{section}/{view}")
160
161 config.add_view(
162 "appenlight.views.forbidden_view",
163 context="pyramid.exceptions.Forbidden",
164 renderer="appenlight:templates/forbidden.jinja2",
165 permission="__no_permission_required__",
166 )
167 config.add_view(
168 "appenlight.views.not_found_view",
169 context="pyramid.exceptions.NotFound",
170 renderer="appenlight:templates/not_found.jinja2",
171 permission="__no_permission_required__",
172 )
173 config.add_view(
174 "appenlight.views.csrf_view",
175 context="appenlight.lib.request.CSRFException",
176 renderer="appenlight:templates/forbidden.jinja2",
177 permission="__no_permission_required__",
178 )
179 config.add_view(
180 "appenlight.views.csrf_view",
181 context="appenlight.forms.CSRFException",
182 renderer="appenlight:templates/forbidden.jinja2",
183 permission="__no_permission_required__",
184 )
185 config.add_view(
186 "appenlight.views.colander_invalid_view",
187 context="colander.Invalid",
188 renderer="json",
189 permission="__no_permission_required__",
190 )
191 config.add_view(
192 "appenlight.views.bad_json_view",
193 context="appenlight.lib.request.JSONException",
194 renderer="json",
195 permission="__no_permission_required__",
196 )
144 197
145 198 # handle authomatic
146 config.add_route('social_auth', '/social_auth/{provider}')
147 config.add_route('social_auth_abort', '/social_auth/{provider}/abort')
199 config.add_route("social_auth", "/social_auth/{provider}")
200 config.add_route("social_auth_abort", "/social_auth/{provider}/abort")
148 201
149 202 # only use in production
150 if (config.registry.settings.get('pyramid.reload_templates') is False
151 and config.registry.settings.get('pyramid.debug_templates') is False):
152 config.add_view('appenlight.views.error_view',
203 if (
204 config.registry.settings.get("pyramid.reload_templates") is False
205 and config.registry.settings.get("pyramid.debug_templates") is False
206 ):
207 config.add_view(
208 "appenlight.views.error_view",
153 209 context=Exception,
154 renderer='appenlight:templates/error.jinja2',
155 permission='__no_permission_required__')
210 renderer="appenlight:templates/error.jinja2",
211 permission="__no_permission_required__",
212 )
156 213
157 214
158 215 def bad_json_view(exc, request):
159 request.environ['appenlight.ignore_error'] = 1
160 request.response.headers.add('X-AppEnlight-Error', 'Incorrect JSON')
216 request.environ["appenlight.ignore_error"] = 1
217 request.response.headers.add("X-AppEnlight-Error", "Incorrect JSON")
161 218 request.response.status_int = 400
162 219 return "Incorrect JSON"
163 220
164 221
165 222 def colander_invalid_view(exc, request):
166 request.environ['appenlight.ignore_error'] = 1
167 log.warning('API version %s, %s' % (
168 request.params.get('protocol_version'),
169 request.context.resource))
170 log.warning('Invalid payload sent')
223 request.environ["appenlight.ignore_error"] = 1
224 log.warning(
225 "API version %s, %s"
226 % (request.params.get("protocol_version"), request.context.resource)
227 )
228 log.warning("Invalid payload sent")
171 229 errors = exc.asdict()
172 request.response.headers.add('X-AppEnlight-Error', 'Invalid payload sent')
230 request.response.headers.add("X-AppEnlight-Error", "Invalid payload sent")
173 231 request.response.status_int = 422
174 232 return errors
175 233
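A note on the add_view calls above: Pyramid treats a view registered with a context= that is an exception class (or its dotted path) as an exception view, invoked whenever that exception escapes an ordinary view. A minimal self-contained sketch of the mechanism; MyError, failing_view and handle_my_error are hypothetical names, not part of this codebase:

    from pyramid.config import Configurator

    class MyError(Exception):
        pass

    def failing_view(request):
        raise MyError("boom")

    def handle_my_error(exc, request):
        # an exception view receives the raised exception as its first argument
        request.response.status_int = 400
        return {"error": str(exc)}

    def make_app():
        config = Configurator()
        config.add_route("fail", "/fail")
        config.add_view(failing_view, route_name="fail", renderer="json")
        # whenever MyError escapes any view, Pyramid renders handle_my_error instead
        config.add_view(handle_my_error, context=MyError, renderer="json")
        return config.make_wsgi_app()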
@@ -177,11 +235,12 b' def colander_invalid_view(exc, request):'
177 235 def csrf_view(exc, request):
178 236 request.response.status = 403
179 237 from ..models import DBSession
238
180 239 request.environ["appenlight.ignore_error"] = 1
181 request.response.headers.add('X-AppEnlight-Error', str(exc))
240 request.response.headers.add("X-AppEnlight-Error", str(exc))
182 241 if request.user:
183 242 request.user = DBSession.merge(request.user)
184 return {'forbidden_view': True, 'csrf': True}
243 return {"forbidden_view": True, "csrf": True}
185 244
186 245
187 246 def not_found_view(exc, request):
@@ -192,9 +251,10 b' def not_found_view(exc, request):'
192 251 request.user = DBSession.merge(request.user)
193 252
194 253 if request.user:
195 request.response.headers['x-appenlight-uid'] = '%s' % request.user.id
196 request.response.headers['x-appenlight-flash'] = json.dumps(
197 helpers.get_flash(request))
254 request.response.headers["x-appenlight-uid"] = "%s" % request.user.id
255 request.response.headers["x-appenlight-flash"] = json.dumps(
256 helpers.get_flash(request)
257 )
198 258
199 259 return {}
200 260
@@ -205,34 +265,34 b' def forbidden_view(exc, request):'
205 265
206 266 if request.user:
207 267 request.user = DBSession.merge(request.user)
208 if request.path.startswith('/api'):
209 logging.warning('Wrong API Key sent')
268 if request.path.startswith("/api"):
269 logging.warning("Wrong API Key sent")
210 270 logging.info(request.url)
211 logging.info(
212 '\n'.join(
213 ['%s:%s' % (k, v) for k, v in request.headers.items()]))
271 logging.info("\n".join(["%s:%s" % (k, v) for k, v in request.headers.items()]))
214 272 resp = Response(
215 "Wrong api key",
216 headers=(('X-AppEnlight-Error', 'Incorrect API key',),))
273 "Wrong api key", headers=(("X-AppEnlight-Error", "Incorrect API key"),)
274 )
217 275 resp.status_int = 403
218 276 return resp
219 277
220 278 if request.user:
221 request.response.headers['x-appenlight-uid'] = '%s' % request.user.id
222 request.response.headers['x-appenlight-flash'] = json.dumps(
223 helpers.get_flash(request))
279 request.response.headers["x-appenlight-uid"] = "%s" % request.user.id
280 request.response.headers["x-appenlight-flash"] = json.dumps(
281 helpers.get_flash(request)
282 )
224 283 request.response.status = 403
225 return {'forbidden_view': True}
284 return {"forbidden_view": True}
226 285
227 286
228 287 def error_view(exc, request):
229 288 from ..models import DBSession
289
230 290 if request.user:
231 291 request.user = DBSession.merge(request.user)
232 if request.path.startswith('/api'):
292 if request.path.startswith("/api"):
233 293 resp = Response(
234 294 "There was a problem handling your request please try again",
235 headers=(('X-AppEnlight-Error', 'Problem handling request',),)
295 headers=(("X-AppEnlight-Error", "Problem handling request"),),
236 296 )
237 297 resp.status_int = 500
238 298 return resp
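Every hunk in this commit reduces to the same two black behaviors: quotes are normalized to double quotes, and a call either collapses onto a single line when it fits black's default 88-character limit or is exploded to one argument per line with a trailing comma. Two of the routes above, shown before and after:

    # before: hand-wrapped continuation lines, single quotes
    config.add_route('api_logs', '/api/logs',
                     factory='appenlight.security.APIFactory')

    # after: fits within 88 characters, so black collapses it
    config.add_route("api_logs", "/api/logs", factory="appenlight.security.APIFactory")

    # after: too long for one line, so each argument gets its own line
    config.add_route(
        "api_slow_reports",
        "/api/slow_reports",
        factory="appenlight.security.APIFactory",
    )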
@@ -24,8 +24,8 b' log = logging.getLogger(__name__)'
24 24 def includeme(config):
25 25 """Add the application's view handlers.
26 26 """
27 config.add_route('admin_users_no_id', '/admin/users/')
28 config.add_route('admin_users', '/admin/users/{user_id}')
29 config.add_route('admin', '/admin/{action}')
30 config.add_route('admin_configs', '/configs')
31 config.add_route('admin_config', '/configs/{key}/{section}')
27 config.add_route("admin_users_no_id", "/admin/users/")
28 config.add_route("admin_users", "/admin/users/{user_id}")
29 config.add_route("admin", "/admin/{action}")
30 config.add_route("admin_configs", "/configs")
31 config.add_route("admin_config", "/configs/{key}/{section}")
@@ -34,31 +34,38 b' def bytes2human(total):'
34 34 mega = 1024.0 ** 2
35 35 kilo = 1024.0
36 36 if giga <= total:
37 return '{:0.1f}G'.format(total / giga)
37 return "{:0.1f}G".format(total / giga)
38 38 elif mega <= total:
39 return '{:0.1f}M'.format(total / mega)
39 return "{:0.1f}M".format(total / mega)
40 40 else:
41 return '{:0.1f}K'.format(total / kilo)
41 return "{:0.1f}K".format(total / kilo)
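bytes2human always reports in kilobytes or larger, so sub-kilobyte totals come back as a fraction of a K:

    >>> bytes2human(3 * 1024 ** 3)
    '3.0G'
    >>> bytes2human(1536 * 1024)
    '1.5M'
    >>> bytes2human(512)
    '0.5K'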
42 42
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
47 @view_config(route_name='section_view',
48 match_param=['section=admin_section', 'view=system'],
49 renderer='json', permission='root_administration')
47 @view_config(
48 route_name="section_view",
49 match_param=["section=admin_section", "view=system"],
50 renderer="json",
51 permission="root_administration",
52 )
50 53 def system(request):
51 current_time = datetime.utcnow(). \
52 replace(second=0, microsecond=0) - timedelta(minutes=1)
54 current_time = datetime.utcnow().replace(second=0, microsecond=0) - timedelta(
55 minutes=1
56 )
53 57 # global app counter
54 58 processed_reports = request.registry.redis_conn.get(
55 REDIS_KEYS['counters']['reports_per_minute'].format(current_time))
59 REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
60 )
56 61 processed_reports = int(processed_reports) if processed_reports else 0
57 62 processed_logs = request.registry.redis_conn.get(
58 REDIS_KEYS['counters']['logs_per_minute'].format(current_time))
63 REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
64 )
59 65 processed_logs = int(processed_logs) if processed_logs else 0
60 66 processed_metrics = request.registry.redis_conn.get(
61 REDIS_KEYS['counters']['metrics_per_minute'].format(current_time))
67 REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
68 )
62 69 processed_metrics = int(processed_metrics) if processed_metrics else 0
63 70
64 71 waiting_reports = 0
@@ -66,61 +73,62 b' def system(request):'
66 73 waiting_metrics = 0
67 74 waiting_other = 0
68 75
69 if 'redis' in request.registry.settings['celery.broker_type']:
76 if "redis" in request.registry.settings["celery.broker_type"]:
70 77 redis_client = redis.StrictRedis.from_url(
71 request.registry.settings['celery.broker_url'])
72 waiting_reports = redis_client.llen('reports')
73 waiting_logs = redis_client.llen('logs')
74 waiting_metrics = redis_client.llen('metrics')
75 waiting_other = redis_client.llen('default')
78 request.registry.settings["celery.broker_url"]
79 )
80 waiting_reports = redis_client.llen("reports")
81 waiting_logs = redis_client.llen("logs")
82 waiting_metrics = redis_client.llen("metrics")
83 waiting_other = redis_client.llen("default")
76 84
77 85 # process
78 86 def replace_inf(val):
79 return val if val != psutil.RLIM_INFINITY else 'unlimited'
87 return val if val != psutil.RLIM_INFINITY else "unlimited"
80 88
81 89 p = psutil.Process()
82 90 fd = p.rlimit(psutil.RLIMIT_NOFILE)
83 91 memlock = p.rlimit(psutil.RLIMIT_MEMLOCK)
84 92 self_info = {
85 'fds': {'soft': replace_inf(fd[0]),
86 'hard': replace_inf(fd[1])},
87 'memlock': {'soft': replace_inf(memlock[0]),
88 'hard': replace_inf(memlock[1])},
93 "fds": {"soft": replace_inf(fd[0]), "hard": replace_inf(fd[1])},
94 "memlock": {"soft": replace_inf(memlock[0]), "hard": replace_inf(memlock[1])},
89 95 }
90 96
91 97 # disks
92 98 disks = []
93 99 for part in psutil.disk_partitions(all=False):
94 if os.name == 'nt':
95 if 'cdrom' in part.opts or part.fstype == '':
100 if os.name == "nt":
101 if "cdrom" in part.opts or part.fstype == "":
96 102 continue
97 103 usage = psutil.disk_usage(part.mountpoint)
98 disks.append({
99 'device': part.device,
100 'total': bytes2human(usage.total),
101 'used': bytes2human(usage.used),
102 'free': bytes2human(usage.free),
103 'percentage': int(usage.percent),
104 'mountpoint': part.mountpoint,
105 'fstype': part.fstype
106 })
104 disks.append(
105 {
106 "device": part.device,
107 "total": bytes2human(usage.total),
108 "used": bytes2human(usage.used),
109 "free": bytes2human(usage.free),
110 "percentage": int(usage.percent),
111 "mountpoint": part.mountpoint,
112 "fstype": part.fstype,
113 }
114 )
107 115
108 116 # memory
109 117 memory_v = psutil.virtual_memory()
110 118 memory_s = psutil.swap_memory()
111 119
112 120 memory = {
113 'total': bytes2human(memory_v.total),
114 'available': bytes2human(memory_v.available),
115 'percentage': memory_v.percent,
116 'used': bytes2human(memory_v.used),
117 'free': bytes2human(memory_v.free),
118 'active': bytes2human(memory_v.active),
119 'inactive': bytes2human(memory_v.inactive),
120 'buffers': bytes2human(memory_v.buffers),
121 'cached': bytes2human(memory_v.cached),
122 'swap_total': bytes2human(memory_s.total),
123 'swap_used': bytes2human(memory_s.used)
121 "total": bytes2human(memory_v.total),
122 "available": bytes2human(memory_v.available),
123 "percentage": memory_v.percent,
124 "used": bytes2human(memory_v.used),
125 "free": bytes2human(memory_v.free),
126 "active": bytes2human(memory_v.active),
127 "inactive": bytes2human(memory_v.inactive),
128 "buffers": bytes2human(memory_v.buffers),
129 "cached": bytes2human(memory_v.cached),
130 "swap_total": bytes2human(memory_s.total),
131 "swap_used": bytes2human(memory_s.used),
124 132 }
125 133
126 134 # load
@@ -133,60 +141,63 b' def system(request):'
133 141 mem_used = p.memory_info().rss
134 142 if mem_used < min_mem:
135 143 continue
136 process_info.append({'owner': p.username(),
137 'pid': p.pid,
138 'cpu': round(p.cpu_percent(interval=0), 1),
139 'mem_percentage': round(p.memory_percent(),1),
140 'mem_usage': bytes2human(mem_used),
141 'name': p.name(),
142 'command': ' '.join(p.cmdline())
143 })
144 process_info = sorted(process_info, key=lambda x: x['mem_percentage'],
145 reverse=True)
144 process_info.append(
145 {
146 "owner": p.username(),
147 "pid": p.pid,
148 "cpu": round(p.cpu_percent(interval=0), 1),
149 "mem_percentage": round(p.memory_percent(), 1),
150 "mem_usage": bytes2human(mem_used),
151 "name": p.name(),
152 "command": " ".join(p.cmdline()),
153 }
154 )
155 process_info = sorted(process_info, key=lambda x: x["mem_percentage"], reverse=True)
146 156
147 157 # pg tables
148 158
149 db_size_query = '''
159 db_size_query = """
150 160 SELECT tablename, pg_total_relation_size(tablename::text) size
151 161 FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
152 tablename NOT LIKE 'sql_%' ORDER BY size DESC;'''
162 tablename NOT LIKE 'sql_%' ORDER BY size DESC;"""
153 163
154 164 db_tables = []
155 165 for row in DBSession.execute(db_size_query):
156 db_tables.append({"size_human": bytes2human(row.size),
157 "table_name": row.tablename})
166 db_tables.append(
167 {"size_human": bytes2human(row.size), "table_name": row.tablename}
168 )
158 169
159 170 # es indices
160 171 es_indices = []
161 result = Datastores.es.indices.stats(metric=['store', 'docs'])
162 for ix, stats in result['indices'].items():
163 size = stats['primaries']['store']['size_in_bytes']
164 es_indices.append({'name': ix,
165 'size': size,
166 'size_human': bytes2human(size)})
172 result = Datastores.es.indices.stats(metric=["store", "docs"])
173 for ix, stats in result["indices"].items():
174 size = stats["primaries"]["store"]["size_in_bytes"]
175 es_indices.append({"name": ix, "size": size, "size_human": bytes2human(size)})
167 176
168 177 # packages
169 178
170 packages = ({'name': p.project_name, 'version': p.version}
171 for p in pkg_resources.working_set)
172
173 return {'db_tables': db_tables,
174 'es_indices': sorted(es_indices,
175 key=lambda x: x['size'], reverse=True),
176 'process_info': process_info,
177 'system_load': system_load,
178 'disks': disks,
179 'memory': memory,
180 'packages': sorted(packages, key=lambda x: x['name'].lower()),
181 'current_time': current_time,
182 'queue_stats': {
183 'processed_reports': processed_reports,
184 'processed_logs': processed_logs,
185 'processed_metrics': processed_metrics,
186 'waiting_reports': waiting_reports,
187 'waiting_logs': waiting_logs,
188 'waiting_metrics': waiting_metrics,
189 'waiting_other': waiting_other
179 packages = (
180 {"name": p.project_name, "version": p.version}
181 for p in pkg_resources.working_set
182 )
183
184 return {
185 "db_tables": db_tables,
186 "es_indices": sorted(es_indices, key=lambda x: x["size"], reverse=True),
187 "process_info": process_info,
188 "system_load": system_load,
189 "disks": disks,
190 "memory": memory,
191 "packages": sorted(packages, key=lambda x: x["name"].lower()),
192 "current_time": current_time,
193 "queue_stats": {
194 "processed_reports": processed_reports,
195 "processed_logs": processed_logs,
196 "processed_metrics": processed_metrics,
197 "waiting_reports": waiting_reports,
198 "waiting_logs": waiting_logs,
199 "waiting_metrics": waiting_metrics,
200 "waiting_other": waiting_other,
190 201 },
191 'self_info': self_info
202 "self_info": self_info,
192 203 }
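The queue counters at the top of system() read per-minute Redis buckets: the timestamp is truncated to the minute and shifted back one minute so only a bucket that is no longer being written gets read. A sketch of the pattern, assuming a key template shaped like the REDIS_KEYS entries used above (the template string here is illustrative):

    from datetime import datetime, timedelta

    # assumed shape of the template; the real one lives in appenlight's REDIS_KEYS
    REPORTS_PER_MINUTE = "counters:reports_per_minute:{}"

    def closed_minute_bucket():
        # truncate to the minute, then step back one minute: that bucket is complete
        now = datetime.utcnow().replace(second=0, microsecond=0)
        return now - timedelta(minutes=1)

    def read_reports_counter(redis_conn):
        value = redis_conn.get(REPORTS_PER_MINUTE.format(closed_minute_bucket()))
        return int(value) if value else 0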
@@ -23,25 +23,33 b' import logging'
23 23 log = logging.getLogger(__name__)
24 24
25 25
26 @view_config(route_name='admin_configs', renderer='json',
27 permission='root_administration', request_method='GET')
26 @view_config(
27 route_name="admin_configs",
28 renderer="json",
29 permission="root_administration",
30 request_method="GET",
31 )
28 32 def query(request):
29 33 ConfigService.setup_default_values()
30 34 pairs = []
31 for value in request.GET.getall('filter'):
32 split = value.split(':', 1)
33 pairs.append({'key': split[0], 'section': split[1]})
35 for value in request.GET.getall("filter"):
36 split = value.split(":", 1)
37 pairs.append({"key": split[0], "section": split[1]})
34 38 return [c for c in ConfigService.filtered_key_and_section(pairs)]
35 39
36 40
37 @view_config(route_name='admin_config', renderer='json',
38 permission='root_administration', request_method='POST')
41 @view_config(
42 route_name="admin_config",
43 renderer="json",
44 permission="root_administration",
45 request_method="POST",
46 )
39 47 def post(request):
40 48 row = ConfigService.by_key_and_section(
41 key=request.matchdict.get('key'),
42 section=request.matchdict.get('section'))
49 key=request.matchdict.get("key"), section=request.matchdict.get("section")
50 )
43 51 if not row:
44 52 raise HTTPNotFound()
45 53 row.value = None
46 row.value = request.unsafe_json_body['value']
54 row.value = request.unsafe_json_body["value"]
47 55 return row
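The GET handler above takes repeated filter=key:section query parameters and splits each value on the first colon only, so a section name may itself contain colons (the value below is made up for illustration):

    >>> value = "template:global:frontend"
    >>> split = value.split(":", 1)
    >>> {"key": split[0], "section": split[1]}
    {'key': 'template', 'section': 'global:frontend'}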
@@ -48,101 +48,119 b' def get_partition_stats():'
48 48
49 49 def add_key(key, holder):
50 50 if not ix_time in holder:
51 holder[ix_time] = {'pg': [], 'elasticsearch': []}
51 holder[ix_time] = {"pg": [], "elasticsearch": []}
52 52
53 for partition in list(Datastores.es.indices.get_alias('rcae*')):
54 if not partition.startswith('rcae'):
53 for partition in list(Datastores.es.indices.get_alias("rcae*")):
54 if not partition.startswith("rcae"):
55 55 continue
56 split_data = partition.split('_')
56 split_data = partition.split("_")
57 57 permanent = False
58 58 # if we don't have a day then treat it as a permanent partition
59 59 if False in list(map(is_int, split_data[-3:])):
60 ix_time = datetime(year=int(split_data[-2]),
61 month=int(split_data[-1]),
62 day=1).date()
60 ix_time = datetime(
61 year=int(split_data[-2]), month=int(split_data[-1]), day=1
62 ).date()
63 63 permanent = True
64 64 else:
65 ix_time = datetime(year=int(split_data[-3]),
65 ix_time = datetime(
66 year=int(split_data[-3]),
66 67 month=int(split_data[-2]),
67 day=int(split_data[-1])).date()
68 day=int(split_data[-1]),
69 ).date()
68 70
69 71 ix_time = str(ix_time)
70 72 if permanent:
71 73 add_key(ix_time, permanent_partitions)
72 74 if ix_time not in permanent_partitions:
73 permanent_partitions[ix_time]['elasticsearch'] = []
74 permanent_partitions[ix_time]['elasticsearch'].append(partition)
75 permanent_partitions[ix_time]["elasticsearch"] = []
76 permanent_partitions[ix_time]["elasticsearch"].append(partition)
75 77 else:
76 78 add_key(ix_time, daily_partitions)
77 79 if ix_time not in daily_partitions:
78 daily_partitions[ix_time]['elasticsearch'] = []
79 daily_partitions[ix_time]['elasticsearch'].append(partition)
80 daily_partitions[ix_time]["elasticsearch"] = []
81 daily_partitions[ix_time]["elasticsearch"].append(partition)
80 82
81 83 for row in DBSession.execute(table_query):
82 splitted = row['table_name'].split('_')
83 if 'p' in splitted:
84 splitted = row["table_name"].split("_")
85 if "p" in splitted:
84 86 # dealing with partition
85 split_data = [int(x) for x in splitted[splitted.index('p') + 1:]]
87 split_data = [int(x) for x in splitted[splitted.index("p") + 1 :]]
86 88 if len(split_data) == 3:
87 ix_time = datetime(split_data[0], split_data[1],
88 split_data[2]).date()
89 ix_time = datetime(split_data[0], split_data[1], split_data[2]).date()
89 90 ix_time = str(ix_time)
90 91 add_key(ix_time, daily_partitions)
91 daily_partitions[ix_time]['pg'].append(row['table_name'])
92 daily_partitions[ix_time]["pg"].append(row["table_name"])
92 93 else:
93 94 ix_time = datetime(split_data[0], split_data[1], 1).date()
94 95 ix_time = str(ix_time)
95 96 add_key(ix_time, permanent_partitions)
96 permanent_partitions[ix_time]['pg'].append(row['table_name'])
97 permanent_partitions[ix_time]["pg"].append(row["table_name"])
97 98
98 99 return permanent_partitions, daily_partitions
99 100
100 101
101 @view_config(route_name='section_view', permission='root_administration',
102 match_param=['section=admin_section', 'view=partitions'],
103 renderer='json', request_method='GET')
102 @view_config(
103 route_name="section_view",
104 permission="root_administration",
105 match_param=["section=admin_section", "view=partitions"],
106 renderer="json",
107 request_method="GET",
108 )
104 109 def index(request):
105 110 permanent_partitions, daily_partitions = get_partition_stats()
106 111
107 return {"permanent_partitions": sorted(list(permanent_partitions.items()),
108 key=lambda x: x[0], reverse=True),
109 "daily_partitions": sorted(list(daily_partitions.items()),
110 key=lambda x: x[0], reverse=True)}
111
112
113 @view_config(route_name='section_view', request_method='POST',
114 match_param=['section=admin_section', 'view=partitions_remove'],
115 renderer='json', permission='root_administration')
112 return {
113 "permanent_partitions": sorted(
114 list(permanent_partitions.items()), key=lambda x: x[0], reverse=True
115 ),
116 "daily_partitions": sorted(
117 list(daily_partitions.items()), key=lambda x: x[0], reverse=True
118 ),
119 }
120
121
122 @view_config(
123 route_name="section_view",
124 request_method="POST",
125 match_param=["section=admin_section", "view=partitions_remove"],
126 renderer="json",
127 permission="root_administration",
128 )
116 129 def partitions_remove(request):
117 130 permanent_partitions, daily_partitions = get_partition_stats()
118 131 pg_partitions = []
119 132 es_partitions = []
120 133 for item in list(permanent_partitions.values()) + list(daily_partitions.values()):
121 es_partitions.extend(item['elasticsearch'])
122 pg_partitions.extend(item['pg'])
134 es_partitions.extend(item["elasticsearch"])
135 pg_partitions.extend(item["pg"])
123 136 FormCls = get_partition_deletion_form(es_partitions, pg_partitions)
124 form = FormCls(es_index=request.unsafe_json_body['es_indices'],
125 pg_index=request.unsafe_json_body['pg_indices'],
126 confirm=request.unsafe_json_body['confirm'],
127 csrf_context=request)
137 form = FormCls(
138 es_index=request.unsafe_json_body["es_indices"],
139 pg_index=request.unsafe_json_body["pg_indices"],
140 confirm=request.unsafe_json_body["confirm"],
141 csrf_context=request,
142 )
128 143 if form.validate():
129 for ix in form.data['es_index']:
130 log.warning('deleting ES partition: {}'.format(ix))
144 for ix in form.data["es_index"]:
145 log.warning("deleting ES partition: {}".format(ix))
131 146 Datastores.es.indices.delete(ix)
132 for ix in form.data['pg_index']:
133 log.warning('deleting PG partition: {}'.format(ix))
134 stmt = sa.text('DROP TABLE %s CASCADE' % sa.text(ix))
147 for ix in form.data["pg_index"]:
148 log.warning("deleting PG partition: {}".format(ix))
149 stmt = sa.text("DROP TABLE %s CASCADE" % sa.text(ix))
135 150 session = DBSession()
136 151 session.connection().execute(stmt)
137 152 mark_changed(session)
138 153
139 154 for field, error in form.errors.items():
140 msg = '%s: %s' % (field, error[0])
141 request.session.flash(msg, 'error')
155 msg = "%s: %s" % (field, error[0])
156 request.session.flash(msg, "error")
142 157
143 158 permanent_partitions, daily_partitions = get_partition_stats()
144 159 return {
145 160 "permanent_partitions": sorted(
146 list(permanent_partitions.items()), key=lambda x: x[0], reverse=True),
161 list(permanent_partitions.items()), key=lambda x: x[0], reverse=True
162 ),
147 163 "daily_partitions": sorted(
148 list(daily_partitions.items()), key=lambda x: x[0], reverse=True)}
164 list(daily_partitions.items()), key=lambda x: x[0], reverse=True
165 ),
166 }
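The split_data logic in get_partition_stats classifies a partition by its name alone: if the last three underscore-separated chunks are all integers it is a daily year_month_day partition, otherwise a permanent year_month one. A standalone sketch of that test, with is_int spelled out (the real module already has such a helper) and made-up index names in the examples:

    from datetime import datetime

    def is_int(value):
        try:
            int(value)
            return True
        except (TypeError, ValueError):
            return False

    def classify_partition(name):
        split_data = name.split("_")
        if False in list(map(is_int, split_data[-3:])):
            # no day component, treat as a permanent (monthly) partition
            return datetime(int(split_data[-2]), int(split_data[-1]), 1).date(), True
        return datetime(*[int(x) for x in split_data[-3:]]).date(), False

    # classify_partition("rcae_r_2018_07_23") -> (datetime.date(2018, 7, 23), False)
    # classify_partition("rcae_l_2018_07")    -> (datetime.date(2018, 7, 1), True)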
@@ -24,13 +24,16 b' import logging'
24 24 log = logging.getLogger(__name__)
25 25
26 26
27 @view_config(route_name='section_view', permission='root_administration',
28 match_param=['section=admin_section', 'view=relogin_user'],
29 renderer='json', request_method='GET')
27 @view_config(
28 route_name="section_view",
29 permission="root_administration",
30 match_param=["section=admin_section", "view=relogin_user"],
31 renderer="json",
32 request_method="GET",
33 )
30 34 def relogin_to_user(request):
31 user = UserService.by_id(request.GET.get('user_id'))
35 user = UserService.by_id(request.GET.get("user_id"))
32 36 if not user:
33 37 return HTTPNotFound()
34 38 headers = security.remember(request, user.id)
35 return HTTPFound(location=request.route_url('/'),
36 headers=headers)
39 return HTTPFound(location=request.route_url("/"), headers=headers)
@@ -34,7 +34,8 b' from appenlight.lib.utils.airbrake import parse_airbrake_xml'
34 34 from appenlight.lib.utils.date_utils import convert_date
35 35 from appenlight.lib.utils.sentry import parse_sentry_event
36 36 from appenlight.lib.request import JSONException
37 from appenlight.validators import (LogListSchema,
37 from appenlight.validators import (
38 LogListSchema,
38 39 MetricsListSchema,
39 40 GeneralMetricsListSchema,
40 41 GeneralMetricsPermanentListSchema,
@@ -44,148 +45,156 b' from appenlight.validators import (LogListSchema,'
44 45 ReportListSchema_0_5,
45 46 LogSchema,
46 47 LogSchemaPermanent,
47 ReportSchema_0_5)
48 ReportSchema_0_5,
49 )
48 50
49 51 log = logging.getLogger(__name__)
50 52
51 53
52 @view_config(route_name='api_logs', renderer='string', permission='create',
53 require_csrf=False)
54 @view_config(route_name='api_log', renderer='string', permission='create',
55 require_csrf=False)
54 @view_config(
55 route_name="api_logs", renderer="string", permission="create", require_csrf=False
56 )
57 @view_config(
58 route_name="api_log", renderer="string", permission="create", require_csrf=False
59 )
56 60 def logs_create(request):
57 61 """
58 62 Endpoint for log aggregation
59 63 """
60 64 application = request.context.resource
61 if request.method.upper() == 'OPTIONS':
65 if request.method.upper() == "OPTIONS":
62 66 return check_cors(request, application)
63 67 else:
64 68 check_cors(request, application, should_return=False)
65 69
66 70 params = dict(request.params.copy())
67 proto_version = parse_proto(params.get('protocol_version', ''))
71 proto_version = parse_proto(params.get("protocol_version", ""))
68 72 payload = request.unsafe_json_body
69 sequence_accepted = request.matched_route.name == 'api_logs'
73 sequence_accepted = request.matched_route.name == "api_logs"
70 74
71 75 if sequence_accepted:
72 76 if application.allow_permanent_storage:
73 schema = LogListPermanentSchema().bind(
74 utcnow=datetime.datetime.utcnow())
77 schema = LogListPermanentSchema().bind(utcnow=datetime.datetime.utcnow())
75 78 else:
76 schema = LogListSchema().bind(
77 utcnow=datetime.datetime.utcnow())
79 schema = LogListSchema().bind(utcnow=datetime.datetime.utcnow())
78 80 else:
79 81 if application.allow_permanent_storage:
80 schema = LogSchemaPermanent().bind(
81 utcnow=datetime.datetime.utcnow())
82 schema = LogSchemaPermanent().bind(utcnow=datetime.datetime.utcnow())
82 83 else:
83 schema = LogSchema().bind(
84 utcnow=datetime.datetime.utcnow())
84 schema = LogSchema().bind(utcnow=datetime.datetime.utcnow())
85 85
86 86 deserialized_logs = schema.deserialize(payload)
87 87 if sequence_accepted is False:
88 88 deserialized_logs = [deserialized_logs]
89 89
90 rate_limiting(request, application, 'per_application_logs_rate_limit',
91 len(deserialized_logs))
90 rate_limiting(
91 request, application, "per_application_logs_rate_limit", len(deserialized_logs)
92 )
92 93
93 94 # pprint.pprint(deserialized_logs)
94 95
95 96 # we need to split those out so we can process the pkey ones one by one
96 non_pkey_logs = [log_dict for log_dict in deserialized_logs
97 if not log_dict['primary_key']]
97 non_pkey_logs = [
98 log_dict for log_dict in deserialized_logs if not log_dict["primary_key"]
99 ]
98 100 pkey_dict = {}
99 101 # try to process the logs as best as we can and group together to reduce
100 102 # the amount of
101 103 for log_dict in deserialized_logs:
102 if log_dict['primary_key']:
103 key = (log_dict['primary_key'], log_dict['namespace'],)
104 if log_dict["primary_key"]:
105 key = (log_dict["primary_key"], log_dict["namespace"])
104 106 if not key in pkey_dict:
105 107 pkey_dict[key] = []
106 108 pkey_dict[key].append(log_dict)
107 109
108 110 if non_pkey_logs:
109 log.debug('%s non-pkey logs received: %s' % (application,
110 len(non_pkey_logs)))
111 log.debug("%s non-pkey logs received: %s" % (application, len(non_pkey_logs)))
111 112 tasks.add_logs.delay(application.resource_id, params, non_pkey_logs)
112 113 if pkey_dict:
113 114 logs_to_insert = []
114 115 for primary_key_tuple, payload in pkey_dict.items():
115 sorted_logs = sorted(payload, key=lambda x: x['date'])
116 sorted_logs = sorted(payload, key=lambda x: x["date"])
116 117 logs_to_insert.append(sorted_logs[-1])
117 log.debug('%s pkey logs received: %s' % (application,
118 len(logs_to_insert)))
118 log.debug("%s pkey logs received: %s" % (application, len(logs_to_insert)))
119 119 tasks.add_logs.delay(application.resource_id, params, logs_to_insert)
120 120
121 log.info('LOG call %s %s client:%s' % (
122 application, proto_version, request.headers.get('user_agent')))
123 return 'OK: Logs accepted'
121 log.info(
122 "LOG call %s %s client:%s"
123 % (application, proto_version, request.headers.get("user_agent"))
124 )
125 return "OK: Logs accepted"
124 126
125 127
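The primary-key branch of logs_create is a last-write-wins dedupe: logs are grouped per (primary_key, namespace) pair and only the newest entry of each group is queued. The same grouping, condensed into one function:

    def latest_per_key(deserialized_logs):
        # group primary-key logs, then keep only the newest entry of each group
        pkey_dict = {}
        for log_dict in deserialized_logs:
            if log_dict["primary_key"]:
                key = (log_dict["primary_key"], log_dict["namespace"])
                pkey_dict.setdefault(key, []).append(log_dict)
        return [
            sorted(group, key=lambda x: x["date"])[-1]
            for group in pkey_dict.values()
        ]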
126 @view_config(route_name='api_request_stats', renderer='string',
127 permission='create', require_csrf=False)
128 @view_config(route_name='api_metrics', renderer='string',
129 permission='create', require_csrf=False)
128 @view_config(
129 route_name="api_request_stats",
130 renderer="string",
131 permission="create",
132 require_csrf=False,
133 )
134 @view_config(
135 route_name="api_metrics", renderer="string", permission="create", require_csrf=False
136 )
130 137 def request_metrics_create(request):
131 138 """
132 139 Endpoint for performance metrics, aggregates view performance stats
133 140 and converts them to general metric row
134 141 """
135 142 application = request.context.resource
136 if request.method.upper() == 'OPTIONS':
143 if request.method.upper() == "OPTIONS":
137 144 return check_cors(request, application)
138 145 else:
139 146 check_cors(request, application, should_return=False)
140 147
141 148 params = dict(request.params.copy())
142 proto_version = parse_proto(params.get('protocol_version', ''))
149 proto_version = parse_proto(params.get("protocol_version", ""))
143 150
144 151 payload = request.unsafe_json_body
145 152 schema = MetricsListSchema()
146 153 dataset = schema.deserialize(payload)
147 154
148 rate_limiting(request, application, 'per_application_metrics_rate_limit',
149 len(dataset))
155 rate_limiting(
156 request, application, "per_application_metrics_rate_limit", len(dataset)
157 )
150 158
151 159 # looping report data
152 160 metrics = {}
153 161 for metric in dataset:
154 server_name = metric.get('server', '').lower() or 'unknown'
155 start_interval = convert_date(metric['timestamp'])
162 server_name = metric.get("server", "").lower() or "unknown"
163 start_interval = convert_date(metric["timestamp"])
156 164 start_interval = start_interval.replace(second=0, microsecond=0)
157 165
158 for view_name, view_metrics in metric['metrics']:
159 key = '%s%s%s' % (metric['server'], start_interval, view_name)
166 for view_name, view_metrics in metric["metrics"]:
167 key = "%s%s%s" % (metric["server"], start_interval, view_name)
160 168 if start_interval not in metrics:
161 metrics[key] = {"requests": 0, "main": 0, "sql": 0,
162 "nosql": 0, "remote": 0, "tmpl": 0,
163 "custom": 0, 'sql_calls': 0,
164 'nosql_calls': 0,
165 'remote_calls': 0, 'tmpl_calls': 0,
166 'custom_calls': 0,
169 metrics[key] = {
170 "requests": 0,
171 "main": 0,
172 "sql": 0,
173 "nosql": 0,
174 "remote": 0,
175 "tmpl": 0,
176 "custom": 0,
177 "sql_calls": 0,
178 "nosql_calls": 0,
179 "remote_calls": 0,
180 "tmpl_calls": 0,
181 "custom_calls": 0,
167 182 "start_interval": start_interval,
168 183 "server_name": server_name,
169 "view_name": view_name
184 "view_name": view_name,
170 185 }
171 metrics[key]["requests"] += int(view_metrics['requests'])
172 metrics[key]["main"] += round(view_metrics['main'], 5)
173 metrics[key]["sql"] += round(view_metrics['sql'], 5)
174 metrics[key]["nosql"] += round(view_metrics['nosql'], 5)
175 metrics[key]["remote"] += round(view_metrics['remote'], 5)
176 metrics[key]["tmpl"] += round(view_metrics['tmpl'], 5)
177 metrics[key]["custom"] += round(view_metrics.get('custom', 0.0),
178 5)
179 metrics[key]["sql_calls"] += int(
180 view_metrics.get('sql_calls', 0))
181 metrics[key]["nosql_calls"] += int(
182 view_metrics.get('nosql_calls', 0))
183 metrics[key]["remote_calls"] += int(
184 view_metrics.get('remote_calls', 0))
185 metrics[key]["tmpl_calls"] += int(
186 view_metrics.get('tmpl_calls', 0))
187 metrics[key]["custom_calls"] += int(
188 view_metrics.get('custom_calls', 0))
186 metrics[key]["requests"] += int(view_metrics["requests"])
187 metrics[key]["main"] += round(view_metrics["main"], 5)
188 metrics[key]["sql"] += round(view_metrics["sql"], 5)
189 metrics[key]["nosql"] += round(view_metrics["nosql"], 5)
190 metrics[key]["remote"] += round(view_metrics["remote"], 5)
191 metrics[key]["tmpl"] += round(view_metrics["tmpl"], 5)
192 metrics[key]["custom"] += round(view_metrics.get("custom", 0.0), 5)
193 metrics[key]["sql_calls"] += int(view_metrics.get("sql_calls", 0))
194 metrics[key]["nosql_calls"] += int(view_metrics.get("nosql_calls", 0))
195 metrics[key]["remote_calls"] += int(view_metrics.get("remote_calls", 0))
196 metrics[key]["tmpl_calls"] += int(view_metrics.get("tmpl_calls", 0))
197 metrics[key]["custom_calls"] += int(view_metrics.get("custom_calls", 0))
189 198
190 199 if not metrics[key]["requests"]:
191 200 # fix this here because validator can't
@@ -197,126 +206,153 b' def request_metrics_create(request):'
197 206 normalized_metrics = []
198 207 for metric in metrics.values():
199 208 new_metric = {
200 'namespace': 'appenlight.request_metric',
201 'timestamp': metric.pop('start_interval'),
202 'server_name': metric['server_name'],
203 'tags': list(metric.items())
209 "namespace": "appenlight.request_metric",
210 "timestamp": metric.pop("start_interval"),
211 "server_name": metric["server_name"],
212 "tags": list(metric.items()),
204 213 }
205 214 normalized_metrics.append(new_metric)
206 215
207 tasks.add_metrics.delay(application.resource_id, params,
208 normalized_metrics, proto_version)
216 tasks.add_metrics.delay(
217 application.resource_id, params, normalized_metrics, proto_version
218 )
209 219
210 log.info('REQUEST METRICS call {} {} client:{}'.format(
211 application.resource_name, proto_version,
212 request.headers.get('user_agent')))
213 return 'OK: request metrics accepted'
220 log.info(
221 "REQUEST METRICS call {} {} client:{}".format(
222 application.resource_name, proto_version, request.headers.get("user_agent")
223 )
224 )
225 return "OK: request metrics accepted"
214 226
215 227
216 @view_config(route_name='api_general_metrics', renderer='string',
217 permission='create', require_csrf=False)
218 @view_config(route_name='api_general_metric', renderer='string',
219 permission='create', require_csrf=False)
228 @view_config(
229 route_name="api_general_metrics",
230 renderer="string",
231 permission="create",
232 require_csrf=False,
233 )
234 @view_config(
235 route_name="api_general_metric",
236 renderer="string",
237 permission="create",
238 require_csrf=False,
239 )
220 240 def general_metrics_create(request):
221 241 """
222 242 Endpoint for general metrics aggregation
223 243 """
224 244 application = request.context.resource
225 if request.method.upper() == 'OPTIONS':
245 if request.method.upper() == "OPTIONS":
226 246 return check_cors(request, application)
227 247 else:
228 248 check_cors(request, application, should_return=False)
229 249
230 250 params = dict(request.params.copy())
231 proto_version = parse_proto(params.get('protocol_version', ''))
251 proto_version = parse_proto(params.get("protocol_version", ""))
232 252 payload = request.unsafe_json_body
233 sequence_accepted = request.matched_route.name == 'api_general_metrics'
253 sequence_accepted = request.matched_route.name == "api_general_metrics"
234 254 if sequence_accepted:
235 255 if application.allow_permanent_storage:
236 256 schema = GeneralMetricsPermanentListSchema().bind(
237 utcnow=datetime.datetime.utcnow())
257 utcnow=datetime.datetime.utcnow()
258 )
238 259 else:
239 schema = GeneralMetricsListSchema().bind(
240 utcnow=datetime.datetime.utcnow())
260 schema = GeneralMetricsListSchema().bind(utcnow=datetime.datetime.utcnow())
241 261 else:
242 262 if application.allow_permanent_storage:
243 263 schema = GeneralMetricPermanentSchema().bind(
244 utcnow=datetime.datetime.utcnow())
264 utcnow=datetime.datetime.utcnow()
265 )
245 266 else:
246 schema = GeneralMetricSchema().bind(
247 utcnow=datetime.datetime.utcnow())
267 schema = GeneralMetricSchema().bind(utcnow=datetime.datetime.utcnow())
248 268
249 269 deserialized_metrics = schema.deserialize(payload)
250 270 if sequence_accepted is False:
251 271 deserialized_metrics = [deserialized_metrics]
252 272
253 rate_limiting(request, application, 'per_application_metrics_rate_limit',
254 len(deserialized_metrics))
273 rate_limiting(
274 request,
275 application,
276 "per_application_metrics_rate_limit",
277 len(deserialized_metrics),
278 )
255 279
256 tasks.add_metrics.delay(application.resource_id, params,
257 deserialized_metrics, proto_version)
280 tasks.add_metrics.delay(
281 application.resource_id, params, deserialized_metrics, proto_version
282 )
258 283
259 log.info('METRICS call {} {} client:{}'.format(
260 application.resource_name, proto_version,
261 request.headers.get('user_agent')))
262 return 'OK: Metrics accepted'
284 log.info(
285 "METRICS call {} {} client:{}".format(
286 application.resource_name, proto_version, request.headers.get("user_agent")
287 )
288 )
289 return "OK: Metrics accepted"
263 290
264 291
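logs_create, general_metrics_create and reports_create all follow one shape: the plural route accepts a JSON list, the singular route accepts a single object that is wrapped into a list after validation, and applications with allow_permanent_storage swap in a different schema class the same way. Distilled, with the schema classes passed in rather than hard-coded:

    import datetime

    def deserialize_payload(payload, sequence_accepted, list_schema_cls, item_schema_cls):
        # pick the list or single-item schema, then normalize to a list either way
        schema_cls = list_schema_cls if sequence_accepted else item_schema_cls
        schema = schema_cls().bind(utcnow=datetime.datetime.utcnow())
        deserialized = schema.deserialize(payload)
        return deserialized if sequence_accepted else [deserialized]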
265 @view_config(route_name='api_reports', renderer='string', permission='create',
266 require_csrf=False)
267 @view_config(route_name='api_slow_reports', renderer='string',
268 permission='create', require_csrf=False)
269 @view_config(route_name='api_report', renderer='string', permission='create',
270 require_csrf=False)
292 @view_config(
293 route_name="api_reports", renderer="string", permission="create", require_csrf=False
294 )
295 @view_config(
296 route_name="api_slow_reports",
297 renderer="string",
298 permission="create",
299 require_csrf=False,
300 )
301 @view_config(
302 route_name="api_report", renderer="string", permission="create", require_csrf=False
303 )
271 304 def reports_create(request):
272 305 """
273 306 Endpoint for exception and slowness reports
274 307 """
275 308 # route_url('reports')
276 309 application = request.context.resource
277 if request.method.upper() == 'OPTIONS':
310 if request.method.upper() == "OPTIONS":
278 311 return check_cors(request, application)
279 312 else:
280 313 check_cors(request, application, should_return=False)
281 314 params = dict(request.params.copy())
282 proto_version = parse_proto(params.get('protocol_version', ''))
315 proto_version = parse_proto(params.get("protocol_version", ""))
283 316 payload = request.unsafe_json_body
284 sequence_accepted = request.matched_route.name == 'api_reports'
317 sequence_accepted = request.matched_route.name == "api_reports"
285 318
286 319 if sequence_accepted:
287 schema = ReportListSchema_0_5().bind(
288 utcnow=datetime.datetime.utcnow())
320 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
289 321 else:
290 schema = ReportSchema_0_5().bind(
291 utcnow=datetime.datetime.utcnow())
322 schema = ReportSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
292 323
293 324 deserialized_reports = schema.deserialize(payload)
294 325 if sequence_accepted is False:
295 326 deserialized_reports = [deserialized_reports]
296 327 if deserialized_reports:
297 rate_limiting(request, application,
298 'per_application_reports_rate_limit',
299 len(deserialized_reports))
328 rate_limiting(
329 request,
330 application,
331 "per_application_reports_rate_limit",
332 len(deserialized_reports),
333 )
300 334
301 335 # pprint.pprint(deserialized_reports)
302 tasks.add_reports.delay(application.resource_id, params,
303 deserialized_reports)
304 log.info('REPORT call %s, %s client:%s' % (
305 application,
306 proto_version,
307 request.headers.get('user_agent'))
336 tasks.add_reports.delay(application.resource_id, params, deserialized_reports)
337 log.info(
338 "REPORT call %s, %s client:%s"
339 % (application, proto_version, request.headers.get("user_agent"))
308 340 )
309 return 'OK: Reports accepted'
341 return "OK: Reports accepted"
310 342
311 343
312 @view_config(route_name='api_airbrake', renderer='string', permission='create',
313 require_csrf=False)
344 @view_config(
345 route_name="api_airbrake",
346 renderer="string",
347 permission="create",
348 require_csrf=False,
349 )
314 350 def airbrake_xml_compat(request):
315 351 """
316 352 Airbrake compatible endpoint for XML reports
317 353 """
318 354 application = request.context.resource
319 if request.method.upper() == 'OPTIONS':
355 if request.method.upper() == "OPTIONS":
320 356 return check_cors(request, application)
321 357 else:
322 358 check_cors(request, application, should_return=False)
@@ -326,18 +362,27 b' def airbrake_xml_compat(request):'
326 362 error_dict = parse_airbrake_xml(request)
327 363 schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
328 364 deserialized_reports = schema.deserialize([error_dict])
329 rate_limiting(request, application, 'per_application_reports_rate_limit',
330 len(deserialized_reports))
365 rate_limiting(
366 request,
367 application,
368 "per_application_reports_rate_limit",
369 len(deserialized_reports),
370 )
331 371
332 tasks.add_reports.delay(application.resource_id, params,
333 deserialized_reports)
334 log.info('%s AIRBRAKE call for application %s, api_ver:%s client:%s' % (
335 500, application.resource_name,
336 request.params.get('protocol_version', 'unknown'),
337 request.headers.get('user_agent'))
372 tasks.add_reports.delay(application.resource_id, params, deserialized_reports)
373 log.info(
374 "%s AIRBRAKE call for application %s, api_ver:%s client:%s"
375 % (
376 500,
377 application.resource_name,
378 request.params.get("protocol_version", "unknown"),
379 request.headers.get("user_agent"),
380 )
381 )
382 return (
383 "<notice><id>no-id</id><url>%s</url></notice>"
384 % request.registry.settings["mailing.app_url"]
338 385 )
339 return '<notice><id>no-id</id><url>%s</url></notice>' % \
340 request.registry.settings['mailing.app_url']
341 386
342 387
343 388 def decompress_gzip(data):
@@ -369,33 +414,38 b' def decode_b64(data):'
369 414 raise HTTPBadRequest()
370 415
371 416
372 @view_config(route_name='api_sentry', renderer='string', permission='create',
373 require_csrf=False)
374 @view_config(route_name='api_sentry_slash', renderer='string',
375 permission='create', require_csrf=False)
417 @view_config(
418 route_name="api_sentry", renderer="string", permission="create", require_csrf=False
419 )
420 @view_config(
421 route_name="api_sentry_slash",
422 renderer="string",
423 permission="create",
424 require_csrf=False,
425 )
376 426 def sentry_compat(request):
377 427 """
378 428 Sentry compatible endpoint
379 429 """
380 430 application = request.context.resource
381 if request.method.upper() == 'OPTIONS':
431 if request.method.upper() == "OPTIONS":
382 432 return check_cors(request, application)
383 433 else:
384 434 check_cors(request, application, should_return=False)
385 435
386 436 # handle various report encoding
387 content_encoding = request.headers.get('Content-Encoding')
388 content_type = request.headers.get('Content-Type')
389 if content_encoding == 'gzip':
437 content_encoding = request.headers.get("Content-Encoding")
438 content_type = request.headers.get("Content-Type")
439 if content_encoding == "gzip":
390 440 body = decompress_gzip(request.body)
391 elif content_encoding == 'deflate':
441 elif content_encoding == "deflate":
392 442 body = decompress_zlib(request.body)
393 443 else:
394 444 body = request.body
395 445 # attempt to fix string before decoding for stupid clients
396 if content_type == 'application/x-www-form-urlencoded':
397 body = urllib.parse.unquote(body.decode('utf8'))
398 check_char = '{' if isinstance(body, str) else b'{'
446 if content_type == "application/x-www-form-urlencoded":
447 body = urllib.parse.unquote(body.decode("utf8"))
448 check_char = "{" if isinstance(body, str) else b"{"
399 449 if not body.startswith(check_char):
400 450 try:
401 451 body = decode_b64(body)
@@ -404,7 +454,7 b' def sentry_compat(request):'
404 454 log.info(exc)
405 455
406 456 try:
407 json_body = json.loads(body.decode('utf8'))
457 json_body = json.loads(body.decode("utf8"))
408 458 except ValueError:
409 459 raise JSONException("Incorrect JSON")
410 460
@@ -412,24 +462,24 b' def sentry_compat(request):'
412 462
413 463 if event_type == ParsedSentryEventType.LOG:
414 464 if application.allow_permanent_storage:
415 schema = LogSchemaPermanent().bind(
416 utcnow=datetime.datetime.utcnow())
465 schema = LogSchemaPermanent().bind(utcnow=datetime.datetime.utcnow())
417 466 else:
418 schema = LogSchema().bind(
419 utcnow=datetime.datetime.utcnow())
467 schema = LogSchema().bind(utcnow=datetime.datetime.utcnow())
420 468 deserialized_logs = schema.deserialize(event)
421 469 non_pkey_logs = [deserialized_logs]
422 log.debug('%s non-pkey logs received: %s' % (application,
423 len(non_pkey_logs)))
470 log.debug("%s non-pkey logs received: %s" % (application, len(non_pkey_logs)))
424 471 tasks.add_logs.delay(application.resource_id, {}, non_pkey_logs)
425 472 if event_type == ParsedSentryEventType.ERROR_REPORT:
426 473 schema = ReportSchema_0_5().bind(
427 474 utcnow=datetime.datetime.utcnow(),
428 allow_permanent_storage=application.allow_permanent_storage)
475 allow_permanent_storage=application.allow_permanent_storage,
476 )
429 477 deserialized_reports = [schema.deserialize(event)]
430 rate_limiting(request, application,
431 'per_application_reports_rate_limit',
432 len(deserialized_reports))
433 tasks.add_reports.delay(application.resource_id, {},
434 deserialized_reports)
435 return 'OK: Events accepted'
478 rate_limiting(
479 request,
480 application,
481 "per_application_reports_rate_limit",
482 len(deserialized_reports),
483 )
484 tasks.add_reports.delay(application.resource_id, {}, deserialized_reports)
485 return "OK: Events accepted"
@@ -31,19 +31,22 b' import appenlight.forms as forms'
31 31 from appenlight.models import DBSession
32 32 from appenlight.models.resource import Resource
33 33 from appenlight.models.application import Application
34 from appenlight.models.application_postprocess_conf import \
35 ApplicationPostprocessConf
34 from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
36 35 from ziggurat_foundations.models.services.user import UserService
37 36 from ziggurat_foundations.models.services.resource import ResourceService
38 from ziggurat_foundations.models.services.user_resource_permission import UserResourcePermissionService
37 from ziggurat_foundations.models.services.user_resource_permission import (
38 UserResourcePermissionService,
39 )
39 40 from appenlight.models.user_resource_permission import UserResourcePermission
40 41 from appenlight.models.group_resource_permission import GroupResourcePermission
41 42 from appenlight.models.services.application import ApplicationService
42 from appenlight.models.services.application_postprocess_conf import \
43 ApplicationPostprocessConfService
43 from appenlight.models.services.application_postprocess_conf import (
44 ApplicationPostprocessConfService,
45 )
44 46 from appenlight.models.services.group import GroupService
45 from appenlight.models.services.group_resource_permission import \
46 GroupResourcePermissionService
47 from appenlight.models.services.group_resource_permission import (
48 GroupResourcePermissionService,
49 )
47 50 from appenlight.models.services.request_metric import RequestMetricService
48 51 from appenlight.models.services.report_group import ReportGroupService
49 52 from appenlight.models.services.slow_call import SlowCallService
@@ -62,13 +65,16 b' def app_not_found(request, id):'
62 65 """
63 66 Redirects on non found and sets a flash message
64 67 """
65 request.session.flash(_('Application not found'), 'warning')
66 return HTTPFound(
67 location=request.route_url('applications', action='index'))
68 request.session.flash(_("Application not found"), "warning")
69 return HTTPFound(location=request.route_url("applications", action="index"))
68 70
69 71
70 @view_config(route_name='applications_no_id',
71 renderer='json', request_method="GET", permission='authenticated')
72 @view_config(
73 route_name="applications_no_id",
74 renderer="json",
75 request_method="GET",
76 permission="authenticated",
77 )
72 78 def applications_list(request):
73 79 """
74 80 Applications list
@@ -82,55 +88,68 b' def applications_list(request):'
82 88 applications in the system
83 89
84 90 """
85 is_root = request.has_permission('root_administration',
86 RootFactory(request))
87 if is_root and request.GET.get('root_list'):
91 is_root = request.has_permission("root_administration", RootFactory(request))
92 if is_root and request.GET.get("root_list"):
88 93 resources = Resource.all().order_by(Resource.resource_name)
89 resource_type = request.GET.get('resource_type', 'application')
94 resource_type = request.GET.get("resource_type", "application")
90 95 if resource_type:
91 resources = resources.filter(
92 Resource.resource_type == resource_type)
96 resources = resources.filter(Resource.resource_type == resource_type)
93 97 else:
94 permissions = request.params.getall('permission')
98 permissions = request.params.getall("permission")
95 99 if permissions:
96 100 resources = UserService.resources_with_perms(
97 101 request.user,
98 102 permissions,
99 resource_types=[request.GET.get('resource_type',
100 'application')])
103 resource_types=[request.GET.get("resource_type", "application")],
104 )
101 105 else:
102 106 resources = request.user.resources.filter(
103 Application.resource_type == request.GET.get(
104 'resource_type',
105 'application'))
106 return [r.get_dict(include_keys=['resource_id', 'resource_name', 'domains',
107 'owner_user_name', 'owner_group_name'])
108 for
109 r in resources]
110
111
112 @view_config(route_name='applications', renderer='json',
113 request_method="GET", permission='view')
107 Application.resource_type
108 == request.GET.get("resource_type", "application")
109 )
110 return [
111 r.get_dict(
112 include_keys=[
113 "resource_id",
114 "resource_name",
115 "domains",
116 "owner_user_name",
117 "owner_group_name",
118 ]
119 )
120 for r in resources
121 ]
122
123
124 @view_config(
125 route_name="applications", renderer="json", request_method="GET", permission="view"
126 )
114 127 def application_GET(request):
115 128 resource = request.context.resource
116 129 include_sensitive_info = False
117 if request.has_permission('edit'):
130 if request.has_permission("edit"):
118 131 include_sensitive_info = True
119 132 resource_dict = resource.get_dict(
120 133 include_perms=include_sensitive_info,
121 include_processing_rules=include_sensitive_info)
134 include_processing_rules=include_sensitive_info,
135 )
122 136 return resource_dict
123 137
124 138
125 @view_config(route_name='applications_no_id', request_method="POST",
126 renderer='json', permission='create_resources')
139 @view_config(
140 route_name="applications_no_id",
141 request_method="POST",
142 renderer="json",
143 permission="create_resources",
144 )
127 145 def application_create(request):
128 146 """
129 147 Creates new application instances
130 148 """
131 149 user = request.user
132 form = forms.ApplicationCreateForm(MultiDict(request.unsafe_json_body),
133 csrf_context=request)
150 form = forms.ApplicationCreateForm(
151 MultiDict(request.unsafe_json_body), csrf_context=request
152 )
134 153 if form.validate():
135 154 session = DBSession()
136 155 resource = Application()
@@ -138,7 +157,7 b' def application_create(request):'
138 157 form.populate_obj(resource)
139 158 resource.api_key = resource.generate_api_key()
140 159 user.resources.append(resource)
141 request.session.flash(_('Application created'))
160 request.session.flash(_("Application created"))
142 161 DBSession.flush()
143 162 mark_changed(session)
144 163 else:
@@ -147,8 +166,12 b' def application_create(request):'
147 166 return resource.get_dict()
148 167
149 168
150 @view_config(route_name='applications', request_method="PATCH",
151 renderer='json', permission='edit')
169 @view_config(
170 route_name="applications",
171 request_method="PATCH",
172 renderer="json",
173 permission="edit",
174 )
152 175 def application_update(request):
153 176 """
154 177 Updates main application configuration
@@ -160,60 +183,72 b' def application_update(request):'
160 183 # disallow setting permanent storage by non-admins
161 184 # use default/non-resource based context for this check
162 185 req_dict = copy.copy(request.unsafe_json_body)
163 if not request.has_permission('root_administration', RootFactory(request)):
164 req_dict['allow_permanent_storage'] = ''
165 if not req_dict.get('uptime_url'):
186 if not request.has_permission("root_administration", RootFactory(request)):
187 req_dict["allow_permanent_storage"] = ""
188 if not req_dict.get("uptime_url"):
166 189 # needed because validator is still triggered by default
167 req_dict.pop('uptime_url', '')
168 application_form = forms.ApplicationUpdateForm(MultiDict(req_dict),
169 csrf_context=request)
190 req_dict.pop("uptime_url", "")
191 application_form = forms.ApplicationUpdateForm(
192 MultiDict(req_dict), csrf_context=request
193 )
170 194 if application_form.validate():
171 195 application_form.populate_obj(resource)
172 request.session.flash(_('Application updated'))
196 request.session.flash(_("Application updated"))
173 197 else:
174 198 return HTTPUnprocessableEntity(body=application_form.errors_json)
175 199
176 200 include_sensitive_info = False
177 if request.has_permission('edit'):
201 if request.has_permission("edit"):
178 202 include_sensitive_info = True
179 203 resource_dict = resource.get_dict(
180 204 include_perms=include_sensitive_info,
181 include_processing_rules=include_sensitive_info)
205 include_processing_rules=include_sensitive_info,
206 )
182 207 return resource_dict
183 208
184 209
185 @view_config(route_name='applications_property', match_param='key=api_key',
186 request_method="POST", renderer='json',
187 permission='delete')
210 @view_config(
211 route_name="applications_property",
212 match_param="key=api_key",
213 request_method="POST",
214 renderer="json",
215 permission="delete",
216 )
188 217 def application_regenerate_key(request):
189 218 """
190 219 Regenerates API keys for application
191 220 """
192 221 resource = request.context.resource
193 222
194 form = forms.CheckPasswordForm(MultiDict(request.unsafe_json_body),
195 csrf_context=request)
223 form = forms.CheckPasswordForm(
224 MultiDict(request.unsafe_json_body), csrf_context=request
225 )
196 226 form.password.user = request.user
197 227
198 228 if form.validate():
199 229 resource.api_key = resource.generate_api_key()
200 230 resource.public_key = resource.generate_api_key()
201 msg = 'API keys regenerated - please update your application config.'
231 msg = "API keys regenerated - please update your application config."
202 232 request.session.flash(_(msg))
203 233 else:
204 234 return HTTPUnprocessableEntity(body=form.errors_json)
205 235
206 if request.has_permission('edit'):
236 if request.has_permission("edit"):
207 237 include_sensitive_info = True
208 238 resource_dict = resource.get_dict(
209 239 include_perms=include_sensitive_info,
210 include_processing_rules=include_sensitive_info)
240 include_processing_rules=include_sensitive_info,
241 )
211 242 return resource_dict
212 243
213 244
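Key regeneration is gated on re-entering the account password. A minimal sketch of that confirm-then-act pattern (the real check is CheckPasswordForm, and secrets.token_hex stands in for resource.generate_api_key()):

import secrets

def regenerate_keys(resource, password_ok):
    if not password_ok:
        return None  # the view responds with HTTPUnprocessableEntity instead
    resource["api_key"] = secrets.token_hex(16)
    resource["public_key"] = secrets.token_hex(16)
    return resource

print(regenerate_keys({"api_key": "old", "public_key": "old"}, password_ok=True))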
214 @view_config(route_name='applications_property',
215 match_param='key=delete_resource',
216 request_method="PATCH", renderer='json', permission='delete')
245 @view_config(
246 route_name="applications_property",
247 match_param="key=delete_resource",
248 request_method="PATCH",
249 renderer="json",
250 permission="delete",
251 )
217 252 def application_remove(request):
218 253 """
219 254 Removes application resources
@@ -221,27 +256,34 b' def application_remove(request):'
221 256 resource = request.context.resource
222 257 # we need a polymorphic object here to properly trigger sqlalchemy events
223 258 resource = ApplicationService.by_id(resource.resource_id)
224 form = forms.CheckPasswordForm(MultiDict(request.safe_json_body or {}),
225 csrf_context=request)
259 form = forms.CheckPasswordForm(
260 MultiDict(request.safe_json_body or {}), csrf_context=request
261 )
226 262 form.password.user = request.user
227 263 if form.validate():
228 264 DBSession.delete(resource)
229 request.session.flash(_('Application removed'))
265 request.session.flash(_("Application removed"))
230 266 else:
231 267 return HTTPUnprocessableEntity(body=form.errors_json)
232 268
233 269 return True
234 270
235 271
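The re-fetch through ApplicationService.by_id is worth pausing on: deleting through the polymorphic subclass is what lets subclass-level SQLAlchemy events fire. A self-contained sketch with hypothetical models (SQLAlchemy 1.4+ API):

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, event
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Resource(Base):
    __tablename__ = "resources"
    resource_id = Column(Integer, primary_key=True)
    type = Column(String(30))
    __mapper_args__ = {"polymorphic_identity": "resource", "polymorphic_on": type}

class Application(Resource):
    __tablename__ = "applications"
    resource_id = Column(Integer, ForeignKey("resources.resource_id"), primary_key=True)
    __mapper_args__ = {"polymorphic_identity": "application"}

@event.listens_for(Application, "before_delete")
def _cleanup(mapper, connection, target):
    # subclass-specific cleanup fires because the ORM sees an Application
    print("deleting application", target.resource_id)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Application(resource_id=1))
    session.flush()
    session.delete(session.get(Application, 1))  # polymorphic load
    session.commit()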
236 @view_config(route_name='applications_property', match_param='key=owner',
237 request_method="PATCH", renderer='json', permission='delete')
272 @view_config(
273 route_name="applications_property",
274 match_param="key=owner",
275 request_method="PATCH",
276 renderer="json",
277 permission="delete",
278 )
238 279 def application_ownership_transfer(request):
239 280 """
240 281 Allows application owner to transfer application ownership to other user
241 282 """
242 283 resource = request.context.resource
243 284 form = forms.ChangeApplicationOwnerForm(
244 MultiDict(request.safe_json_body or {}), csrf_context=request)
285 MultiDict(request.safe_json_body or {}), csrf_context=request
286 )
245 287 form.password.user = request.user
246 288 if form.validate():
247 289 user = UserService.by_user_name(form.user_name.data)
@@ -249,55 +291,68 b' def application_ownership_transfer(request):'
249 291 # remove integrations so we don't leak security data of external applications
250 292 for integration in resource.integrations[:]:
251 293 resource.integrations.remove(integration)
252 request.session.flash(_('Application transfered'))
294 request.session.flash(_("Application transferred"))
253 295 else:
254 296 return HTTPUnprocessableEntity(body=form.errors_json)
255 297 return True
256 298
257 299
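Note the resource.integrations[:] slice above: when removing items from a list while iterating over it, iterate over a copy, otherwise the shifting indexes skip elements. A two-line illustration:

integrations = ["slack", "email", "webhook"]
for integration in integrations[:]:  # slice copy; iterating the list itself would skip items
    integrations.remove(integration)
assert integrations == []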
258 @view_config(route_name='applications_property',
259 match_param='key=postprocessing_rules', renderer='json',
260 request_method='POST', permission='edit')
300 @view_config(
301 route_name="applications_property",
302 match_param="key=postprocessing_rules",
303 renderer="json",
304 request_method="POST",
305 permission="edit",
306 )
261 307 def applications_postprocess_POST(request):
262 308 """
263 309 Creates new postprocessing rules for applications
264 310 """
265 311 resource = request.context.resource
266 312 conf = ApplicationPostprocessConf()
267 conf.do = 'postprocess'
268 conf.new_value = '1'
313 conf.do = "postprocess"
314 conf.new_value = "1"
269 315 resource.postprocess_conf.append(conf)
270 316 DBSession.flush()
271 317 return conf.get_dict()
272 318
273 319
274 @view_config(route_name='applications_property',
275 match_param='key=postprocessing_rules', renderer='json',
276 request_method='PATCH', permission='edit')
320 @view_config(
321 route_name="applications_property",
322 match_param="key=postprocessing_rules",
323 renderer="json",
324 request_method="PATCH",
325 permission="edit",
326 )
277 327 def applications_postprocess_PATCH(request):
278 328 """
279 329 Creates new postprocessing rules for applications
280 330 """
281 331 json_body = request.unsafe_json_body
282 332
283 schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX)
333 schema = build_rule_schema(json_body["rule"], REPORT_TYPE_MATRIX)
284 334 try:
285 schema.deserialize(json_body['rule'])
335 schema.deserialize(json_body["rule"])
286 336 except colander.Invalid as exc:
287 337 return HTTPUnprocessableEntity(body=json.dumps(exc.asdict()))
288 338
289 339 resource = request.context.resource
290 340 conf = ApplicationPostprocessConfService.by_pkey_and_resource_id(
291 json_body['pkey'], resource.resource_id)
292 conf.rule = request.unsafe_json_body['rule']
341 json_body["pkey"], resource.resource_id
342 )
343 conf.rule = request.unsafe_json_body["rule"]
293 344 # for now hardcode int since we don't support anything else so far
294 conf.new_value = int(request.unsafe_json_body['new_value'])
345 conf.new_value = int(request.unsafe_json_body["new_value"])
295 346 return conf.get_dict()
296 347
297 348
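The rule payload is validated with colander before it touches the database. A minimal sketch with a simplified schema (build_rule_schema and REPORT_TYPE_MATRIX are application-specific, so the schema below is an assumption):

import json
import colander

class RuleSchema(colander.MappingSchema):
    field = colander.SchemaNode(colander.String())
    op = colander.SchemaNode(colander.String())
    value = colander.SchemaNode(colander.String())

schema = RuleSchema()
try:
    schema.deserialize({"field": "http_status", "op": "eq"})  # "value" missing
except colander.Invalid as exc:
    print(json.dumps(exc.asdict()))  # {"value": "Required"}

The view serializes exc.asdict() the same way and returns it as the 422 body.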
298 @view_config(route_name='applications_property',
299 match_param='key=postprocessing_rules', renderer='json',
300 request_method='DELETE', permission='edit')
349 @view_config(
350 route_name="applications_property",
351 match_param="key=postprocessing_rules",
352 renderer="json",
353 request_method="DELETE",
354 permission="edit",
355 )
301 356 def applications_postprocess_DELETE(request):
302 357 """
303 358 Removes application postprocessing rules
@@ -306,89 +361,99 b' def applications_postprocess_DELETE(request):'
306 361 resource = request.context.resource
307 362 if form.validate():
308 363 for postprocess_conf in resource.postprocess_conf:
309 if postprocess_conf.pkey == int(request.GET['pkey']):
364 if postprocess_conf.pkey == int(request.GET["pkey"]):
310 365 # remove rule
311 366 DBSession.delete(postprocess_conf)
312 367 return True
313 368
314 369
315 @view_config(route_name='applications_property',
316 match_param='key=report_graphs', renderer='json',
317 permission='view')
318 @view_config(route_name='applications_property',
319 match_param='key=slow_report_graphs', renderer='json',
320 permission='view')
370 @view_config(
371 route_name="applications_property",
372 match_param="key=report_graphs",
373 renderer="json",
374 permission="view",
375 )
376 @view_config(
377 route_name="applications_property",
378 match_param="key=slow_report_graphs",
379 renderer="json",
380 permission="view",
381 )
321 382 def get_application_report_stats(request):
322 383 query_params = request.GET.mixed()
323 query_params['resource'] = (request.context.resource.resource_id,)
384 query_params["resource"] = (request.context.resource.resource_id,)
324 385
325 filter_settings = build_filter_settings_from_query_dict(request,
326 query_params)
327 if not filter_settings.get('end_date'):
386 filter_settings = build_filter_settings_from_query_dict(request, query_params)
387 if not filter_settings.get("end_date"):
328 388 end_date = datetime.utcnow().replace(microsecond=0, second=0)
329 filter_settings['end_date'] = end_date
389 filter_settings["end_date"] = end_date
330 390
331 if not filter_settings.get('start_date'):
391 if not filter_settings.get("start_date"):
332 392 delta = timedelta(hours=1)
333 filter_settings['start_date'] = filter_settings['end_date'] - delta
393 filter_settings["start_date"] = filter_settings["end_date"] - delta
334 394
335 395 result = ReportGroupService.get_report_stats(request, filter_settings)
336 396 return result
337 397
338 398
339 @view_config(route_name='applications_property',
340 match_param='key=metrics_graphs', renderer='json',
341 permission='view')
399 @view_config(
400 route_name="applications_property",
401 match_param="key=metrics_graphs",
402 renderer="json",
403 permission="view",
404 )
342 405 def metrics_graphs(request):
343 406 """
344 407 Handles metric dashboard graphs
345 408 Returns information for time/tier breakdown
346 409 """
347 410 query_params = request.GET.mixed()
348 query_params['resource'] = (request.context.resource.resource_id,)
411 query_params["resource"] = (request.context.resource.resource_id,)
349 412
350 filter_settings = build_filter_settings_from_query_dict(request,
351 query_params)
413 filter_settings = build_filter_settings_from_query_dict(request, query_params)
352 414
353 if not filter_settings.get('end_date'):
415 if not filter_settings.get("end_date"):
354 416 end_date = datetime.utcnow().replace(microsecond=0, second=0)
355 filter_settings['end_date'] = end_date
417 filter_settings["end_date"] = end_date
356 418
357 419 delta = timedelta(hours=1)
358 if not filter_settings.get('start_date'):
359 filter_settings['start_date'] = filter_settings['end_date'] - delta
360 if filter_settings['end_date'] <= filter_settings['start_date']:
361 filter_settings['end_date'] = filter_settings['start_date']
420 if not filter_settings.get("start_date"):
421 filter_settings["start_date"] = filter_settings["end_date"] - delta
422 if filter_settings["end_date"] <= filter_settings["start_date"]:
423 filter_settings["end_date"] = filter_settings["start_date"]
362 424
363 delta = filter_settings['end_date'] - filter_settings['start_date']
364 if delta < h.time_deltas.get('12h')['delta']:
425 delta = filter_settings["end_date"] - filter_settings["start_date"]
426 if delta < h.time_deltas.get("12h")["delta"]:
365 427 divide_by_min = 1
366 elif delta <= h.time_deltas.get('3d')['delta']:
428 elif delta <= h.time_deltas.get("3d")["delta"]:
367 429 divide_by_min = 5.0
368 elif delta >= h.time_deltas.get('2w')['delta']:
430 elif delta >= h.time_deltas.get("2w")["delta"]:
369 431 divide_by_min = 60.0 * 24
370 432 else:
371 433 divide_by_min = 60.0
372 434
373 results = RequestMetricService.get_metrics_stats(
374 request, filter_settings)
435 results = RequestMetricService.get_metrics_stats(request, filter_settings)
375 436 # because requests are PER SECOND / we divide 1 min stats by 60
376 437 # requests are normalized to 1 min average
377 438 # results are average seconds time spent per request in specific area
378 439 for point in results:
379 if point['requests']:
380 point['main'] = (point['main'] - point['sql'] -
381 point['nosql'] - point['remote'] -
382 point['tmpl'] -
383 point['custom']) / point['requests']
384 point['sql'] = point['sql'] / point['requests']
385 point['nosql'] = point['nosql'] / point['requests']
386 point['remote'] = point['remote'] / point['requests']
387 point['tmpl'] = point['tmpl'] / point['requests']
388 point['custom'] = point['custom'] / point['requests']
389 point['requests_2'] = point['requests'] / 60.0 / divide_by_min
390
391 selected_types = ['main', 'sql', 'nosql', 'remote', 'tmpl', 'custom']
440 if point["requests"]:
441 point["main"] = (
442 point["main"]
443 - point["sql"]
444 - point["nosql"]
445 - point["remote"]
446 - point["tmpl"]
447 - point["custom"]
448 ) / point["requests"]
449 point["sql"] = point["sql"] / point["requests"]
450 point["nosql"] = point["nosql"] / point["requests"]
451 point["remote"] = point["remote"] / point["requests"]
452 point["tmpl"] = point["tmpl"] / point["requests"]
453 point["custom"] = point["custom"] / point["requests"]
454 point["requests_2"] = point["requests"] / 60.0 / divide_by_min
455
456 selected_types = ["main", "sql", "nosql", "remote", "tmpl", "custom"]
392 457
393 458 for point in results:
394 459 for stat_type in selected_types:
@@ -397,228 +462,231 b' def metrics_graphs(request):'
397 462 return results
398 463
399 464
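To make the normalization above concrete, a worked example with hypothetical bucket values: "main" holds total time, so the timed layers are subtracted before dividing, and requests_2 turns the raw bucket count into a per-minute-normalized rate:

point = {"main": 120.0, "sql": 30.0, "nosql": 6.0, "remote": 12.0,
         "tmpl": 18.0, "custom": 6.0, "requests": 60}
layers = ("sql", "nosql", "remote", "tmpl", "custom")

per_request = {k: point[k] / point["requests"] for k in layers}
per_request["main"] = (point["main"] - sum(point[k] for k in layers)) / point["requests"]
# per_request["main"] == 0.8, i.e. 0.8s of "other" time per request

divide_by_min = 1  # ranges under 12h use 1-minute buckets
requests_2 = point["requests"] / 60.0 / divide_by_min  # 1.0 request per second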
400 @view_config(route_name='applications_property',
401 match_param='key=response_graphs', renderer='json',
402 permission='view')
465 @view_config(
466 route_name="applications_property",
467 match_param="key=response_graphs",
468 renderer="json",
469 permission="view",
470 )
403 471 def response_graphs(request):
404 472 """
405 473 Handles dashboard information for avg. response time split by today,
406 474 2 days ago and a week ago
407 475 """
408 476 query_params = request.GET.mixed()
409 query_params['resource'] = (request.context.resource.resource_id,)
477 query_params["resource"] = (request.context.resource.resource_id,)
410 478
411 filter_settings = build_filter_settings_from_query_dict(request,
412 query_params)
479 filter_settings = build_filter_settings_from_query_dict(request, query_params)
413 480
414 if not filter_settings.get('end_date'):
481 if not filter_settings.get("end_date"):
415 482 end_date = datetime.utcnow().replace(microsecond=0, second=0)
416 filter_settings['end_date'] = end_date
483 filter_settings["end_date"] = end_date
417 484
418 485 delta = timedelta(hours=1)
419 if not filter_settings.get('start_date'):
420 filter_settings['start_date'] = filter_settings['end_date'] - delta
486 if not filter_settings.get("start_date"):
487 filter_settings["start_date"] = filter_settings["end_date"] - delta
421 488
422 result_now = RequestMetricService.get_metrics_stats(
423 request, filter_settings)
489 result_now = RequestMetricService.get_metrics_stats(request, filter_settings)
424 490
425 491 filter_settings_2d = filter_settings.copy()
426 filter_settings_2d['start_date'] = filter_settings['start_date'] - \
427 timedelta(days=2)
428 filter_settings_2d['end_date'] = filter_settings['end_date'] - \
429 timedelta(days=2)
430 result_2d = RequestMetricService.get_metrics_stats(
431 request, filter_settings_2d)
492 filter_settings_2d["start_date"] = filter_settings["start_date"] - timedelta(days=2)
493 filter_settings_2d["end_date"] = filter_settings["end_date"] - timedelta(days=2)
494 result_2d = RequestMetricService.get_metrics_stats(request, filter_settings_2d)
432 495
433 496 filter_settings_7d = filter_settings.copy()
434 filter_settings_7d['start_date'] = filter_settings['start_date'] - \
435 timedelta(days=7)
436 filter_settings_7d['end_date'] = filter_settings['end_date'] - \
437 timedelta(days=7)
438 result_7d = RequestMetricService.get_metrics_stats(
439 request, filter_settings_7d)
497 filter_settings_7d["start_date"] = filter_settings["start_date"] - timedelta(days=7)
498 filter_settings_7d["end_date"] = filter_settings["end_date"] - timedelta(days=7)
499 result_7d = RequestMetricService.get_metrics_stats(request, filter_settings_7d)
440 500
441 501 plot_data = []
442 502
443 503 for item in result_now:
444 point = {'x': item['x'], 'today': 0, 'days_ago_2': 0,
445 'days_ago_7': 0}
446 if item['requests']:
447 point['today'] = round(item['main'] / item['requests'], 3)
504 point = {"x": item["x"], "today": 0, "days_ago_2": 0, "days_ago_7": 0}
505 if item["requests"]:
506 point["today"] = round(item["main"] / item["requests"], 3)
448 507 plot_data.append(point)
449 508
450 509 for i, item in enumerate(result_2d[:len(plot_data)]):
451 plot_data[i]['days_ago_2'] = 0
510 plot_data[i]["days_ago_2"] = 0
452 511 point = result_2d[i]
453 if point['requests']:
454 plot_data[i]['days_ago_2'] = round(point['main'] /
455 point['requests'], 3)
512 if point["requests"]:
513 plot_data[i]["days_ago_2"] = round(point["main"] / point["requests"], 3)
456 514
457 515 for i, item in enumerate(result_7d[:len(plot_data)]):
458 plot_data[i]['days_ago_7'] = 0
516 plot_data[i]["days_ago_7"] = 0
459 517 point = result_7d[i]
460 if point['requests']:
461 plot_data[i]['days_ago_7'] = round(point['main'] /
462 point['requests'], 3)
518 if point["requests"]:
519 plot_data[i]["days_ago_7"] = round(point["main"] / point["requests"], 3)
463 520
464 521 return plot_data
465 522
466 523
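The three series above come from the same one-hour window slid back in time. A compact sketch of the window construction:

from datetime import datetime, timedelta

end = datetime.utcnow().replace(microsecond=0, second=0)
base = {"start_date": end - timedelta(hours=1), "end_date": end}

def shift(window, days):
    return {k: v - timedelta(days=days) for k, v in window.items()}

today, two_days_ago, week_ago = base, shift(base, 2), shift(base, 7)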
467 @view_config(route_name='applications_property',
468 match_param='key=requests_graphs', renderer='json',
469 permission='view')
524 @view_config(
525 route_name="applications_property",
526 match_param="key=requests_graphs",
527 renderer="json",
528 permission="view",
529 )
470 530 def requests_graphs(request):
471 531 """
472 532 Handles dashboard infomation for avg. response time split by today,
473 533 2 days ago and week ago
474 534 """
475 535 query_params = request.GET.mixed()
476 query_params['resource'] = (request.context.resource.resource_id,)
536 query_params["resource"] = (request.context.resource.resource_id,)
477 537
478 filter_settings = build_filter_settings_from_query_dict(request,
479 query_params)
538 filter_settings = build_filter_settings_from_query_dict(request, query_params)
480 539
481 if not filter_settings.get('end_date'):
540 if not filter_settings.get("end_date"):
482 541 end_date = datetime.utcnow().replace(microsecond=0, second=0)
483 filter_settings['end_date'] = end_date
542 filter_settings["end_date"] = end_date
484 543
485 544 delta = timedelta(hours=1)
486 if not filter_settings.get('start_date'):
487 filter_settings['start_date'] = filter_settings['end_date'] - delta
488
489 result_now = RequestMetricService.get_metrics_stats(
490 request, filter_settings)
491
492 delta = filter_settings['end_date'] - filter_settings['start_date']
493 if delta < h.time_deltas.get('12h')['delta']:
494 seconds = h.time_deltas['1m']['minutes'] * 60.0
495 elif delta <= h.time_deltas.get('3d')['delta']:
496 seconds = h.time_deltas['5m']['minutes'] * 60.0
497 elif delta >= h.time_deltas.get('2w')['delta']:
498 seconds = h.time_deltas['24h']['minutes'] * 60.0
545 if not filter_settings.get("start_date"):
546 filter_settings["start_date"] = filter_settings["end_date"] - delta
547
548 result_now = RequestMetricService.get_metrics_stats(request, filter_settings)
549
550 delta = filter_settings["end_date"] - filter_settings["start_date"]
551 if delta < h.time_deltas.get("12h")["delta"]:
552 seconds = h.time_deltas["1m"]["minutes"] * 60.0
553 elif delta <= h.time_deltas.get("3d")["delta"]:
554 seconds = h.time_deltas["5m"]["minutes"] * 60.0
555 elif delta >= h.time_deltas.get("2w")["delta"]:
556 seconds = h.time_deltas["24h"]["minutes"] * 60.0
499 557 else:
500 seconds = h.time_deltas['1h']['minutes'] * 60.0
558 seconds = h.time_deltas["1h"]["minutes"] * 60.0
501 559
502 560 for item in result_now:
503 if item['requests']:
504 item['requests'] = round(item['requests'] / seconds, 3)
561 if item["requests"]:
562 item["requests"] = round(item["requests"] / seconds, 3)
505 563 return result_now
506 564
507 565
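The seconds value above is the bucket width matching the chosen range; dividing a per-bucket count by it yields requests per second. A sketch of the selection (the time_deltas dict mirrors h.time_deltas in shape only, which is an assumption):

from datetime import timedelta

time_deltas = {"1m": {"minutes": 1}, "5m": {"minutes": 5},
               "1h": {"minutes": 60}, "24h": {"minutes": 60 * 24}}

delta = timedelta(hours=6)  # selected range
if delta < timedelta(hours=12):
    seconds = time_deltas["1m"]["minutes"] * 60.0
elif delta <= timedelta(days=3):
    seconds = time_deltas["5m"]["minutes"] * 60.0
elif delta >= timedelta(weeks=2):
    seconds = time_deltas["24h"]["minutes"] * 60.0
else:
    seconds = time_deltas["1h"]["minutes"] * 60.0

print(round(120 / seconds, 3))  # a 120-request one-minute bucket -> 2.0 req/s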
508 @view_config(route_name='applications_property',
509 match_param='key=apdex_stats', renderer='json',
510 permission='view')
566 @view_config(
567 route_name="applications_property",
568 match_param="key=apdex_stats",
569 renderer="json",
570 permission="view",
571 )
511 572 def get_apdex_stats(request):
512 573 """
513 574 Calculates and returns the APDEX score per server for the dashboard
514 575 server information (upper right stats boxes)
515 576 """
516 577 query_params = request.GET.mixed()
517 query_params['resource'] = (request.context.resource.resource_id,)
578 query_params["resource"] = (request.context.resource.resource_id,)
518 579
519 filter_settings = build_filter_settings_from_query_dict(request,
520 query_params)
580 filter_settings = build_filter_settings_from_query_dict(request, query_params)
521 581 # make sure we have only one resource here so we don't produce
522 582 # weird results when the wrong app is picked in the app selector
523 filter_settings['resource'] = [filter_settings['resource'][0]]
583 filter_settings["resource"] = [filter_settings["resource"][0]]
524 584
525 if not filter_settings.get('end_date'):
585 if not filter_settings.get("end_date"):
526 586 end_date = datetime.utcnow().replace(microsecond=0, second=0)
527 filter_settings['end_date'] = end_date
587 filter_settings["end_date"] = end_date
528 588
529 589 delta = timedelta(hours=1)
530 if not filter_settings.get('start_date'):
531 filter_settings['start_date'] = filter_settings['end_date'] - delta
590 if not filter_settings.get("start_date"):
591 filter_settings["start_date"] = filter_settings["end_date"] - delta
532 592
533 593 return RequestMetricService.get_apdex_stats(request, filter_settings)
534 594
535 595
536 @view_config(route_name='applications_property', match_param='key=slow_calls',
537 renderer='json', permission='view')
596 @view_config(
597 route_name="applications_property",
598 match_param="key=slow_calls",
599 renderer="json",
600 permission="view",
601 )
538 602 def get_slow_calls(request):
539 603 """
540 604 Returns information on time-consuming calls in a specific time interval
541 605 """
542 606 query_params = request.GET.mixed()
543 query_params['resource'] = (request.context.resource.resource_id,)
607 query_params["resource"] = (request.context.resource.resource_id,)
544 608
545 filter_settings = build_filter_settings_from_query_dict(request,
546 query_params)
609 filter_settings = build_filter_settings_from_query_dict(request, query_params)
547 610
548 if not filter_settings.get('end_date'):
611 if not filter_settings.get("end_date"):
549 612 end_date = datetime.utcnow().replace(microsecond=0, second=0)
550 filter_settings['end_date'] = end_date
613 filter_settings["end_date"] = end_date
551 614
552 615 delta = timedelta(hours=1)
553 if not filter_settings.get('start_date'):
554 filter_settings['start_date'] = filter_settings['end_date'] - delta
616 if not filter_settings.get("start_date"):
617 filter_settings["start_date"] = filter_settings["end_date"] - delta
555 618
556 619 return SlowCallService.get_time_consuming_calls(request, filter_settings)
557 620
558 621
559 @view_config(route_name='applications_property',
560 match_param='key=requests_breakdown',
561 renderer='json', permission='view')
622 @view_config(
623 route_name="applications_property",
624 match_param="key=requests_breakdown",
625 renderer="json",
626 permission="view",
627 )
562 628 def get_requests_breakdown(request):
563 629 """
564 630 Used on the dashboard to get information about which views are most
565 631 used in a time interval
566 632 """
567 633 query_params = request.GET.mixed()
568 query_params['resource'] = (request.context.resource.resource_id,)
634 query_params["resource"] = (request.context.resource.resource_id,)
569 635
570 filter_settings = build_filter_settings_from_query_dict(request,
571 query_params)
572 if not filter_settings.get('end_date'):
636 filter_settings = build_filter_settings_from_query_dict(request, query_params)
637 if not filter_settings.get("end_date"):
573 638 end_date = datetime.utcnow().replace(microsecond=0, second=0)
574 filter_settings['end_date'] = end_date
639 filter_settings["end_date"] = end_date
575 640
576 if not filter_settings.get('start_date'):
641 if not filter_settings.get("start_date"):
577 642 delta = timedelta(hours=1)
578 filter_settings['start_date'] = filter_settings['end_date'] - delta
643 filter_settings["start_date"] = filter_settings["end_date"] - delta
579 644
580 series = RequestMetricService.get_requests_breakdown(
581 request, filter_settings)
645 series = RequestMetricService.get_requests_breakdown(request, filter_settings)
582 646
583 647 results = []
584 648 for row in series:
585 d_row = {'avg_response': round(row['main'] / row['requests'], 3),
586 'requests': row['requests'],
587 'main': row['main'],
588 'view_name': row['key'],
589 'latest_details': row['latest_details'],
590 'percentage': round(row['percentage'] * 100, 1)}
649 d_row = {
650 "avg_response": round(row["main"] / row["requests"], 3),
651 "requests": row["requests"],
652 "main": row["main"],
653 "view_name": row["key"],
654 "latest_details": row["latest_details"],
655 "percentage": round(row["percentage"] * 100, 1),
656 }
591 657
592 658 results.append(d_row)
593 659
594 660 return results
595 661
596 662
597 @view_config(route_name='applications_property',
598 match_param='key=trending_reports', renderer='json',
599 permission='view')
663 @view_config(
664 route_name="applications_property",
665 match_param="key=trending_reports",
666 renderer="json",
667 permission="view",
668 )
600 669 def trending_reports(request):
601 670 """
602 671 Returns trending exception/slow reports for a specific time interval
603 672 """
604 673 query_params = request.GET.mixed().copy()
605 674 # pop report type so we can rewrite it as a tag later
606 report_type = query_params.pop('report_type', None)
675 report_type = query_params.pop("report_type", None)
607 676 if report_type:
608 query_params['type'] = report_type
677 query_params["type"] = report_type
609 678
610 query_params['resource'] = (request.context.resource.resource_id,)
679 query_params["resource"] = (request.context.resource.resource_id,)
611 680
612 filter_settings = build_filter_settings_from_query_dict(request,
613 query_params)
681 filter_settings = build_filter_settings_from_query_dict(request, query_params)
614 682
615 if not filter_settings.get('end_date'):
683 if not filter_settings.get("end_date"):
616 684 end_date = datetime.utcnow().replace(microsecond=0, second=0)
617 filter_settings['end_date'] = end_date
685 filter_settings["end_date"] = end_date
618 686
619 if not filter_settings.get('start_date'):
687 if not filter_settings.get("start_date"):
620 688 delta = timedelta(hours=1)
621 filter_settings['start_date'] = filter_settings['end_date'] - delta
689 filter_settings["start_date"] = filter_settings["end_date"] - delta
622 690
623 691 results = ReportGroupService.get_trending(request, filter_settings)
624 692
@@ -626,112 +694,133 b' def trending_reports(request):'
626 694 for occurences, group in results:
627 695 report_group = group.get_dict(request)
628 696 # show the occurrences in the time range instead of the global ones
629 report_group['occurences'] = occurences
697 report_group["occurences"] = occurences
630 698 trending.append(report_group)
631 699
632 700 return trending
633 701
634 702
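The report_type pop above is a small query rewrite: the parameter arrives on its own, but the filter layer expects it as a "type" tag. Illustrated:

query_params = {"report_type": "error", "start_date": "2019-01-01"}
report_type = query_params.pop("report_type", None)
if report_type:
    query_params["type"] = report_type
print(query_params)  # {'start_date': '2019-01-01', 'type': 'error'}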
635 @view_config(route_name='applications_property',
636 match_param='key=integrations',
637 renderer='json', permission='view')
703 @view_config(
704 route_name="applications_property",
705 match_param="key=integrations",
706 renderer="json",
707 permission="view",
708 )
638 709 def integrations(request):
639 710 """
640 711 Integration list for given application
641 712 """
642 713 application = request.context.resource
643 return {'resource': application}
714 return {"resource": application}
644 715
645 716
646 @view_config(route_name='applications_property',
647 match_param='key=user_permissions', renderer='json',
648 permission='owner', request_method='POST')
717 @view_config(
718 route_name="applications_property",
719 match_param="key=user_permissions",
720 renderer="json",
721 permission="owner",
722 request_method="POST",
723 )
649 724 def user_resource_permission_create(request):
650 725 """
651 726 Sets new permissions for a user on a resource
652 727 """
653 728 resource = request.context.resource
654 user_name = request.unsafe_json_body.get('user_name')
729 user_name = request.unsafe_json_body.get("user_name")
655 730 user = UserService.by_user_name(user_name)
656 731 if not user:
657 732 user = UserService.by_email(user_name)
658 733 if not user:
659 734 return False
660 735
661 for perm_name in request.unsafe_json_body.get('permissions', []):
736 for perm_name in request.unsafe_json_body.get("permissions", []):
662 737 permission = UserResourcePermissionService.by_resource_user_and_perm(
663 user.id, perm_name, resource.resource_id)
738 user.id, perm_name, resource.resource_id
739 )
664 740 if not permission:
665 permission = UserResourcePermission(perm_name=perm_name,
666 user_id=user.id)
741 permission = UserResourcePermission(perm_name=perm_name, user_id=user.id)
667 742 resource.user_permissions.append(permission)
668 743 DBSession.flush()
669 perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user)
670 if p.type == 'user']
671 result = {'user_name': user.user_name,
672 'permissions': list(set(perms))}
744 perms = [
745 p.perm_name
746 for p in ResourceService.perms_for_user(resource, user)
747 if p.type == "user"
748 ]
749 result = {"user_name": user.user_name, "permissions": list(set(perms))}
673 750 return result
674 751
675 752
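The grant above checks for an existing permission row before appending, so repeated POSTs with the same payload stay idempotent, and the response deduplicates with set(). A minimal in-memory sketch (stand-ins for the ziggurat services):

existing = {("alice", "view")}  # (user, perm_name) pairs already granted

def grant(user, perm_names):
    for perm_name in perm_names:
        if (user, perm_name) not in existing:
            existing.add((user, perm_name))
    return {"user_name": user,
            "permissions": sorted({p for u, p in existing if u == user})}

print(grant("alice", ["view", "edit", "view"]))
# {'user_name': 'alice', 'permissions': ['edit', 'view']}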
676 @view_config(route_name='applications_property',
677 match_param='key=user_permissions', renderer='json',
678 permission='owner', request_method='DELETE')
753 @view_config(
754 route_name="applications_property",
755 match_param="key=user_permissions",
756 renderer="json",
757 permission="owner",
758 request_method="DELETE",
759 )
679 760 def user_resource_permission_delete(request):
680 761 """
681 762 Removes a user's permission from a specific resource
682 763 """
683 764 resource = request.context.resource
684 765
685 user = UserService.by_user_name(request.GET.get('user_name'))
766 user = UserService.by_user_name(request.GET.get("user_name"))
686 767 if not user:
687 768 return False
688 769
689 for perm_name in request.GET.getall('permissions'):
770 for perm_name in request.GET.getall("permissions"):
690 771 permission = UserResourcePermissionService.by_resource_user_and_perm(
691 user.id, perm_name, resource.resource_id)
772 user.id, perm_name, resource.resource_id
773 )
692 774 resource.user_permissions.remove(permission)
693 775 DBSession.flush()
694 perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user)
695 if p.type == 'user']
696 result = {'user_name': user.user_name,
697 'permissions': list(set(perms))}
776 perms = [
777 p.perm_name
778 for p in ResourceService.perms_for_user(resource, user)
779 if p.type == "user"
780 ]
781 result = {"user_name": user.user_name, "permissions": list(set(perms))}
698 782 return result
699 783
700 784
701 @view_config(route_name='applications_property',
702 match_param='key=group_permissions', renderer='json',
703 permission='owner', request_method='POST')
785 @view_config(
786 route_name="applications_property",
787 match_param="key=group_permissions",
788 renderer="json",
789 permission="owner",
790 request_method="POST",
791 )
704 792 def group_resource_permission_create(request):
705 793 """
706 794 Sets new permissions for a group on a resource
707 795 """
708 796 resource = request.context.resource
709 group = GroupService.by_id(request.unsafe_json_body.get('group_id'))
797 group = GroupService.by_id(request.unsafe_json_body.get("group_id"))
710 798 if not group:
711 799 return False
712 800
713 for perm_name in request.unsafe_json_body.get('permissions', []):
801 for perm_name in request.unsafe_json_body.get("permissions", []):
714 802 permission = GroupResourcePermissionService.by_resource_group_and_perm(
715 group.id, perm_name, resource.resource_id)
803 group.id, perm_name, resource.resource_id
804 )
716 805 if not permission:
717 permission = GroupResourcePermission(perm_name=perm_name,
718 group_id=group.id)
806 permission = GroupResourcePermission(perm_name=perm_name, group_id=group.id)
719 807 resource.group_permissions.append(permission)
720 808 DBSession.flush()
721 809 perm_tuples = ResourceService.groups_for_perm(
722 resource,
723 ANY_PERMISSION,
724 limit_group_permissions=True,
725 group_ids=[group.id])
726 perms = [p.perm_name for p in perm_tuples if p.type == 'group']
727 result = {'group': group.get_dict(),
728 'permissions': list(set(perms))}
810 resource, ANY_PERMISSION, limit_group_permissions=True, group_ids=[group.id]
811 )
812 perms = [p.perm_name for p in perm_tuples if p.type == "group"]
813 result = {"group": group.get_dict(), "permissions": list(set(perms))}
729 814 return result
730 815
731 816
732 @view_config(route_name='applications_property',
733 match_param='key=group_permissions', renderer='json',
734 permission='owner', request_method='DELETE')
817 @view_config(
818 route_name="applications_property",
819 match_param="key=group_permissions",
820 renderer="json",
821 permission="owner",
822 request_method="DELETE",
823 )
735 824 def group_resource_permission_delete(request):
736 825 """
737 826 Removes a group's permission from a specific resource
@@ -739,21 +828,19 b' def group_resource_permission_delete(request):'
739 828 form = forms.ReactorForm(request.POST, csrf_context=request)
740 829 form.validate()
741 830 resource = request.context.resource
742 group = GroupService.by_id(request.GET.get('group_id'))
831 group = GroupService.by_id(request.GET.get("group_id"))
743 832 if not group:
744 833 return False
745 834
746 for perm_name in request.GET.getall('permissions'):
835 for perm_name in request.GET.getall("permissions"):
747 836 permission = GroupResourcePermissionService.by_resource_group_and_perm(
748 group.id, perm_name, resource.resource_id)
837 group.id, perm_name, resource.resource_id
838 )
749 839 resource.group_permissions.remove(permission)
750 840 DBSession.flush()
751 841 perm_tuples = ResourceService.groups_for_perm(
752 resource,
753 ANY_PERMISSION,
754 limit_group_permissions=True,
755 group_ids=[group.id])
756 perms = [p.perm_name for p in perm_tuples if p.type == 'group']
757 result = {'group': group.get_dict(),
758 'permissions': list(set(perms))}
842 resource, ANY_PERMISSION, limit_group_permissions=True, group_ids=[group.id]
843 )
844 perms = [p.perm_name for p in perm_tuples if p.type == "group"]
845 result = {"group": group.get_dict(), "permissions": list(set(perms))}
759 846 return result
@@ -21,16 +21,13 b' from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound'
21 21 from ziggurat_foundations.models.services.user import UserService
22 22
23 23
24 @view_config(route_name='events_no_id',
25 renderer='json', permission='authenticated')
24 @view_config(route_name="events_no_id", renderer="json", permission="authenticated")
26 25 def fetch_events(request):
27 26 """
28 27 Returns a list of events for the current user
29 28 """
30 29 event_paginator = EventService.get_paginator(
31 user=request.user,
32 page=1,
33 items_per_page=100
30 user=request.user, page=1, items_per_page=100
34 31 )
35 32 headers = gen_pagination_headers(request, event_paginator)
36 33 request.response.headers.update(headers)
@@ -38,20 +35,25 b' def fetch_events(request):'
38 35 return [ev.get_dict() for ev in event_paginator.items]
39 36
40 37
41 @view_config(route_name='events', renderer='json', request_method='PATCH',
42 permission='authenticated')
38 @view_config(
39 route_name="events",
40 renderer="json",
41 request_method="PATCH",
42 permission="authenticated",
43 )
43 44 def event_PATCH(request):
44 45 resources = UserService.resources_with_perms(
45 request.user, ['view'], resource_types=request.registry.resource_types)
46 request.user, ["view"], resource_types=request.registry.resource_types
47 )
46 48 event = EventService.for_resource(
47 [r.resource_id for r in resources],
48 event_id=request.matchdict['event_id']).first()
49 [r.resource_id for r in resources], event_id=request.matchdict["event_id"]
50 ).first()
49 51 if not event:
50 52 return HTTPNotFound()
51 allowed_keys = ['status']
53 allowed_keys = ["status"]
52 54 for k, v in request.unsafe_json_body.items():
53 55 if k in allowed_keys:
54 if k == 'status':
56 if k == "status":
55 57 event.close()
56 58 else:
57 59 setattr(event, k, v)
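The allowed_keys loop above is a whitelist PATCH: unknown keys are silently ignored, and "status" maps to a domain action (event.close()) rather than a raw attribute write. A standalone sketch:

class Event:
    def __init__(self):
        self.status = "active"
        self.priority = 1

    def close(self):
        self.status = "closed"

def patch(event, payload, allowed_keys=("status", "priority")):
    for k, v in payload.items():
        if k not in allowed_keys:
            continue  # drop anything not whitelisted
        if k == "status":
            event.close()
        else:
            setattr(event, k, v)

ev = Event()
patch(ev, {"status": "x", "priority": 5, "owner": "mallory"})
print(ev.status, ev.priority, hasattr(ev, "owner"))  # closed 5 False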
@@ -34,76 +34,99 b' log = logging.getLogger(__name__)'
34 34 _ = str
35 35
36 36
37 @view_config(route_name='groups_no_id', renderer='json',
38 request_method="GET", permission='authenticated')
37 @view_config(
38 route_name="groups_no_id",
39 renderer="json",
40 request_method="GET",
41 permission="authenticated",
42 )
39 43 def groups_list(request):
40 44 """
41 45 Returns groups list
42 46 """
43 47 groups = Group.all().order_by(Group.group_name)
44 48 list_groups = ConfigService.by_key_and_section(
45 'list_groups_to_non_admins', 'global')
46 if list_groups.value or request.has_permission('root_administration'):
49 "list_groups_to_non_admins", "global"
50 )
51 if list_groups.value or request.has_permission("root_administration"):
47 52 return [g.get_dict() for g in groups]
48 53 else:
49 54 return []
50 55
51 56
52 @view_config(route_name='groups_no_id', renderer='json',
53 request_method="POST", permission='root_administration')
57 @view_config(
58 route_name="groups_no_id",
59 renderer="json",
60 request_method="POST",
61 permission="root_administration",
62 )
54 63 def groups_create(request):
55 64 """
56 65 Creates a new group
57 66 """
58 67 form = forms.GroupCreateForm(
59 MultiDict(request.safe_json_body or {}), csrf_context=request)
68 MultiDict(request.safe_json_body or {}), csrf_context=request
69 )
60 70 if form.validate():
61 log.info('registering group')
71 log.info("registering group")
62 72 group = Group()
63 73 # insert new group here
64 74 DBSession.add(group)
65 75 form.populate_obj(group)
66 request.session.flash(_('Group created'))
76 request.session.flash(_("Group created"))
67 77 DBSession.flush()
68 78 return group.get_dict(include_perms=True)
69 79 else:
70 80 return HTTPUnprocessableEntity(body=form.errors_json)
71 81
72 82
73 @view_config(route_name='groups', renderer='json',
74 request_method="DELETE", permission='root_administration')
83 @view_config(
84 route_name="groups",
85 renderer="json",
86 request_method="DELETE",
87 permission="root_administration",
88 )
75 89 def groups_DELETE(request):
76 90 """
78 92 Removes a group permanently from the db
78 92 """
79 msg = _('You cannot remove administrator group from the system')
80 group = GroupService.by_id(request.matchdict.get('group_id'))
93 msg = _("You cannot remove administrator group from the system")
94 group = GroupService.by_id(request.matchdict.get("group_id"))
81 95 if group:
82 96 if group.id == 1:
83 request.session.flash(msg, 'warning')
97 request.session.flash(msg, "warning")
84 98 else:
85 99 DBSession.delete(group)
86 request.session.flash(_('Group removed'))
100 request.session.flash(_("Group removed"))
87 101 return True
88 102 request.response.status = 422
89 103 return False
90 104
91 105
92 @view_config(route_name='groups', renderer='json',
93 request_method="GET", permission='root_administration')
94 @view_config(route_name='groups', renderer='json',
95 request_method="PATCH", permission='root_administration')
106 @view_config(
107 route_name="groups",
108 renderer="json",
109 request_method="GET",
110 permission="root_administration",
111 )
112 @view_config(
113 route_name="groups",
114 renderer="json",
115 request_method="PATCH",
116 permission="root_administration",
117 )
96 118 def group_update(request):
97 119 """
98 120 Updates group object
99 121 """
100 group = GroupService.by_id(request.matchdict.get('group_id'))
122 group = GroupService.by_id(request.matchdict.get("group_id"))
101 123 if not group:
102 124 return HTTPNotFound()
103 125
104 if request.method == 'PATCH':
126 if request.method == "PATCH":
105 127 form = forms.GroupCreateForm(
106 MultiDict(request.unsafe_json_body), csrf_context=request)
128 MultiDict(request.unsafe_json_body), csrf_context=request
129 )
107 130 form._modified_group = group
108 131 if form.validate():
109 132 form.populate_obj(group)
@@ -112,49 +135,69 b' def group_update(request):'
112 135 return group.get_dict(include_perms=True)
113 136
114 137
115 @view_config(route_name='groups_property',
116 match_param='key=resource_permissions',
117 renderer='json', permission='root_administration')
138 @view_config(
139 route_name="groups_property",
140 match_param="key=resource_permissions",
141 renderer="json",
142 permission="root_administration",
143 )
118 144 def groups_resource_permissions_list(request):
119 145 """
120 146 Get list of permissions assigned to specific resources
121 147 """
122 group = GroupService.by_id(request.matchdict.get('group_id'))
148 group = GroupService.by_id(request.matchdict.get("group_id"))
123 149 if not group:
124 150 return HTTPNotFound()
125 return [permission_tuple_to_dict(perm) for perm in
126 GroupService.resources_with_possible_perms(group)]
127
128
129 @view_config(route_name='groups_property',
130 match_param='key=users', request_method="GET",
131 renderer='json', permission='root_administration')
151 return [
152 permission_tuple_to_dict(perm)
153 for perm in GroupService.resources_with_possible_perms(group)
154 ]
155
156
157 @view_config(
158 route_name="groups_property",
159 match_param="key=users",
160 request_method="GET",
161 renderer="json",
162 permission="root_administration",
163 )
132 164 def groups_users_list(request):
133 165 """
134 166 Get list of users belonging to a specific group
135 167 """
136 group = GroupService.by_id(request.matchdict.get('group_id'))
168 group = GroupService.by_id(request.matchdict.get("group_id"))
137 169 if not group:
138 170 return HTTPNotFound()
139 props = ['user_name', 'id', 'first_name', 'last_name', 'email',
140 'last_login_date', 'status']
171 props = [
172 "user_name",
173 "id",
174 "first_name",
175 "last_name",
176 "email",
177 "last_login_date",
178 "status",
179 ]
141 180 users_dicts = []
142 181 for user in group.users:
143 182 u_dict = user.get_dict(include_keys=props)
144 u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20)
183 u_dict["gravatar_url"] = UserService.gravatar_url(user, s=20)
145 184 users_dicts.append(u_dict)
146 185 return users_dicts
147 186
148 187
149 @view_config(route_name='groups_property',
150 match_param='key=users', request_method="DELETE",
151 renderer='json', permission='root_administration')
188 @view_config(
189 route_name="groups_property",
190 match_param="key=users",
191 request_method="DELETE",
192 renderer="json",
193 permission="root_administration",
194 )
152 195 def groups_users_remove(request):
153 196 """
154 197 Removes a user from a specific group
155 198 """
156 group = GroupService.by_id(request.matchdict.get('group_id'))
157 user = UserService.by_user_name(request.GET.get('user_name'))
199 group = GroupService.by_id(request.matchdict.get("group_id"))
200 user = UserService.by_user_name(request.GET.get("user_name"))
158 201 if not group or not user:
159 202 return HTTPNotFound()
160 203 if len(group.users) > 1:
@@ -164,29 +207,40 b' def groups_users_remove(request):'
164 207 group.member_count = group.users_dynamic.count()
165 208 return True
166 209 msg = "Administrator group needs to contain at least one user"
167 request.session.flash(msg, 'warning')
210 request.session.flash(msg, "warning")
168 211 return False
169 212
170 213
171 @view_config(route_name='groups_property',
172 match_param='key=users', request_method="POST",
173 renderer='json', permission='root_administration')
214 @view_config(
215 route_name="groups_property",
216 match_param="key=users",
217 request_method="POST",
218 renderer="json",
219 permission="root_administration",
220 )
174 221 def groups_users_add(request):
175 222 """
176 223 Adds a user to a specific group
177 224 """
178 group = GroupService.by_id(request.matchdict.get('group_id'))
179 user = UserService.by_user_name(request.unsafe_json_body.get('user_name'))
225 group = GroupService.by_id(request.matchdict.get("group_id"))
226 user = UserService.by_user_name(request.unsafe_json_body.get("user_name"))
180 227 if not user:
181 user = UserService.by_email(request.unsafe_json_body.get('user_name'))
228 user = UserService.by_email(request.unsafe_json_body.get("user_name"))
182 229
183 230 if not group or not user:
184 231 return HTTPNotFound()
185 232 if user not in group.users:
186 233 group.users.append(user)
187 234 group.member_count = group.users_dynamic.count()
188 props = ['user_name', 'id', 'first_name', 'last_name', 'email',
189 'last_login_date', 'status']
235 props = [
236 "user_name",
237 "id",
238 "first_name",
239 "last_name",
240 "email",
241 "last_login_date",
242 "status",
243 ]
190 244 u_dict = user.get_dict(include_keys=props)
191 u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20)
245 u_dict["gravatar_url"] = UserService.gravatar_url(user, s=20)
192 246 return u_dict
@@ -50,18 +50,20 b' def sign_in(request):'
50 50 if user.status == 1:
51 51 request.session.new_csrf_token()
52 52 user.last_login_date = datetime.datetime.utcnow()
53 social_data = request.session.get('zigg.social_auth')
53 social_data = request.session.get("zigg.social_auth")
54 54 if social_data:
55 55 handle_social_data(request, user, social_data)
56 56 else:
57 request.session.flash(_('Account got disabled'))
57 58 request.session.flash(_("Account has been disabled"))
58 58
59 if request.context.came_from != '/':
60 return HTTPFound(location=request.context.came_from,
61 headers=request.context.headers)
59 if request.context.came_from != "/":
60 return HTTPFound(
61 location=request.context.came_from, headers=request.context.headers
62 )
62 63 else:
63 return HTTPFound(location=request.route_url('/'),
64 headers=request.context.headers)
64 return HTTPFound(
65 location=request.route_url("/"), headers=request.context.headers
66 )
65 67
66 68
67 69 @view_config(context=ZigguratSignInBadAuth, permission=NO_PERMISSION_REQUIRED)
@@ -69,9 +71,10 b' def bad_auth(request):'
69 71 """
70 72 Handles incorrect login flow
71 73 """
72 request.session.flash(_('Incorrect username or password'), 'warning')
73 return HTTPFound(location=request.route_url('register'),
74 headers=request.context.headers)
74 request.session.flash(_("Incorrect username or password"), "warning")
75 return HTTPFound(
76 location=request.route_url("register"), headers=request.context.headers
77 )
75 78
76 79
77 80 @view_config(context=ZigguratSignOut, permission=NO_PERMISSION_REQUIRED)
@@ -79,13 +82,16 b' def sign_out(request):'
79 82 """
80 83 Removes user identification cookie
81 84 """
82 return HTTPFound(location=request.route_url('register'),
83 headers=request.context.headers)
85 return HTTPFound(
86 location=request.route_url("register"), headers=request.context.headers
87 )
84 88
85 89
86 @view_config(route_name='lost_password',
87 renderer='appenlight:templates/user/lost_password.jinja2',
88 permission=NO_PERMISSION_REQUIRED)
90 @view_config(
91 route_name="lost_password",
92 renderer="appenlight:templates/user/lost_password.jinja2",
93 permission=NO_PERMISSION_REQUIRED,
94 )
89 95 def lost_password(request):
90 96 """
91 97 Presents lost password page - sends password reset link to
@@ -93,36 +99,43 b' def lost_password(request):'
93 99 This link is valid only for 10 minutes
94 100 """
95 101 form = forms.LostPasswordForm(request.POST, csrf_context=request)
96 if request.method == 'POST' and form.validate():
102 if request.method == "POST" and form.validate():
97 103 user = UserService.by_email(form.email.data)
98 104 if user:
99 105 UserService.regenerate_security_code(user)
100 106 user.security_code_date = datetime.datetime.utcnow()
101 107 email_vars = {
102 'user': user,
103 'request': request,
104 'email_title': "AppEnlight :: New password request"
108 "user": user,
109 "request": request,
110 "email_title": "AppEnlight :: New password request",
105 111 }
106 112 UserService.send_email(
107 request, recipients=[user.email],
113 request,
114 recipients=[user.email],
108 115 variables=email_vars,
109 template='/email_templates/lost_password.jinja2')
110 msg = 'Password reset email had been sent. ' \
111 'Please check your mailbox for further instructions.'
116 template="/email_templates/lost_password.jinja2",
117 )
118 msg = (
119 "Password reset email had been sent. "
120 "Please check your mailbox for further instructions."
121 )
112 122 request.session.flash(_(msg))
113 return HTTPFound(location=request.route_url('lost_password'))
123 return HTTPFound(location=request.route_url("lost_password"))
114 124 return {"form": form}
115 125
116 126
117 @view_config(route_name='lost_password_generate',
127 @view_config(
128 route_name="lost_password_generate",
118 129 permission=NO_PERMISSION_REQUIRED,
119 renderer='appenlight:templates/user/lost_password_generate.jinja2')
130 renderer="appenlight:templates/user/lost_password_generate.jinja2",
131 )
120 132 def lost_password_generate(request):
121 133 """
122 134 Shows new password form - perform time check and set new password for user
123 135 """
124 136 user = UserService.by_user_name_and_security_code(
125 request.GET.get('user_name'), request.GET.get('security_code'))
137 request.GET.get("user_name"), request.GET.get("security_code")
138 )
126 139 if user:
127 140 delta = datetime.datetime.utcnow() - user.security_code_date
128 141
@@ -130,56 +143,54 b' def lost_password_generate(request):'
130 143 form = forms.NewPasswordForm(request.POST, csrf_context=request)
131 144 if request.method == "POST" and form.validate():
132 145 UserService.set_password(user, form.new_password.data)
133 request.session.flash(_('You can sign in with your new password.'))
134 return HTTPFound(location=request.route_url('register'))
146 request.session.flash(_("You can sign in with your new password."))
147 return HTTPFound(location=request.route_url("register"))
135 148 else:
136 149 return {"form": form}
137 150 else:
138 return Response('Security code expired')
151 return Response("Security code expired")
139 152
140 153
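The docstring above promises a ten-minute validity window; the actual comparison sits in lines elided from this hunk, so the sketch below illustrates the check rather than quoting it:

import datetime

def code_still_valid(security_code_date, now=None):
    now = now or datetime.datetime.utcnow()
    return now - security_code_date < datetime.timedelta(minutes=10)

issued = datetime.datetime.utcnow() - datetime.timedelta(minutes=11)
print(code_still_valid(issued))  # False, i.e. "Security code expired"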
141 @view_config(route_name='register',
142 renderer='appenlight:templates/user/register.jinja2',
143 permission=NO_PERMISSION_REQUIRED)
154 @view_config(
155 route_name="register",
156 renderer="appenlight:templates/user/register.jinja2",
157 permission=NO_PERMISSION_REQUIRED,
158 )
144 159 def register(request):
145 160 """
146 161 Render register page with form
147 162 Also handles oAuth flow for registration
148 163 """
149 login_url = request.route_url('ziggurat.routes.sign_in')
164 login_url = request.route_url("ziggurat.routes.sign_in")
150 165 if request.query_string:
151 query_string = '?%s' % request.query_string
166 query_string = "?%s" % request.query_string
152 167 else:
153 query_string = ''
154 referrer = '%s%s' % (request.path, query_string)
168 query_string = ""
169 referrer = "%s%s" % (request.path, query_string)
155 170
156 if referrer in [login_url, '/register', '/register?sign_in=1']:
157 referrer = '/' # never use the login form itself as came_from
171 if referrer in [login_url, "/register", "/register?sign_in=1"]:
172 referrer = "/" # never use the login form itself as came_from
158 173 sign_in_form = forms.SignInForm(
159 came_from=request.params.get('came_from', referrer),
160 csrf_context=request)
174 came_from=request.params.get("came_from", referrer), csrf_context=request
175 )
161 176
162 177 # populate form from oAuth session data returned by authomatic
163 social_data = request.session.get('zigg.social_auth')
164 if request.method != 'POST' and social_data:
178 social_data = request.session.get("zigg.social_auth")
179 if request.method != "POST" and social_data:
165 180 log.debug(social_data)
166 user_name = social_data['user'].get('user_name', '').split('@')[0]
167 form_data = {
168 'user_name': user_name,
169 'email': social_data['user'].get('email')
170 }
171 form_data['user_password'] = str(uuid.uuid4())
172 form = forms.UserRegisterForm(MultiDict(form_data),
173 csrf_context=request)
181 user_name = social_data["user"].get("user_name", "").split("@")[0]
182 form_data = {"user_name": user_name, "email": social_data["user"].get("email")}
183 form_data["user_password"] = str(uuid.uuid4())
184 form = forms.UserRegisterForm(MultiDict(form_data), csrf_context=request)
174 185 form.user_password.widget.hide_value = False
175 186 else:
176 187 form = forms.UserRegisterForm(request.POST, csrf_context=request)
177 if request.method == 'POST' and form.validate():
178 log.info('registering user')
188 if request.method == "POST" and form.validate():
189 log.info("registering user")
179 190 # insert new user here
180 if request.registry.settings['appenlight.disable_registration']:
181 request.session.flash(_('Registration is currently disabled.'))
182 return HTTPFound(location=request.route_url('/'))
191 if request.registry.settings["appenlight.disable_registration"]:
192 request.session.flash(_("Registration is currently disabled."))
193 return HTTPFound(location=request.route_url("/"))
183 194
184 195 new_user = User()
185 196 DBSession.add(new_user)
@@ -187,49 +198,59 b' def register(request):'
187 198 UserService.regenerate_security_code(new_user)
188 199 new_user.status = 1
189 200 UserService.set_password(new_user, new_user.user_password)
190 new_user.registration_ip = request.environ.get('REMOTE_ADDR')
201 new_user.registration_ip = request.environ.get("REMOTE_ADDR")
191 202
192 203 if social_data:
193 204 handle_social_data(request, new_user, social_data)
194 205
195 email_vars = {'user': new_user,
196 'request': request,
197 'email_title': "AppEnlight :: Start information"}
206 email_vars = {
207 "user": new_user,
208 "request": request,
209 "email_title": "AppEnlight :: Start information",
210 }
198 211 UserService.send_email(
199 request, recipients=[new_user.email], variables=email_vars,
200 template='/email_templates/registered.jinja2')
201 request.session.flash(_('You have successfully registered.'))
212 request,
213 recipients=[new_user.email],
214 variables=email_vars,
215 template="/email_templates/registered.jinja2",
216 )
217 request.session.flash(_("You have successfully registered."))
202 218 DBSession.flush()
203 219 headers = security.remember(request, new_user.id)
204 return HTTPFound(location=request.route_url('/'),
205 headers=headers)
220 return HTTPFound(location=request.route_url("/"), headers=headers)
206 221 settings = request.registry.settings
207 222 social_plugins = {}
208 if settings.get('authomatic.pr.twitter.key', ''):
209 social_plugins['twitter'] = True
210 if settings.get('authomatic.pr.google.key', ''):
211 social_plugins['google'] = True
212 if settings.get('authomatic.pr.github.key', ''):
213 social_plugins['github'] = True
214 if settings.get('authomatic.pr.bitbucket.key', ''):
215 social_plugins['bitbucket'] = True
223 if settings.get("authomatic.pr.twitter.key", ""):
224 social_plugins["twitter"] = True
225 if settings.get("authomatic.pr.google.key", ""):
226 social_plugins["google"] = True
227 if settings.get("authomatic.pr.github.key", ""):
228 social_plugins["github"] = True
229 if settings.get("authomatic.pr.bitbucket.key", ""):
230 social_plugins["bitbucket"] = True
216 231
217 232 return {
218 233 "form": form,
219 234 "sign_in_form": sign_in_form,
220 "social_plugins": social_plugins
235 "social_plugins": social_plugins,
221 236 }
222 237
223 238
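The provider toggles above reduce to "show the button only when a key is configured". The same logic as a dict comprehension, with hypothetical settings values:

settings = {"authomatic.pr.github.key": "abc123",
            "authomatic.pr.twitter.key": ""}

social_plugins = {
    provider: True
    for provider in ("twitter", "google", "github", "bitbucket")
    if settings.get("authomatic.pr.%s.key" % provider, "")
}
print(social_plugins)  # {'github': True}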
224 @view_config(route_name='/',
225 renderer='appenlight:templates/app.jinja2',
226 permission=NO_PERMISSION_REQUIRED)
227 @view_config(route_name='angular_app_ui',
228 renderer='appenlight:templates/app.jinja2',
229 permission=NO_PERMISSION_REQUIRED)
230 @view_config(route_name='angular_app_ui_ix',
231 renderer='appenlight:templates/app.jinja2',
232 permission=NO_PERMISSION_REQUIRED)
239 @view_config(
240 route_name="/",
241 renderer="appenlight:templates/app.jinja2",
242 permission=NO_PERMISSION_REQUIRED,
243 )
244 @view_config(
245 route_name="angular_app_ui",
246 renderer="appenlight:templates/app.jinja2",
247 permission=NO_PERMISSION_REQUIRED,
248 )
249 @view_config(
250 route_name="angular_app_ui_ix",
251 renderer="appenlight:templates/app.jinja2",
252 permission=NO_PERMISSION_REQUIRED,
253 )
233 254 def app_main_index(request):
234 255 """
234 255 Render dashboard/report browser page along with:
@@ -32,14 +32,16 b' from appenlight.lib import generate_random_string'
32 32 log = logging.getLogger(__name__)
33 33
34 34 dummy_report = ReportGroup()
35 dummy_report.error = "ProtocolError: ('Connection aborted.', " \
36 "error(111, 'Connection refused'))"
35 dummy_report.error = (
36 "ProtocolError: ('Connection aborted.', " "error(111, 'Connection refused'))"
37 )
37 38 dummy_report.total_reports = 4
38 39 dummy_report.occurences = 4
39 40
40 41 dummy_report2 = ReportGroup()
41 dummy_report2.error = "UnboundLocalError: local variable " \
42 "'hits' referenced before assignment"
42 dummy_report2.error = (
43 "UnboundLocalError: local variable " "'hits' referenced before assignment"
44 )
43 45 dummy_report2.total_reports = 8
44 46 dummy_report2.occurences = 8
45 47
@@ -54,9 +56,10 b' class IntegrationView(object):'
54 56 def __init__(self, request):
55 57 self.request = request
56 58 resource = self.request.context.resource
57 integration_name = request.matchdict['integration']
59 integration_name = request.matchdict["integration"]
58 60 integration = IntegrationBase.by_app_id_and_integration_name(
59 resource.resource_id, integration_name)
61 resource.resource_id, integration_name
62 )
60 63 if integration:
61 64 dict_config = integration.config
62 65 else:
@@ -64,25 +67,28 b' class IntegrationView(object):'
64 67 self.integration = integration
65 68 self.integration_config = dict_config
66 69
67 @view_config(route_name='integrations_id',
70 @view_config(
71 route_name="integrations_id",
68 72 request_method="DELETE",
69 renderer='json',
70 permission='edit')
73 renderer="json",
74 permission="edit",
75 )
71 76 def remove_integration(self):
72 77 if self.integration:
73 78 DBSession.delete(self.integration)
74 self.request.session.flash('Integration removed')
75 return ''
79 self.request.session.flash("Integration removed")
80 return ""
76 81
77 @view_config(route_name='integrations_id',
82 @view_config(
83 route_name="integrations_id",
78 84 request_method="POST",
79 match_param=['action=test_report_notification'],
80 renderer='json',
81 permission='edit')
85 match_param=["action=test_report_notification"],
86 renderer="json",
87 permission="edit",
88 )
82 89 def test_report_notification(self):
83 90 if not self.integration:
84 self.request.session.flash('Integration needs to be configured',
85 'warning')
91 self.request.session.flash("Integration needs to be configured", "warning")
86 92 return False
87 93
88 94 resource = self.integration.resource
@@ -94,138 +100,154 b' class IntegrationView(object):'
94 100 else:
95 101 confirmed_reports = [random.choice(dummy_reports)]
96 102
97 channel.notify_reports(resource=resource,
103 channel.notify_reports(
104 resource=resource,
98 105 user=self.request.user,
99 106 request=self.request,
100 107 since_when=datetime.utcnow(),
101 reports=confirmed_reports)
102 self.request.session.flash('Report notification sent')
108 reports=confirmed_reports,
109 )
110 self.request.session.flash("Report notification sent")
103 111 return True
104 112
105 @view_config(route_name='integrations_id',
113 @view_config(
114 route_name="integrations_id",
106 115 request_method="POST",
107 match_param=['action=test_error_alert'],
108 renderer='json',
109 permission='edit')
116 match_param=["action=test_error_alert"],
117 renderer="json",
118 permission="edit",
119 )
110 120 def test_error_alert(self):
111 121 if not self.integration:
112 self.request.session.flash('Integration needs to be configured',
113 'warning')
122 self.request.session.flash("Integration needs to be configured", "warning")
114 123 return False
115 124
116 125 resource = self.integration.resource
117 126
118 event_name = random.choice(('error_report_alert',
119 'slow_report_alert',))
120 new_event = Event(resource_id=resource.resource_id,
127 event_name = random.choice(("error_report_alert", "slow_report_alert"))
128 new_event = Event(
129 resource_id=resource.resource_id,
121 130 event_type=Event.types[event_name],
122 131 start_date=datetime.utcnow(),
123 status=Event.statuses['active'],
124 values={'reports': random.randint(11, 99),
125 'threshold': 10}
132 status=Event.statuses["active"],
133 values={"reports": random.randint(11, 99), "threshold": 10},
126 134 )
127 135
128 136 channel = AlertChannelService.by_integration_id(self.integration.id)
129 137
130 channel.notify_alert(resource=resource,
138 channel.notify_alert(
139 resource=resource,
131 140 event=new_event,
132 141 user=self.request.user,
133 request=self.request)
134 self.request.session.flash('Notification sent')
142 request=self.request,
143 )
144 self.request.session.flash("Notification sent")
135 145 return True
136 146
137 @view_config(route_name='integrations_id',
147 @view_config(
148 route_name="integrations_id",
138 149 request_method="POST",
139 match_param=['action=test_daily_digest'],
140 renderer='json',
141 permission='edit')
150 match_param=["action=test_daily_digest"],
151 renderer="json",
152 permission="edit",
153 )
142 154 def test_daily_digest(self):
143 155 if not self.integration:
144 self.request.session.flash('Integration needs to be configured',
145 'warning')
156 self.request.session.flash("Integration needs to be configured", "warning")
146 157 return False
147 158
148 159 resource = self.integration.resource
149 160 channel = AlertChannelService.by_integration_id(self.integration.id)
150 161
151 channel.send_digest(resource=resource,
162 channel.send_digest(
163 resource=resource,
152 164 user=self.request.user,
153 165 request=self.request,
154 166 since_when=datetime.utcnow(),
155 reports=dummy_reports)
156 self.request.session.flash('Notification sent')
167 reports=dummy_reports,
168 )
169 self.request.session.flash("Notification sent")
157 170 return True
158 171
159 @view_config(route_name='integrations_id',
172 @view_config(
173 route_name="integrations_id",
160 174 request_method="POST",
161 match_param=['action=test_uptime_alert'],
162 renderer='json',
163 permission='edit')
175 match_param=["action=test_uptime_alert"],
176 renderer="json",
177 permission="edit",
178 )
164 179 def test_uptime_alert(self):
165 180 if not self.integration:
166 self.request.session.flash('Integration needs to be configured',
167 'warning')
181 self.request.session.flash("Integration needs to be configured", "warning")
168 182 return False
169 183
170 184 resource = self.integration.resource
171 185
172 new_event = Event(resource_id=resource.resource_id,
173 event_type=Event.types['uptime_alert'],
186 new_event = Event(
187 resource_id=resource.resource_id,
188 event_type=Event.types["uptime_alert"],
174 189 start_date=datetime.utcnow(),
175 status=Event.statuses['active'],
176 values={"status_code": 500,
177 "tries": 2,
178 "response_time": 0})
190 status=Event.statuses["active"],
191 values={"status_code": 500, "tries": 2, "response_time": 0},
192 )
179 193
180 194 channel = AlertChannelService.by_integration_id(self.integration.id)
181 channel.notify_uptime_alert(resource=resource,
195 channel.notify_uptime_alert(
196 resource=resource,
182 197 event=new_event,
183 198 user=self.request.user,
184 request=self.request)
199 request=self.request,
200 )
185 201
186 self.request.session.flash('Notification sent')
202 self.request.session.flash("Notification sent")
187 203 return True
188 204
189 @view_config(route_name='integrations_id',
205 @view_config(
206 route_name="integrations_id",
190 207 request_method="POST",
191 match_param=['action=test_chart_alert'],
192 renderer='json',
193 permission='edit')
208 match_param=["action=test_chart_alert"],
209 renderer="json",
210 permission="edit",
211 )
194 212 def test_chart_alert(self):
195 213 if not self.integration:
196 self.request.session.flash('Integration needs to be configured',
197 'warning')
214 self.request.session.flash("Integration needs to be configured", "warning")
198 215 return False
199 216
200 217 resource = self.integration.resource
201 218
202 219 chart_values = {
203 "matched_rule": {'name': 'Fraud attempt limit'},
204 "matched_step_values": {"labels": {
205 "0_1": {"human_label": "Attempts sum"}},
206 "values": {"0_1": random.randint(11, 55),
207 "key": "2015-12-16T15:49:00"}},
220 "matched_rule": {"name": "Fraud attempt limit"},
221 "matched_step_values": {
222 "labels": {"0_1": {"human_label": "Attempts sum"}},
223 "values": {"0_1": random.randint(11, 55), "key": "2015-12-16T15:49:00"},
224 },
208 225 "start_interval": datetime.utcnow(),
209 226 "resource": 1,
210 227 "chart_name": "Fraud attempts per day",
211 228 "chart_uuid": "some_uuid",
212 229 "step_size": 3600,
213 "action_name": "Notify excessive fraud attempts"}
230 "action_name": "Notify excessive fraud attempts",
231 }
214 232
215 new_event = Event(resource_id=resource.resource_id,
216 event_type=Event.types['chart_alert'],
217 status=Event.statuses['active'],
233 new_event = Event(
234 resource_id=resource.resource_id,
235 event_type=Event.types["chart_alert"],
236 status=Event.statuses["active"],
218 237 values=chart_values,
219 238 target_uuid="some_uuid",
220 start_date=datetime.utcnow())
239 start_date=datetime.utcnow(),
240 )
221 241
222 242 channel = AlertChannelService.by_integration_id(self.integration.id)
223 channel.notify_chart_alert(resource=resource,
243 channel.notify_chart_alert(
244 resource=resource,
224 245 event=new_event,
225 246 user=self.request.user,
226 request=self.request)
247 request=self.request,
248 )
227 249
228 self.request.session.flash('Notification sent')
250 self.request.session.flash("Notification sent")
229 251 return True
230 252
231 253 def create_missing_channel(self, resource, channel_name):
@@ -240,5 +262,5 b' class IntegrationView(object):'
240 262 channel.channel_value = resource.resource_id
241 263 channel.integration_id = self.integration.id
242 264 security_code = generate_random_string(10)
243 channel.channel_json_conf = {'security_code': security_code}
265 channel.channel_json_conf = {"security_code": security_code}
244 266 resource.owner.alert_channels.append(channel)
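The security code seeded into channel_json_conf comes from generate_random_string(10) in appenlight.lib, which this diff does not show. A minimal sketch of an equivalent helper, assuming an alphanumeric token is all that is required (the real implementation may differ):

    # hypothetical stand-in for appenlight.lib.generate_random_string
    import secrets
    import string

    def generate_random_string(length=10):
        alphabet = string.ascii_letters + string.digits
        return "".join(secrets.choice(alphabet) for _ in range(length))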
@@ -14,8 +14,10 b''
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 from appenlight.models.integrations.bitbucket import BitbucketIntegration, \
18 IntegrationException
17 from appenlight.models.integrations.bitbucket import (
18 BitbucketIntegration,
19 IntegrationException,
20 )
19 21 from appenlight.models.report_comment import ReportComment
20 22 from appenlight.models.services.report_group import ReportGroupService
21 23 from pyramid.view import view_config
@@ -31,9 +33,11 b' from . import IntegrationView'
31 33
32 34
33 35 class BitbucketView(IntegrationView):
34 @view_config(route_name='integrations_id',
35 match_param=['action=info', 'integration=bitbucket'],
36 renderer='json')
36 @view_config(
37 route_name="integrations_id",
38 match_param=["action=info", "integration=bitbucket"],
39 renderer="json",
40 )
37 41 def get_bitbucket_info(self):
38 42 """
39 43 Grab information about possible priority levels and assignable users
@@ -41,56 +45,60 b' class BitbucketView(IntegrationView):'
41 45 try:
42 46 client = BitbucketIntegration.create_client(
43 47 self.request,
44 self.integration.config['user_name'],
45 self.integration.config['repo_name'])
48 self.integration.config["user_name"],
49 self.integration.config["repo_name"],
50 )
46 51 except IntegrationException as e:
47 52 self.request.response.status_code = 503
48 return {'error_messages': [str(e)]}
53 return {"error_messages": [str(e)]}
49 54 assignees = client.get_assignees()
50 55 priorities = client.get_priorities()
51 return {'assignees': assignees,
52 'priorities': priorities}
56 return {"assignees": assignees, "priorities": priorities}
53 57
54 @view_config(route_name='integrations_id',
55 match_param=['action=create-issue',
56 'integration=bitbucket'],
57 renderer='json')
58 @view_config(
59 route_name="integrations_id",
60 match_param=["action=create-issue", "integration=bitbucket"],
61 renderer="json",
62 )
58 63 def create_issue(self):
59 64 """
60 65 Creates a new issue in bitbucket issue tracker from report group
61 66 """
62 report = ReportGroupService.by_id(
63 self.request.unsafe_json_body['group_id'])
67 report = ReportGroupService.by_id(self.request.unsafe_json_body["group_id"])
64 68 form_data = {
65 'title': self.request.unsafe_json_body.get('title',
66 'Unknown Title'),
67 'content': self.request.unsafe_json_body.get('content', ''),
68 'kind': 'bug',
69 'priority': self.request.unsafe_json_body['priority'],
70 'responsible': self.request.unsafe_json_body['responsible']['user']
69 "title": self.request.unsafe_json_body.get("title", "Unknown Title"),
70 "content": self.request.unsafe_json_body.get("content", ""),
71 "kind": "bug",
72 "priority": self.request.unsafe_json_body["priority"],
73 "responsible": self.request.unsafe_json_body["responsible"]["user"],
71 74 }
72 75
73 76 try:
74 77 client = BitbucketIntegration.create_client(
75 78 self.request,
76 self.integration.config['user_name'],
77 self.integration.config['repo_name'])
79 self.integration.config["user_name"],
80 self.integration.config["repo_name"],
81 )
78 82 issue = client.create_issue(form_data)
79 83 except IntegrationException as e:
80 84 self.request.response.status_code = 503
81 return {'error_messages': [str(e)]}
85 return {"error_messages": [str(e)]}
82 86
83 comment_body = 'Bitbucket issue created: %s ' % issue['web_url']
84 comment = ReportComment(owner_id=self.request.user.id,
87 comment_body = "Bitbucket issue created: %s " % issue["web_url"]
88 comment = ReportComment(
89 owner_id=self.request.user.id,
85 90 report_time=report.first_timestamp,
86 body=comment_body)
91 body=comment_body,
92 )
87 93 report.comments.append(comment)
88 94 return True
89 95
90 @view_config(route_name='integrations_id',
91 match_param=['action=setup', 'integration=bitbucket'],
92 renderer='json',
93 permission='edit')
96 @view_config(
97 route_name="integrations_id",
98 match_param=["action=setup", "integration=bitbucket"],
99 renderer="json",
100 permission="edit",
101 )
94 102 def setup(self):
95 103 """
96 104 Validates and creates an integration between the application and bitbucket
@@ -98,24 +106,24 b' class BitbucketView(IntegrationView):'
98 106 resource = self.request.context.resource
99 107 form = forms.IntegrationBitbucketForm(
100 108 MultiDict(self.request.safe_json_body or {}),
101 csrf_context=self.request, **self.integration_config)
102 if self.request.method == 'POST' and form.validate():
109 csrf_context=self.request,
110 **self.integration_config
111 )
112 if self.request.method == "POST" and form.validate():
103 113 integration_config = {
104 'repo_name': form.repo_name.data,
105 'user_name': form.user_name.data,
106 'host_name': 'https://bitbucket.org'
114 "repo_name": form.repo_name.data,
115 "user_name": form.user_name.data,
116 "host_name": "https://bitbucket.org",
107 117 }
108 118 if not self.integration:
109 119 # add new integration
110 self.integration = BitbucketIntegration(
111 modified_date=datetime.utcnow(),
112 )
113 self.request.session.flash('Integration added')
120 self.integration = BitbucketIntegration(modified_date=datetime.utcnow())
121 self.request.session.flash("Integration added")
114 122 resource.integrations.append(self.integration)
115 123 else:
116 self.request.session.flash('Integration updated')
124 self.request.session.flash("Integration updated")
117 125 self.integration.config = integration_config
118 126 return integration_config
119 elif self.request.method == 'POST':
127 elif self.request.method == "POST":
120 128 return HTTPUnprocessableEntity(body=form.errors_json)
121 129 return self.integration_config
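The setup views all follow the same form pattern: the stored integration config is unpacked into the form constructor as keyword defaults, so a GET renders the current settings while a POST validates the submitted JSON body. A minimal sketch of that behavior, assuming a WTForms-style form (the project's real IntegrationBitbucketForm adds CSRF handling and validators):

    # hypothetical form; kwargs act as field defaults when the submitted
    # formdata does not contain the field
    from webob.multidict import MultiDict
    from wtforms import Form, StringField

    class IntegrationForm(Form):
        repo_name = StringField()
        user_name = StringField()

    stored_config = {"repo_name": "demo", "user_name": "alice"}
    form = IntegrationForm(MultiDict(), **stored_config)
    assert form.repo_name.data == "demo"

Black also leaves no magic trailing comma after **self.integration_config at these call sites, presumably because a trailing comma after a ** unpacking in a call is not valid syntax on all the Python versions black plays safe with by default.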
@@ -15,8 +15,7 b''
15 15 # limitations under the License.
16 16
17 17 from ...models import DBSession
18 from ...models.integrations.campfire import CampfireIntegration, \
19 IntegrationException
18 from ...models.integrations.campfire import CampfireIntegration, IntegrationException
20 19 from ...models.alert_channel import AlertChannel
21 20 from ...lib import generate_random_string
22 21 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
@@ -32,47 +31,50 b' from . import IntegrationView'
32 31
33 32
34 33 class CampfireView(IntegrationView):
35 @view_config(route_name='integrations_id',
36 match_param=['action=info', 'integration=campfire'],
37 renderer='json')
34 @view_config(
35 route_name="integrations_id",
36 match_param=["action=info", "integration=campfire"],
37 renderer="json",
38 )
38 39 def get_info(self):
39 40 pass
40 41
41 @view_config(route_name='integrations_id',
42 match_param=['action=setup', 'integration=campfire'],
43 renderer='json',
44 permission='edit')
42 @view_config(
43 route_name="integrations_id",
44 match_param=["action=setup", "integration=campfire"],
45 renderer="json",
46 permission="edit",
47 )
45 48 def setup(self):
46 49 """
47 50 Validates and creates an integration between the application and campfire
48 51 """
49 52 resource = self.request.context.resource
50 self.create_missing_channel(resource, 'campfire')
53 self.create_missing_channel(resource, "campfire")
51 54
52 55 form = forms.IntegrationCampfireForm(
53 56 MultiDict(self.request.safe_json_body or {}),
54 57 csrf_context=self.request,
55 **self.integration_config)
58 **self.integration_config
59 )
56 60
57 if self.request.method == 'POST' and form.validate():
61 if self.request.method == "POST" and form.validate():
58 62 integration_config = {
59 'account': form.account.data,
60 'api_token': form.api_token.data,
61 'rooms': form.rooms.data,
63 "account": form.account.data,
64 "api_token": form.api_token.data,
65 "rooms": form.rooms.data,
62 66 }
63 67 if not self.integration:
64 68 # add new integration
65 self.integration = CampfireIntegration(
66 modified_date=datetime.utcnow(),
67 )
68 self.request.session.flash('Integration added')
69 self.integration = CampfireIntegration(modified_date=datetime.utcnow())
70 self.request.session.flash("Integration added")
69 71 resource.integrations.append(self.integration)
70 72 else:
71 self.request.session.flash('Integration updated')
73 self.request.session.flash("Integration updated")
72 74 self.integration.config = integration_config
73 75 DBSession.flush()
74 self.create_missing_channel(resource, 'campfire')
76 self.create_missing_channel(resource, "campfire")
75 77 return integration_config
76 elif self.request.method == 'POST':
78 elif self.request.method == "POST":
77 79 return HTTPUnprocessableEntity(body=form.errors_json)
78 80 return self.integration_config
@@ -29,43 +29,45 b' from . import IntegrationView'
29 29
30 30
31 31 class FlowdockView(IntegrationView):
32 @view_config(route_name='integrations_id',
33 match_param=['action=info', 'integration=flowdock'],
34 renderer='json')
32 @view_config(
33 route_name="integrations_id",
34 match_param=["action=info", "integration=flowdock"],
35 renderer="json",
36 )
35 37 def get_info(self):
36 38 pass
37 39
38 @view_config(route_name='integrations_id',
39 match_param=['action=setup', 'integration=flowdock'],
40 renderer='json',
41 permission='edit')
40 @view_config(
41 route_name="integrations_id",
42 match_param=["action=setup", "integration=flowdock"],
43 renderer="json",
44 permission="edit",
45 )
42 46 def setup(self):
43 47 """
44 48 Validates and creates integration between application and flowdock
45 49 """
46 50 resource = self.request.context.resource
47 self.create_missing_channel(resource, 'flowdock')
51 self.create_missing_channel(resource, "flowdock")
48 52
49 53 form = forms.IntegrationFlowdockForm(
50 54 MultiDict(self.request.safe_json_body or {}),
51 csrf_context=self.request, **self.integration_config)
52 if self.request.method == 'POST' and form.validate():
53 integration_config = {
54 'api_token': form.api_token.data,
55 }
55 csrf_context=self.request,
56 **self.integration_config
57 )
58 if self.request.method == "POST" and form.validate():
59 integration_config = {"api_token": form.api_token.data}
56 60 if not self.integration:
57 61 # add new integration
58 self.integration = FlowdockIntegration(
59 modified_date=datetime.utcnow(),
60 )
61 self.request.session.flash('Integration added')
62 self.integration = FlowdockIntegration(modified_date=datetime.utcnow())
63 self.request.session.flash("Integration added")
62 64 resource.integrations.append(self.integration)
63 65 else:
64 self.request.session.flash('Integration updated')
66 self.request.session.flash("Integration updated")
65 67 self.integration.config = integration_config
66 68 DBSession.flush()
67 self.create_missing_channel(resource, 'flowdock')
69 self.create_missing_channel(resource, "flowdock")
68 70 return integration_config
69 elif self.request.method == 'POST':
71 elif self.request.method == "POST":
70 72 return HTTPUnprocessableEntity(body=form.errors_json)
71 73 return self.integration_config
@@ -15,8 +15,10 b''
15 15 # limitations under the License.
16 16
17 17 from appenlight.models import DBSession
18 from appenlight.models.integrations.github import GithubIntegration, \
19 IntegrationException
18 from appenlight.models.integrations.github import (
19 GithubIntegration,
20 IntegrationException,
21 )
20 22 from appenlight.models.report_comment import ReportComment
21 23 from appenlight.models.services.report_group import ReportGroupService
22 24 from pyramid.view import view_config
@@ -32,9 +34,11 b' from . import IntegrationView'
32 34
33 35
34 36 class GithubView(IntegrationView):
35 @view_config(route_name='integrations_id',
36 match_param=['action=info', 'integration=github'],
37 renderer='json')
37 @view_config(
38 route_name="integrations_id",
39 match_param=["action=info", "integration=github"],
40 renderer="json",
41 )
38 42 def get_github_info(self):
39 43 """
40 44 Grab information about possible priority statuses and assignable users
@@ -42,57 +46,62 b' class GithubView(IntegrationView):'
42 46 try:
43 47 client = GithubIntegration.create_client(
44 48 self.request,
45 self.integration.config['user_name'],
46 self.integration.config['repo_name'])
49 self.integration.config["user_name"],
50 self.integration.config["repo_name"],
51 )
47 52 except IntegrationException as e:
48 53 self.request.response.status_code = 503
49 return {'error_messages': [str(e)]}
54 return {"error_messages": [str(e)]}
50 55 try:
51 56 assignees = client.get_assignees()
52 57 statuses = client.get_statuses()
53 58 except IntegrationException as e:
54 return {'error_messages': [str(e)]}
55 return {'assignees': assignees,
56 'statuses': statuses}
59 return {"error_messages": [str(e)]}
60 return {"assignees": assignees, "statuses": statuses}
57 61
58 @view_config(route_name='integrations_id',
59 match_param=['action=create-issue', 'integration=github'],
60 renderer='json')
62 @view_config(
63 route_name="integrations_id",
64 match_param=["action=create-issue", "integration=github"],
65 renderer="json",
66 )
61 67 def create_issue(self):
62 68 """
63 69 Creates a new issue in github issue tracker from report group
64 70 """
65 report = ReportGroupService.by_id(
66 self.request.unsafe_json_body['group_id'])
71 report = ReportGroupService.by_id(self.request.unsafe_json_body["group_id"])
67 72 form_data = {
68 'title': self.request.unsafe_json_body.get('title',
69 'Unknown Title'),
70 'content': self.request.unsafe_json_body.get('content'),
71 'kind': [self.request.unsafe_json_body['status']],
72 'responsible': self.request.unsafe_json_body['responsible']['user']
73 "title": self.request.unsafe_json_body.get("title", "Unknown Title"),
74 "content": self.request.unsafe_json_body.get("content"),
75 "kind": [self.request.unsafe_json_body["status"]],
76 "responsible": self.request.unsafe_json_body["responsible"]["user"],
73 77 }
74 78
75 79 try:
76 80 client = GithubIntegration.create_client(
77 81 self.request,
78 self.integration.config['user_name'],
79 self.integration.config['repo_name'])
82 self.integration.config["user_name"],
83 self.integration.config["repo_name"],
84 )
80 85 issue = client.create_issue(form_data)
81 86 except IntegrationException as e:
82 87 self.request.response.status_code = 503
83 return {'error_messages': [str(e)]}
88 return {"error_messages": [str(e)]}
84 89
85 comment_body = 'Github issue created: %s ' % issue['web_url']
86 comment = ReportComment(owner_id=self.request.user.id,
90 comment_body = "Github issue created: %s " % issue["web_url"]
91 comment = ReportComment(
92 owner_id=self.request.user.id,
87 93 report_time=report.first_timestamp,
88 body=comment_body)
94 body=comment_body,
95 )
89 96 report.comments.append(comment)
90 97 return True
91 98
92 @view_config(route_name='integrations_id',
93 match_param=['action=setup', 'integration=github'],
94 renderer='json',
95 permission='edit')
99 @view_config(
100 route_name="integrations_id",
101 match_param=["action=setup", "integration=github"],
102 renderer="json",
103 permission="edit",
104 )
96 105 def setup(self):
97 106 """
98 107 Validates and creates an integration between the application and github
@@ -101,22 +110,20 b' class GithubView(IntegrationView):'
101 110 form = forms.IntegrationGithubForm(
102 111 MultiDict(self.request.safe_json_body or {}),
103 112 csrf_context=self.request,
104 **self.integration_config)
105 if self.request.method == 'POST' and form.validate():
113 **self.integration_config
114 )
115 if self.request.method == "POST" and form.validate():
106 116 integration_config = {
107 'repo_name': form.repo_name.data,
108 'user_name': form.user_name.data,
109 'host_name': 'https://api.github.com'
117 "repo_name": form.repo_name.data,
118 "user_name": form.user_name.data,
119 "host_name": "https://api.github.com",
110 120 }
111 121 if not self.integration:
112 self.integration = GithubIntegration(
113 modified_date=datetime.utcnow(),
114
115 )
122 self.integration = GithubIntegration(modified_date=datetime.utcnow())
116 123 self.integration.config = integration_config
117 124 resource.integrations.append(self.integration)
118 self.request.session.flash('Integration updated')
125 self.request.session.flash("Integration updated")
119 126 return integration_config
120 elif self.request.method == 'POST':
127 elif self.request.method == "POST":
121 128 return HTTPUnprocessableEntity(body=form.errors_json)
122 129 return self.integration_config
@@ -31,43 +31,47 b' from . import IntegrationView'
31 31
32 32
33 33 class HipchatView(IntegrationView):
34 @view_config(route_name='integrations_id',
35 match_param=['action=info', 'integration=hipchat'],
36 renderer='json')
34 @view_config(
35 route_name="integrations_id",
36 match_param=["action=info", "integration=hipchat"],
37 renderer="json",
38 )
37 39 def get_info(self):
38 40 pass
39 41
40 @view_config(route_name='integrations_id',
41 match_param=['action=setup', 'integration=hipchat'],
42 renderer='json',
43 permission='edit')
42 @view_config(
43 route_name="integrations_id",
44 match_param=["action=setup", "integration=hipchat"],
45 renderer="json",
46 permission="edit",
47 )
44 48 def setup(self):
45 49 """
46 50 Validates and creates an integration between the application and hipchat
47 51 """
48 52 resource = self.request.context.resource
49 self.create_missing_channel(resource, 'hipchat')
53 self.create_missing_channel(resource, "hipchat")
50 54 form = forms.IntegrationHipchatForm(
51 55 MultiDict(self.request.safe_json_body or {}),
52 csrf_context=self.request, **self.integration_config)
53 if self.request.method == 'POST' and form.validate():
56 csrf_context=self.request,
57 **self.integration_config
58 )
59 if self.request.method == "POST" and form.validate():
54 60 integration_config = {
55 'api_token': form.api_token.data,
56 'rooms': form.rooms.data,
61 "api_token": form.api_token.data,
62 "rooms": form.rooms.data,
57 63 }
58 64 if not self.integration:
59 65 # add new integration
60 self.integration = HipchatIntegration(
61 modified_date=datetime.utcnow(),
62 )
63 self.request.session.flash('Integration added')
66 self.integration = HipchatIntegration(modified_date=datetime.utcnow())
67 self.request.session.flash("Integration added")
64 68 resource.integrations.append(self.integration)
65 69 else:
66 self.request.session.flash('Integration updated')
70 self.request.session.flash("Integration updated")
67 71 self.integration.config = integration_config
68 72 DBSession.flush()
69 self.create_missing_channel(resource, 'hipchat')
73 self.create_missing_channel(resource, "hipchat")
70 74 return integration_config
71 elif self.request.method == 'POST':
75 elif self.request.method == "POST":
72 76 return HTTPUnprocessableEntity(body=form.errors_json)
73 77 return self.integration_config
@@ -16,8 +16,11 b''
16 16
17 17 import copy
18 18 import logging
19 from appenlight.models.integrations.jira import JiraIntegration, \
20 JiraClient, IntegrationException
19 from appenlight.models.integrations.jira import (
20 JiraIntegration,
21 JiraClient,
22 IntegrationException,
23 )
21 24 from appenlight.models.report_comment import ReportComment
22 25 from appenlight.models.services.report_group import ReportGroupService
23 26 from pyramid.view import view_config
@@ -37,21 +40,24 b' class JiraView(IntegrationView):'
37 40 Creates a client that can make authenticated requests to jira
38 41 """
39 42 if self.integration and not form:
40 user_name = self.integration.config['user_name']
41 password = self.integration.config['password']
42 host_name = self.integration.config['host_name']
43 project = self.integration.config['project']
43 user_name = self.integration.config["user_name"]
44 password = self.integration.config["password"]
45 host_name = self.integration.config["host_name"]
46 project = self.integration.config["project"]
44 47 else:
45 48 user_name, password = form.user_name.data, form.password.data
46 49 host_name = form.host_name.data
47 50 project = form.project.data
48 client = JiraClient(user_name, password, host_name, project,
49 request=self.request)
51 client = JiraClient(
52 user_name, password, host_name, project, request=self.request
53 )
50 54 return client
51 55
52 @view_config(route_name='integrations_id',
53 match_param=['action=info', 'integration=jira'],
54 renderer='json')
56 @view_config(
57 route_name="integrations_id",
58 match_param=["action=info", "integration=jira"],
59 renderer="json",
60 )
55 61 def get_jira_info(self):
56 62 """
57 63 Get basic metadata - assignees and priority levels - from the tracker
@@ -60,60 +66,64 b' class JiraView(IntegrationView):'
60 66 client = self.create_client()
61 67 except IntegrationException as e:
62 68 self.request.response.status_code = 503
63 return {'error_messages': [str(e)]}
69 return {"error_messages": [str(e)]}
64 70 assignees = []
65 71 priorities = []
66 72 issue_types = []
67 73 possible_issue_types = client.get_issue_types(self.request)
68 74 for issue_type in possible_issue_types:
69 for field in issue_type['fields']:
70 if field['id'] == 'assignee':
71 assignees = field['values']
72 if field['id'] == 'priority':
73 priorities = field['values']
74 issue_types.append({'name':issue_type['name'],
75 'id':issue_type['id']})
76 return {'assignees': assignees,
77 'priorities': priorities,
78 'issue_types': issue_types}
75 for field in issue_type["fields"]:
76 if field["id"] == "assignee":
77 assignees = field["values"]
78 if field["id"] == "priority":
79 priorities = field["values"]
80 issue_types.append({"name": issue_type["name"], "id": issue_type["id"]})
81 return {
82 "assignees": assignees,
83 "priorities": priorities,
84 "issue_types": issue_types,
85 }
79 86
80 @view_config(route_name='integrations_id',
81 match_param=['action=create-issue',
82 'integration=jira'],
83 renderer='json')
87 @view_config(
88 route_name="integrations_id",
89 match_param=["action=create-issue", "integration=jira"],
90 renderer="json",
91 )
84 92 def create_issue(self):
85 93 """
86 94 Creates a new issue in jira from a report group
87 95 """
88 report = ReportGroupService.by_id(
89 self.request.unsafe_json_body['group_id'])
96 report = ReportGroupService.by_id(self.request.unsafe_json_body["group_id"])
90 97 form_data = {
91 'title': self.request.unsafe_json_body.get('title',
92 'Unknown Title'),
93 'content': self.request.unsafe_json_body.get('content', ''),
94 'issue_type': self.request.unsafe_json_body['issue_type']['id'],
95 'priority': self.request.unsafe_json_body['priority']['id'],
96 'responsible': self.request.unsafe_json_body['responsible']['id'],
97 'project': self.integration.config['project']
98 "title": self.request.unsafe_json_body.get("title", "Unknown Title"),
99 "content": self.request.unsafe_json_body.get("content", ""),
100 "issue_type": self.request.unsafe_json_body["issue_type"]["id"],
101 "priority": self.request.unsafe_json_body["priority"]["id"],
102 "responsible": self.request.unsafe_json_body["responsible"]["id"],
103 "project": self.integration.config["project"],
98 104 }
99 105 try:
100 106 client = self.create_client()
101 107 issue = client.create_issue(form_data, request=self.request)
102 108 except IntegrationException as e:
103 109 self.request.response.status_code = 503
104 return {'error_messages': [str(e)]}
110 return {"error_messages": [str(e)]}
105 111
106 comment_body = 'Jira issue created: %s ' % issue['web_url']
107 comment = ReportComment(owner_id=self.request.user.id,
112 comment_body = "Jira issue created: %s " % issue["web_url"]
113 comment = ReportComment(
114 owner_id=self.request.user.id,
108 115 report_time=report.first_timestamp,
109 body=comment_body)
116 body=comment_body,
117 )
110 118 report.comments.append(comment)
111 119 return True
112 120
113 @view_config(route_name='integrations_id',
114 match_param=['action=setup', 'integration=jira'],
115 renderer='json',
116 permission='edit')
121 @view_config(
122 route_name="integrations_id",
123 match_param=["action=setup", "integration=jira"],
124 renderer="json",
125 permission="edit",
126 )
117 127 def setup(self):
118 128 """
119 129 Validates and creates an integration between the application and jira
@@ -121,28 +131,28 b' class JiraView(IntegrationView):'
121 131 resource = self.request.context.resource
122 132 form = forms.IntegrationJiraForm(
123 133 MultiDict(self.request.safe_json_body or {}),
124 csrf_context=self.request, **self.integration_config)
125 if self.request.method == 'POST' and form.validate():
134 csrf_context=self.request,
135 **self.integration_config
136 )
137 if self.request.method == "POST" and form.validate():
126 138 integration_config = {
127 'user_name': form.user_name.data,
128 'password': form.password.data,
129 'host_name': form.host_name.data,
130 'project': form.project.data
139 "user_name": form.user_name.data,
140 "password": form.password.data,
141 "host_name": form.host_name.data,
142 "project": form.project.data,
131 143 }
132 144 if not self.integration:
133 145 # add new integration
134 self.integration = JiraIntegration(
135 modified_date=datetime.utcnow(),
136 )
137 self.request.session.flash('Integration added')
146 self.integration = JiraIntegration(modified_date=datetime.utcnow())
147 self.request.session.flash("Integration added")
138 148 resource.integrations.append(self.integration)
139 149 else:
140 self.request.session.flash('Integration updated')
150 self.request.session.flash("Integration updated")
141 151 self.integration.config = integration_config
142 152 return integration_config
143 elif self.request.method == 'POST':
153 elif self.request.method == "POST":
144 154 return HTTPUnprocessableEntity(body=form.errors_json)
145 155
146 156 to_return = copy.deepcopy(self.integration_config)
147 to_return.pop('password', None)
157 to_return.pop("password", None)
148 158 return to_return
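Popping the password from a deep copy means the secret stays in the stored config while never being echoed back to the browser. The same pattern in isolation:

    # the stored config keeps the secret; only the copy returned to the UI drops it
    import copy

    integration_config = {"user_name": "bob", "password": "s3cret", "project": "AE"}
    to_return = copy.deepcopy(integration_config)
    to_return.pop("password", None)
    assert "password" not in to_return and "password" in integration_config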
@@ -17,8 +17,7 b''
17 17 import logging
18 18
19 19 from appenlight.models import DBSession
20 from appenlight.models.integrations.slack import SlackIntegration, \
21 IntegrationException
20 from appenlight.models.integrations.slack import SlackIntegration, IntegrationException
22 21 from pyramid.httpexceptions import HTTPUnprocessableEntity
23 22 from pyramid.view import view_config
24 23 from appenlight import forms
@@ -31,43 +30,45 b' from . import IntegrationView'
31 30
32 31
33 32 class SlackView(IntegrationView):
34 @view_config(route_name='integrations_id',
35 match_param=['action=info', 'integration=slack'],
36 renderer='json')
33 @view_config(
34 route_name="integrations_id",
35 match_param=["action=info", "integration=slack"],
36 renderer="json",
37 )
37 38 def get_info(self):
38 39 pass
39 40
40 @view_config(route_name='integrations_id',
41 match_param=['action=setup', 'integration=slack'],
42 renderer='json',
43 permission='edit')
41 @view_config(
42 route_name="integrations_id",
43 match_param=["action=setup", "integration=slack"],
44 renderer="json",
45 permission="edit",
46 )
44 47 def setup(self):
45 48 """
46 49 Validates and creates an integration between the application and slack
47 50 """
48 51 resource = self.request.context.resource
49 self.create_missing_channel(resource, 'slack')
52 self.create_missing_channel(resource, "slack")
50 53 form = forms.IntegrationSlackForm(
51 54 MultiDict(self.request.safe_json_body or {}),
52 csrf_context=self.request, **self.integration_config)
55 csrf_context=self.request,
56 **self.integration_config
57 )
53 58
54 if self.request.method == 'POST' and form.validate():
55 integration_config = {
56 'webhook_url': form.webhook_url.data
57 }
59 if self.request.method == "POST" and form.validate():
60 integration_config = {"webhook_url": form.webhook_url.data}
58 61 if not self.integration:
59 62 # add new integration
60 self.integration = SlackIntegration(
61 modified_date=datetime.utcnow(),
62 )
63 self.request.session.flash('Integration added')
63 self.integration = SlackIntegration(modified_date=datetime.utcnow())
64 self.request.session.flash("Integration added")
64 65 resource.integrations.append(self.integration)
65 66 else:
66 self.request.session.flash('Integration updated')
67 self.request.session.flash("Integration updated")
67 68 self.integration.config = integration_config
68 69 DBSession.flush()
69 self.create_missing_channel(resource, 'slack')
70 self.create_missing_channel(resource, "slack")
70 71 return integration_config
71 elif self.request.method == 'POST':
72 elif self.request.method == "POST":
72 73 return HTTPUnprocessableEntity(body=form.errors_json)
73 74 return self.integration_config
@@ -15,8 +15,10 b''
15 15 # limitations under the License.
16 16
17 17 from appenlight.models import DBSession
18 from appenlight.models.integrations.webhooks import WebhooksIntegration, \
19 IntegrationException
18 from appenlight.models.integrations.webhooks import (
19 WebhooksIntegration,
20 IntegrationException,
21 )
20 22 from pyramid.httpexceptions import HTTPUnprocessableEntity
21 23 from pyramid.view import view_config
22 24 from appenlight import forms
@@ -30,43 +32,48 b' from . import IntegrationView'
30 32
31 33
32 34 class WebhooksView(IntegrationView):
33 @view_config(route_name='integrations_id',
34 match_param=['action=info', 'integration=webhooks'],
35 renderer='json')
35 @view_config(
36 route_name="integrations_id",
37 match_param=["action=info", "integration=webhooks"],
38 renderer="json",
39 )
36 40 def get_info(self):
37 41 pass
38 42
39 @view_config(route_name='integrations_id',
40 match_param=['action=setup', 'integration=webhooks'],
41 renderer='json', permission='edit')
43 @view_config(
44 route_name="integrations_id",
45 match_param=["action=setup", "integration=webhooks"],
46 renderer="json",
47 permission="edit",
48 )
42 49 def setup(self):
43 50 """
44 51 Creates the webhooks integration
45 52 """
46 53 resource = self.request.context.resource
47 self.create_missing_channel(resource, 'webhooks')
54 self.create_missing_channel(resource, "webhooks")
48 55
49 56 form = forms.IntegrationWebhooksForm(
50 57 MultiDict(self.request.safe_json_body or {}),
51 csrf_context=self.request, **self.integration_config)
52 if self.request.method == 'POST' and form.validate():
58 csrf_context=self.request,
59 **self.integration_config
60 )
61 if self.request.method == "POST" and form.validate():
53 62 integration_config = {
54 'reports_webhook': form.reports_webhook.data,
55 'alerts_webhook': form.alerts_webhook.data,
63 "reports_webhook": form.reports_webhook.data,
64 "alerts_webhook": form.alerts_webhook.data,
56 65 }
57 66 if not self.integration:
58 67 # add new integration
59 self.integration = WebhooksIntegration(
60 modified_date=datetime.utcnow(),
61 )
62 self.request.session.flash('Integration added')
68 self.integration = WebhooksIntegration(modified_date=datetime.utcnow())
69 self.request.session.flash("Integration added")
63 70 resource.integrations.append(self.integration)
64 71 else:
65 self.request.session.flash('Integration updated')
72 self.request.session.flash("Integration updated")
66 73 self.integration.config = integration_config
67 74 DBSession.flush()
68 self.create_missing_channel(resource, 'webhooks')
75 self.create_missing_channel(resource, "webhooks")
69 76 return integration_config
70 elif self.request.method == 'POST':
77 elif self.request.method == "POST":
71 78 return HTTPUnprocessableEntity(body=form.errors_json)
72 79 return self.integration_config
@@ -21,29 +21,31 b' from pyramid.view import view_config'
21 21 from pyramid.httpexceptions import HTTPUnprocessableEntity
22 22 from appenlight.models import Datastores, Log
23 23 from appenlight.models.services.log import LogService
24 from appenlight.lib.utils import (build_filter_settings_from_query_dict,
25 es_index_name_limiter)
24 from appenlight.lib.utils import (
25 build_filter_settings_from_query_dict,
26 es_index_name_limiter,
27 )
26 28 from appenlight.lib.helpers import gen_pagination_headers
27 29 from appenlight.celery.tasks import logs_cleanup
28 30
29 31 log = logging.getLogger(__name__)
30 32
31 section_filters_key = 'appenlight:logs:filter:%s'
33 section_filters_key = "appenlight:logs:filter:%s"
32 34
33 35
34 @view_config(route_name='logs_no_id', renderer='json',
35 permission='authenticated')
36 @view_config(route_name="logs_no_id", renderer="json", permission="authenticated")
36 37 def fetch_logs(request):
37 38 """
38 39 Returns a list of log entries from Elasticsearch
39 40 """
40 41
41 filter_settings = build_filter_settings_from_query_dict(request,
42 request.GET.mixed())
42 filter_settings = build_filter_settings_from_query_dict(
43 request, request.GET.mixed()
44 )
43 45 logs_paginator = LogService.get_paginator_by_app_ids(
44 app_ids=filter_settings['resource'],
45 page=filter_settings['page'],
46 filter_settings=filter_settings
46 app_ids=filter_settings["resource"],
47 page=filter_settings["page"],
48 filter_settings=filter_settings,
47 49 )
48 50 headers = gen_pagination_headers(request, logs_paginator)
49 51 request.response.headers.update(headers)
@@ -51,91 +53,108 b' def fetch_logs(request):'
51 53 return [l.get_dict() for l in logs_paginator.sa_items]
52 54
53 55
54 @view_config(route_name='section_view',
55 match_param=['section=logs_section', 'view=fetch_series'],
56 renderer='json', permission='authenticated')
56 @view_config(
57 route_name="section_view",
58 match_param=["section=logs_section", "view=fetch_series"],
59 renderer="json",
60 permission="authenticated",
61 )
57 62 def logs_fetch_series(request):
58 63 """
59 64 Handles metric dashboard graphs
60 65 Returns information for time/tier breakdown
61 66 """
62 filter_settings = build_filter_settings_from_query_dict(request,
63 request.GET.mixed())
67 filter_settings = build_filter_settings_from_query_dict(
68 request, request.GET.mixed()
69 )
64 70 paginator = LogService.get_paginator_by_app_ids(
65 app_ids=filter_settings['resource'],
66 page=1, filter_settings=filter_settings, items_per_page=1)
71 app_ids=filter_settings["resource"],
72 page=1,
73 filter_settings=filter_settings,
74 items_per_page=1,
75 )
67 76 now = datetime.utcnow().replace(microsecond=0, second=0)
68 77 delta = timedelta(days=7)
69 78 if paginator.sa_items:
70 start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0,
71 second=0)
72 filter_settings['start_date'] = start_date - delta
79 start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0, second=0)
80 filter_settings["start_date"] = start_date - delta
73 81 else:
74 filter_settings['start_date'] = now - delta
75 filter_settings['end_date'] = filter_settings['start_date'] \
76 + timedelta(days=7)
82 filter_settings["start_date"] = now - delta
83 filter_settings["end_date"] = filter_settings["start_date"] + timedelta(days=7)
77 84
78 @request.registry.cache_regions.redis_sec_30.cache_on_arguments(
79 'logs_graphs')
85 @request.registry.cache_regions.redis_sec_30.cache_on_arguments("logs_graphs")
80 86 def cached(apps, search_params, delta, now):
81 87 data = LogService.get_time_series_aggregate(
82 filter_settings['resource'], filter_settings)
88 filter_settings["resource"], filter_settings
89 )
83 90 if not data:
84 91 return []
85 buckets = data['aggregations']['events_over_time']['buckets']
86 return [{"x": datetime.utcfromtimestamp(item["key"] / 1000),
87 "logs": item["doc_count"]} for item in buckets]
92 buckets = data["aggregations"]["events_over_time"]["buckets"]
93 return [
94 {
95 "x": datetime.utcfromtimestamp(item["key"] / 1000),
96 "logs": item["doc_count"],
97 }
98 for item in buckets
99 ]
88 100
89 101 return cached(filter_settings, request.GET.mixed(), delta, now)
90 102
91 103
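The nested cached function is memoized per-argument by the cache region decorator (a dogpile.cache-style cache_on_arguments): its body closes over filter_settings, and the minute-rounded now plus the raw GET params are passed in purely so they become part of the cache key, letting the 30-second Redis entry roll over naturally with wall time. A minimal sketch of the pattern, assuming dogpile.cache (AppEnlight wires its redis_sec_30 region into the Pyramid registry elsewhere):

    # hypothetical in-memory region standing in for the redis_sec_30 region
    from datetime import datetime
    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=30)

    @region.cache_on_arguments("logs_graphs")
    def cached(apps, search_params, delta, now):
        # expensive aggregation would run here; result is keyed on the arguments
        return {"computed_at": datetime.utcnow().isoformat()}

    now = datetime.utcnow().replace(microsecond=0, second=0)
    first = cached(("app1",), "level:error", "7d", now)
    assert cached(("app1",), "level:error", "7d", now) == first  # cache hit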
92 @view_config(route_name='logs_no_id', renderer='json', request_method="DELETE",
93 permission='authenticated')
104 @view_config(
105 route_name="logs_no_id",
106 renderer="json",
107 request_method="DELETE",
108 permission="authenticated",
109 )
94 110 def logs_mass_delete(request):
95 111 params = request.GET.mixed()
96 if 'resource' not in params:
112 if "resource" not in params:
97 113 raise HTTPUnprocessableEntity()
98 114 # this might be '' and then colander will not validate the schema
99 if not params.get('namespace'):
100 params.pop('namespace', None)
115 if not params.get("namespace"):
116 params.pop("namespace", None)
101 117 filter_settings = build_filter_settings_from_query_dict(
102 request, params, resource_permissions=['update_reports'])
118 request, params, resource_permissions=["update_reports"]
119 )
103 120
104 resource_id = list(filter_settings['resource'])[0]
121 resource_id = list(filter_settings["resource"])[0]
105 122 # filter settings return a list of all of the user's applications
106 123 # if the app does not match we normally would not care, as it is only used for search
107 124 # but here a user playing with params could wipe out their whole dataset
108 if int(resource_id) != int(params['resource']):
125 if int(resource_id) != int(params["resource"]):
109 126 raise HTTPUnprocessableEntity()
110 127
111 128 logs_cleanup.delay(resource_id, filter_settings)
112 msg = 'Log cleanup process started - it may take a while for ' \
113 'everything to get removed'
129 msg = (
130 "Log cleanup process started - it may take a while for "
131 "everything to get removed"
132 )
114 133 request.session.flash(msg)
115 134 return {}
116 135
117 136
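The resource comparison above is the whole safety net for this destructive endpoint: the permission-filtered settings are reduced to a single application id, and if it disagrees with the resource requested in the query string the view refuses to schedule the cleanup. With hypothetical values:

    # why the guard matters (hypothetical values): without the comparison a
    # stray ?resource=999 could schedule a cleanup of the wrong application
    params = {"resource": "999"}
    filter_settings = {"resource": [123]}  # resolved from the user's permissions
    resource_id = list(filter_settings["resource"])[0]
    assert int(resource_id) != int(params["resource"])  # -> HTTPUnprocessableEntity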
118 @view_config(route_name='section_view',
137 @view_config(
138 route_name="section_view",
119 139 match_param=("view=common_tags", "section=logs_section"),
120 renderer='json', permission='authenticated')
140 renderer="json",
141 permission="authenticated",
142 )
121 143 def common_tags(request):
122 144 config = request.GET.mixed()
123 filter_settings = build_filter_settings_from_query_dict(request,
124 config)
145 filter_settings = build_filter_settings_from_query_dict(request, config)
125 146
126 147 resources = list(filter_settings["resource"])
127 148 query = {
128 149 "query": {
129 150 "filtered": {
130 "filter": {
131 "and": [{"terms": {"resource_id": list(resources)}}]
151 "filter": {"and": [{"terms": {"resource_id": list(resources)}}]}
132 152 }
133 153 }
134 154 }
135 }
136 start_date = filter_settings.get('start_date')
137 end_date = filter_settings.get('end_date')
138 filter_part = query['query']['filtered']['filter']['and']
155 start_date = filter_settings.get("start_date")
156 end_date = filter_settings.get("end_date")
157 filter_part = query["query"]["filtered"]["filter"]["and"]
139 158
140 159 date_range = {"range": {"timestamp": {}}}
141 160 if start_date:
@@ -145,80 +164,56 b' def common_tags(request):'
145 164 if start_date or end_date:
146 165 filter_part.append(date_range)
147 166
148 levels = filter_settings.get('level')
167 levels = filter_settings.get("level")
149 168 if levels:
150 filter_part.append({"terms": {'log_level': levels}})
151 namespaces = filter_settings.get('namespace')
169 filter_part.append({"terms": {"log_level": levels}})
170 namespaces = filter_settings.get("namespace")
152 171 if namespaces:
153 filter_part.append({"terms": {'namespace': namespaces}})
172 filter_part.append({"terms": {"namespace": namespaces}})
154 173
155 query["aggs"] = {
156 "sub_agg": {
157 "terms": {
158 "field": "tag_list",
159 "size": 50
160 }
161 }
162 }
174 query["aggs"] = {"sub_agg": {"terms": {"field": "tag_list", "size": 50}}}
163 175 # tags
164 index_names = es_index_name_limiter(
165 ixtypes=[config.get('datasource', 'logs')])
166 result = Datastores.es.search(body=query, index=index_names, doc_type='log',
167 size=0)
168 tag_buckets = result['aggregations']['sub_agg'].get('buckets', [])
176 index_names = es_index_name_limiter(ixtypes=[config.get("datasource", "logs")])
177 result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
178 tag_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
169 179 # namespaces
170 query["aggs"] = {
171 "sub_agg": {
172 "terms": {
173 "field": "namespace",
174 "size": 50
175 }
176 }
177 }
178 result = Datastores.es.search(body=query, index=index_names, doc_type='log',
179 size=0)
180 namespaces_buckets = result['aggregations']['sub_agg'].get('buckets', [])
180 query["aggs"] = {"sub_agg": {"terms": {"field": "namespace", "size": 50}}}
181 result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
182 namespaces_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
181 183 return {
182 "tags": [item['key'] for item in tag_buckets],
183 "namespaces": [item['key'] for item in namespaces_buckets]
184 "tags": [item["key"] for item in tag_buckets],
185 "namespaces": [item["key"] for item in namespaces_buckets],
184 186 }
185 187
186 188
187 @view_config(route_name='section_view',
189 @view_config(
190 route_name="section_view",
188 191 match_param=("view=common_values", "section=logs_section"),
189 renderer='json', permission='authenticated')
192 renderer="json",
193 permission="authenticated",
194 )
190 195 def common_values(request):
191 196 config = request.GET.mixed()
192 datasource = config.pop('datasource', 'logs')
193 filter_settings = build_filter_settings_from_query_dict(request,
194 config)
197 datasource = config.pop("datasource", "logs")
198 filter_settings = build_filter_settings_from_query_dict(request, config)
195 199 resources = list(filter_settings["resource"])
196 tag_name = filter_settings['tags'][0]['value'][0]
200 tag_name = filter_settings["tags"][0]["value"][0]
197 201 query = {
198 'query': {
199 'filtered': {
200 'filter': {
201 'and': [
202 {'terms': {'resource_id': list(resources)}},
203 {'terms': {
204 'namespace': filter_settings['namespace']}}
202 "query": {
203 "filtered": {
204 "filter": {
205 "and": [
206 {"terms": {"resource_id": list(resources)}},
207 {"terms": {"namespace": filter_settings["namespace"]}},
205 208 ]
206 209 }
207 210 }
208 211 }
209 212 }
210 query['aggs'] = {
211 'sub_agg': {
212 'terms': {
213 'field': 'tags.{}.values'.format(tag_name),
214 'size': 50
215 }
216 }
213 query["aggs"] = {
214 "sub_agg": {"terms": {"field": "tags.{}.values".format(tag_name), "size": 50}}
217 215 }
218 216 index_names = es_index_name_limiter(ixtypes=[datasource])
219 result = Datastores.es.search(body=query, index=index_names, doc_type='log',
220 size=0)
221 values_buckets = result['aggregations']['sub_agg'].get('buckets', [])
222 return {
223 "values": [item['key'] for item in values_buckets]
224 }
217 result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
218 values_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
219 return {"values": [item["key"] for item in values_buckets]}
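common_tags and common_values share one Elasticsearch search shape: a filtered query plus a single terms aggregation, executed with size=0 so only buckets, not documents, come back. A sketch of the response these views unpack, assuming the pre-5.x response format this code targets:

    # hypothetical response in the shape the views consume
    result = {
        "hits": {"total": 0, "hits": []},  # size=0 suppresses the documents
        "aggregations": {
            "sub_agg": {"buckets": [{"key": "production", "doc_count": 42}]}
        },
    }
    values = [item["key"] for item in result["aggregations"]["sub_agg"]["buckets"]]
    assert values == ["production"]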
@@ -22,11 +22,16 b' import logging'
22 22 log = logging.getLogger(__name__)
23 23
24 24
25 @view_config(route_name='plugin_configs', renderer='json',
26 permission='edit', request_method='GET')
25 @view_config(
26 route_name="plugin_configs",
27 renderer="json",
28 permission="edit",
29 request_method="GET",
30 )
27 31 def query(request):
28 32 configs = PluginConfigService.by_query(
29 request.params.get('resource_id'),
30 plugin_name=request.matchdict.get('plugin_name'),
31 section=request.params.get('section'))
33 request.params.get("resource_id"),
34 plugin_name=request.matchdict.get("plugin_name"),
35 section=request.params.get("section"),
36 )
32 37 return [c for c in configs]
@@ -33,20 +33,22 b' from appenlight import forms'
33 33 from appenlight.lib.enums import ReportType
34 34 from appenlight.lib.helpers import gen_pagination_headers
35 35 from appenlight.lib.utils import build_filter_settings_from_query_dict
36 from appenlight.validators import ReportSearchSchema, TagListSchema, \
37 accepted_search_params
36 from appenlight.validators import (
37 ReportSearchSchema,
38 TagListSchema,
39 accepted_search_params,
40 )
38 41 from webob.multidict import MultiDict
39 42
40 43 _ = str
41 44
42 45 log = logging.getLogger(__name__)
43 46
44 section_filters_key = 'appenlight:reports:filter:%s'
47 section_filters_key = "appenlight:reports:filter:%s"
45 48
46 49
47 @view_config(route_name='reports', renderer='json', permission='authenticated')
48 @view_config(route_name='slow_reports', renderer='json',
49 permission='authenticated')
50 @view_config(route_name="reports", renderer="json", permission="authenticated")
51 @view_config(route_name="slow_reports", renderer="json", permission="authenticated")
50 52 def index(request):
51 53 """
52 54 Returns a list of report groups based on the user's search query
@@ -55,7 +57,8 b' def index(request):'
55 57 request.user.last_login_date = datetime.utcnow()
56 58
57 59 applications = UserService.resources_with_perms(
58 request.user, ['view'], resource_types=['application'])
60 request.user, ["view"], resource_types=["application"]
61 )
59 62
60 63 search_params = request.GET.mixed()
61 64
@@ -63,24 +66,36 b' def index(request):'
63 66 schema = ReportSearchSchema().bind(resources=all_possible_app_ids)
64 67 tag_schema = TagListSchema()
65 68 filter_settings = schema.deserialize(search_params)
66 tag_list = [{"name": k, "value": v} for k, v in filter_settings.items()
67 if k not in accepted_search_params]
69 tag_list = [
70 {"name": k, "value": v}
71 for k, v in filter_settings.items()
72 if k not in accepted_search_params
73 ]
68 74 tags = tag_schema.deserialize(tag_list)
69 filter_settings['tags'] = tags
70 if request.matched_route.name == 'slow_reports':
71 filter_settings['report_type'] = [ReportType.slow]
75 filter_settings["tags"] = tags
76 if request.matched_route.name == "slow_reports":
77 filter_settings["report_type"] = [ReportType.slow]
72 78 else:
73 filter_settings['report_type'] = [ReportType.error]
79 filter_settings["report_type"] = [ReportType.error]
74 80
75 81 reports_paginator = ReportGroupService.get_paginator_by_app_ids(
76 app_ids=filter_settings['resource'],
77 page=filter_settings['page'],
78 filter_settings=filter_settings
82 app_ids=filter_settings["resource"],
83 page=filter_settings["page"],
84 filter_settings=filter_settings,
79 85 )
80 86 reports = []
81 include_keys = ('id', 'http_status', 'report_type', 'resource_name',
82 'front_url', 'resource_id', 'error', 'url_path', 'tags',
83 'duration')
87 include_keys = (
88 "id",
89 "http_status",
90 "report_type",
91 "resource_name",
92 "front_url",
93 "resource_id",
94 "error",
95 "url_path",
96 "tags",
97 "duration",
98 )
84 99 for report in reports_paginator.sa_items:
85 100 reports.append(report.get_dict(request, include_keys=include_keys))
86 101 headers = gen_pagination_headers(request, reports_paginator)
@@ -88,8 +103,9 b' def index(request):'
88 103 return reports
89 104
90 105
91 @view_config(route_name='report_groups', renderer='json', permission='view',
92 request_method="GET")
106 @view_config(
107 route_name="report_groups", renderer="json", permission="view", request_method="GET"
108 )
93 109 def view_report(request):
94 110 """
95 111 Show an individual detailed report group along with its latest report
@@ -98,18 +114,21 b' def view_report(request):'
98 114 if not report_group.read:
99 115 report_group.read = True
100 116
101 report_id = request.params.get('reportId', request.params.get('report_id'))
102 report_dict = report_group.get_report(report_id).get_dict(request,
103 details=True)
117 report_id = request.params.get("reportId", request.params.get("report_id"))
118 report_dict = report_group.get_report(report_id).get_dict(request, details=True)
104 119 # disallow browsing other occurrences by anonymous users
105 120 if not request.user:
106 report_dict.pop('group_next_report', None)
107 report_dict.pop('group_previous_report', None)
121 report_dict.pop("group_next_report", None)
122 report_dict.pop("group_previous_report", None)
108 123 return report_dict
109 124
110 125
111 @view_config(route_name='report_groups', renderer='json',
112 permission='update_reports', request_method='DELETE')
126 @view_config(
127 route_name="report_groups",
128 renderer="json",
129 permission="update_reports",
130 request_method="DELETE",
131 )
113 132 def remove(request):
114 133 """
115 134 Used to remove report groups from the database
@@ -121,8 +140,13 b' def remove(request):'
121 140 return True
122 141
123 142
124 @view_config(route_name='report_groups_property', match_param='key=comments',
125 renderer='json', permission='view', request_method="POST")
143 @view_config(
144 route_name="report_groups_property",
145 match_param="key=comments",
146 renderer="json",
147 permission="view",
148 request_method="POST",
149 )
126 150 def comment_create(request):
127 151 """
128 152 Creates user comments for a report group, sends email notifications
@@ -130,76 +154,92 b' def comment_create(request):'
130 154 """
131 155 report_group = request.context.report_group
132 156 application = request.context.resource
133 form = forms.CommentForm(MultiDict(request.unsafe_json_body),
134 csrf_context=request)
135 if request.method == 'POST' and form.validate():
136 comment = ReportComment(owner_id=request.user.id,
137 report_time=report_group.first_timestamp)
157 form = forms.CommentForm(MultiDict(request.unsafe_json_body), csrf_context=request)
158 if request.method == "POST" and form.validate():
159 comment = ReportComment(
160 owner_id=request.user.id, report_time=report_group.first_timestamp
161 )
138 162 form.populate_obj(comment)
139 163 report_group.comments.append(comment)
140 perm_list = ResourceService.users_for_perm(application, 'view')
164 perm_list = ResourceService.users_for_perm(application, "view")
141 165 uids_to_notify = []
142 166 users_to_notify = []
143 167 for perm in perm_list:
144 168 user = perm.user
145 if ('@{}'.format(user.user_name) in comment.body and
146 user.id not in uids_to_notify):
169 if (
170 "@{}".format(user.user_name) in comment.body
171 and user.id not in uids_to_notify
172 ):
147 173 uids_to_notify.append(user.id)
148 174 users_to_notify.append(user)
149 175
150 176 commenters = ReportGroupService.users_commenting(
151 report_group, exclude_user_id=request.user.id)
177 report_group, exclude_user_id=request.user.id
178 )
152 179 for user in commenters:
153 180 if user.id not in uids_to_notify:
154 181 uids_to_notify.append(user.id)
155 182 users_to_notify.append(user)
156 183
157 184 for user in users_to_notify:
158 email_vars = {'user': user,
159 'commenting_user': request.user,
160 'request': request,
161 'application': application,
162 'report_group': report_group,
163 'comment': comment,
164 'email_title': "AppEnlight :: New comment"}
185 email_vars = {
186 "user": user,
187 "commenting_user": request.user,
188 "request": request,
189 "application": application,
190 "report_group": report_group,
191 "comment": comment,
192 "email_title": "AppEnlight :: New comment",
193 }
165 194 UserService.send_email(
166 195 request,
167 196 recipients=[user.email],
168 197 variables=email_vars,
169 template='/email_templates/new_comment_report.jinja2')
170 request.session.flash(_('Your comment was created'))
198 template="/email_templates/new_comment_report.jinja2",
199 )
200 request.session.flash(_("Your comment was created"))
171 201 return comment.get_dict()
172 202 else:
173 203 return form.errors
174 204
175 205
176 @view_config(route_name='report_groups_property',
177 match_param='key=assigned_users', renderer='json',
178 permission='update_reports', request_method="GET")
206 @view_config(
207 route_name="report_groups_property",
208 match_param="key=assigned_users",
209 renderer="json",
210 permission="update_reports",
211 request_method="GET",
212 )
179 213 def assigned_users(request):
180 214 """
181 215 Returns list of users a specific report group is assigned to for review
182 216 """
183 217 report_group = request.context.report_group
184 218 application = request.context.resource
185 users = set([p.user for p in ResourceService.users_for_perm(application, 'view')])
219 users = set([p.user for p in ResourceService.users_for_perm(application, "view")])
186 220 currently_assigned = [u.user_name for u in report_group.assigned_users]
187 user_status = {'assigned': [], 'unassigned': []}
221 user_status = {"assigned": [], "unassigned": []}
188 222 # handle users
189 223 for user in users:
190 user_dict = {'user_name': user.user_name,
191 'gravatar_url': UserService.gravatar_url(user),
192 'name': '%s %s' % (user.first_name, user.last_name,)}
224 user_dict = {
225 "user_name": user.user_name,
226 "gravatar_url": UserService.gravatar_url(user),
227 "name": "%s %s" % (user.first_name, user.last_name),
228 }
193 229 if user.user_name in currently_assigned:
194 user_status['assigned'].append(user_dict)
195 elif user_dict not in user_status['unassigned']:
196 user_status['unassigned'].append(user_dict)
230 user_status["assigned"].append(user_dict)
231 elif user_dict not in user_status["unassigned"]:
232 user_status["unassigned"].append(user_dict)
197 233 return user_status
198 234
199 235
200 @view_config(route_name='report_groups_property',
201 match_param='key=assigned_users', renderer='json',
202 permission='update_reports', request_method="PATCH")
236 @view_config(
237 route_name="report_groups_property",
238 match_param="key=assigned_users",
239 renderer="json",
240 permission="update_reports",
241 request_method="PATCH",
242 )
203 243 def assign_users(request):
204 244 """
205 245 Assigns specific report group to user for review - sends email notification
@@ -210,17 +250,18 b' def assign_users(request):'
210 250 new_assigns = request.unsafe_json_body
211 251
212 252 # first unassign old users
213 for user_name in new_assigns['unassigned']:
253 for user_name in new_assigns["unassigned"]:
214 254 if user_name in currently_assigned:
215 255 user = UserService.by_user_name(user_name)
216 256 report_group.assigned_users.remove(user)
217 comment = ReportComment(owner_id=request.user.id,
218 report_time=report_group.first_timestamp)
219 comment.body = 'Unassigned group from @%s' % user_name
257 comment = ReportComment(
258 owner_id=request.user.id, report_time=report_group.first_timestamp
259 )
260 comment.body = "Unassigned group from @%s" % user_name
220 261 report_group.comments.append(comment)
221 262
222 263 # assign new users
223 for user_name in new_assigns['assigned']:
264 for user_name in new_assigns["assigned"]:
224 265 if user_name not in currently_assigned:
225 266 user = UserService.by_user_name(user_name)
226 267 if user in report_group.assigned_users:
@@ -229,66 +270,81 b' def assign_users(request):'
229 270 assignment = ReportAssignment(
230 271 owner_id=user.id,
231 272 report_time=report_group.first_timestamp,
232 group_id=report_group.id)
273 group_id=report_group.id,
274 )
233 275 DBSession.add(assignment)
234 276
235 comment = ReportComment(owner_id=request.user.id,
236 report_time=report_group.first_timestamp)
237 comment.body = 'Assigned report_group to @%s' % user_name
277 comment = ReportComment(
278 owner_id=request.user.id, report_time=report_group.first_timestamp
279 )
280 comment.body = "Assigned report_group to @%s" % user_name
238 281 report_group.comments.append(comment)
239 282
240 email_vars = {'user': user,
241 'request': request,
242 'application': application,
243 'report_group': report_group,
244 'email_title': "AppEnlight :: Assigned Report"}
245 UserService.send_email(request, recipients=[user.email],
283 email_vars = {
284 "user": user,
285 "request": request,
286 "application": application,
287 "report_group": report_group,
288 "email_title": "AppEnlight :: Assigned Report",
289 }
290 UserService.send_email(
291 request,
292 recipients=[user.email],
246 293 variables=email_vars,
247 template='/email_templates/assigned_report.jinja2')
294 template="/email_templates/assigned_report.jinja2",
295 )
248 296
249 297 return True
250 298
251 299
252 @view_config(route_name='report_groups_property', match_param='key=history',
253 renderer='json', permission='view')
300 @view_config(
301 route_name="report_groups_property",
302 match_param="key=history",
303 renderer="json",
304 permission="view",
305 )
254 306 def history(request):
255 307 """ Separate error graph or similar graph"""
256 308 report_group = request.context.report_group
257 309 query_params = request.GET.mixed()
258 query_params['resource'] = (report_group.resource_id,)
310 query_params["resource"] = (report_group.resource_id,)
259 311
260 filter_settings = build_filter_settings_from_query_dict(request,
261 query_params)
262 if not filter_settings.get('end_date'):
312 filter_settings = build_filter_settings_from_query_dict(request, query_params)
313 if not filter_settings.get("end_date"):
263 314 end_date = datetime.utcnow().replace(microsecond=0, second=0)
264 filter_settings['end_date'] = end_date
315 filter_settings["end_date"] = end_date
265 316
266 if not filter_settings.get('start_date'):
317 if not filter_settings.get("start_date"):
267 318 delta = timedelta(days=30)
268 filter_settings['start_date'] = filter_settings['end_date'] - delta
319 filter_settings["start_date"] = filter_settings["end_date"] - delta
269 320
270 filter_settings['group_id'] = report_group.id
321 filter_settings["group_id"] = report_group.id
271 322
272 323 result = ReportGroupService.get_report_stats(request, filter_settings)
273 324
274 325 plot_data = []
275 326 for row in result:
276 327 point = {
277 'x': row['x'],
278 'reports': row['report'] + row['slow_report'] + row['not_found']}
328 "x": row["x"],
329 "reports": row["report"] + row["slow_report"] + row["not_found"],
330 }
279 331 plot_data.append(point)
280 332
281 333 return plot_data
282 334
283 335
284 @view_config(route_name='report_groups', renderer='json',
285 permission='update_reports', request_method="PATCH")
336 @view_config(
337 route_name="report_groups",
338 renderer="json",
339 permission="update_reports",
340 request_method="PATCH",
341 )
286 342 def report_groups_PATCH(request):
287 343 """
288 344 Used to update the report group fixed status
289 345 """
290 346 report_group = request.context.report_group
291 allowed_keys = ['public', 'fixed']
347 allowed_keys = ["public", "fixed"]
292 348 for k, v in request.unsafe_json_body.items():
293 349 if k in allowed_keys:
294 350 setattr(report_group, k, v)
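A quick client-side sketch of what the whitelist in report_groups_PATCH means in practice (the URL and payload are hypothetical; real routes and auth are outside this diff):

import requests

resp = requests.patch(
    "https://appenlight.example.com/report_groups/123",  # hypothetical URL
    json={"fixed": True, "public": False, "color": "red"},
)
resp.raise_for_status()
# Only keys listed in allowed_keys ("public", "fixed") are applied to the
# report group; an unknown key such as "color" is silently dropped.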
@@ -42,39 +42,52 b' log = logging.getLogger(__name__)'
42 42 GLOBAL_REQ = None
43 43
44 44
45 @view_config(route_name='test', match_param='action=mail',
46 renderer='string', permission='root_administration')
45 @view_config(
46 route_name="test",
47 match_param="action=mail",
48 renderer="string",
49 permission="root_administration",
50 )
47 51 def mail(request):
48 52 """
49 53 Test email communication
50 54 """
51 request.environ['HTTP_HOST'] = 'appenlight.com'
52 request.environ['wsgi.url_scheme'] = 'https'
53 renderer_vars = {"title": "You have just registered on AppEnlight",
55 request.environ["HTTP_HOST"] = "appenlight.com"
56 request.environ["wsgi.url_scheme"] = "https"
57 renderer_vars = {
58 "title": "You have just registered on AppEnlight",
54 59 "username": "test",
55 60 "email": "grzegżółka",
56 'firstname': 'dupa'}
61 "firstname": "dupa",
62 }
57 63 # return vars
58 html = pyramid.renderers.render('/email_templates/registered.jinja2',
59 renderer_vars,
60 request=request)
61 message = Message(subject="hello world %s" % random.randint(1, 9999),
64 html = pyramid.renderers.render(
65 "/email_templates/registered.jinja2", renderer_vars, request=request
66 )
67 message = Message(
68 subject="hello world %s" % random.randint(1, 9999),
62 69 sender="info@appenlight.com",
63 70 recipients=["ergo14@gmail.com"],
64 html=html)
71 html=html,
72 )
65 73 request.registry.mailer.send(message)
66 74 return html
67 75 return vars
68 76
69 77
70 @view_config(route_name='test', match_param='action=alerting',
71 renderer='appenlight:templates/tests/alerting.jinja2',
72 permission='root_administration')
78 @view_config(
79 route_name="test",
80 match_param="action=alerting",
81 renderer="appenlight:templates/tests/alerting.jinja2",
82 permission="root_administration",
83 )
73 84 def alerting_test(request):
74 85 """
75 86 Allows sending test data on various registered alerting channels
76 87 """
77 applications = UserService.resources_with_perms(request.user, ['view'], resource_types=['application'])
88 applications = UserService.resources_with_perms(
89 request.user, ["view"], resource_types=["application"]
90 )
78 91 # what we can select in total
79 92 all_possible_app_ids = [app.resource_id for app in applications]
80 93 resource = applications[0]
@@ -83,114 +96,129 b' def alerting_test(request):'
83 96 for channel in request.user.alert_channels:
84 97 alert_channels.append(channel.get_dict())
85 98
86 cname = request.params.get('channel_name')
87 cvalue = request.params.get('channel_value')
88 event_name = request.params.get('event_name')
99 cname = request.params.get("channel_name")
100 cvalue = request.params.get("channel_value")
101 event_name = request.params.get("event_name")
89 102 if cname and cvalue:
90 103 for channel in request.user.alert_channels:
91 if (channel.channel_value == cvalue and
92 channel.channel_name == cname):
104 if channel.channel_value == cvalue and channel.channel_name == cname:
93 105 break
94 if event_name in ['error_report_alert', 'slow_report_alert']:
106 if event_name in ["error_report_alert", "slow_report_alert"]:
95 107 # opened
96 new_event = Event(resource_id=resource.resource_id,
108 new_event = Event(
109 resource_id=resource.resource_id,
97 110 event_type=Event.types[event_name],
98 111 start_date=datetime.datetime.utcnow(),
99 status=Event.statuses['active'],
100 values={'reports': 5,
101 'threshold': 10}
112 status=Event.statuses["active"],
113 values={"reports": 5, "threshold": 10},
114 )
115 channel.notify_alert(
116 resource=resource, event=new_event, user=request.user, request=request
102 117 )
103 channel.notify_alert(resource=resource,
104 event=new_event,
105 user=request.user,
106 request=request)
107 118
108 119 # closed
109 ev_type = Event.types[event_name.replace('open', 'close')]
110 new_event = Event(resource_id=resource.resource_id,
120 ev_type = Event.types[event_name.replace("open", "close")]
121 new_event = Event(
122 resource_id=resource.resource_id,
111 123 event_type=ev_type,
112 124 start_date=datetime.datetime.utcnow(),
113 status=Event.statuses['closed'],
114 values={'reports': 5,
115 'threshold': 10})
116 channel.notify_alert(resource=resource,
117 event=new_event,
118 user=request.user,
119 request=request)
120 elif event_name == 'notify_reports':
121 report = ReportGroupService.by_app_ids(all_possible_app_ids) \
122 .filter(ReportGroup.report_type == ReportType.error).first()
125 status=Event.statuses["closed"],
126 values={"reports": 5, "threshold": 10},
127 )
128 channel.notify_alert(
129 resource=resource, event=new_event, user=request.user, request=request
130 )
131 elif event_name == "notify_reports":
132 report = (
133 ReportGroupService.by_app_ids(all_possible_app_ids)
134 .filter(ReportGroup.report_type == ReportType.error)
135 .first()
136 )
123 137 confirmed_reports = [(5, report), (1, report)]
124 channel.notify_reports(resource=resource,
138 channel.notify_reports(
139 resource=resource,
125 140 user=request.user,
126 141 request=request,
127 142 since_when=datetime.datetime.utcnow(),
128 reports=confirmed_reports)
143 reports=confirmed_reports,
144 )
129 145 confirmed_reports = [(5, report)]
130 channel.notify_reports(resource=resource,
146 channel.notify_reports(
147 resource=resource,
131 148 user=request.user,
132 149 request=request,
133 150 since_when=datetime.datetime.utcnow(),
134 reports=confirmed_reports)
135 elif event_name == 'notify_uptime':
136 new_event = Event(resource_id=resource.resource_id,
137 event_type=Event.types['uptime_alert'],
151 reports=confirmed_reports,
152 )
153 elif event_name == "notify_uptime":
154 new_event = Event(
155 resource_id=resource.resource_id,
156 event_type=Event.types["uptime_alert"],
138 157 start_date=datetime.datetime.utcnow(),
139 status=Event.statuses['active'],
140 values={"status_code": 500,
141 "tries": 2,
142 "response_time": 0})
143 channel.notify_uptime_alert(resource=resource,
144 event=new_event,
145 user=request.user,
146 request=request)
147 elif event_name == 'chart_alert':
158 status=Event.statuses["active"],
159 values={"status_code": 500, "tries": 2, "response_time": 0},
160 )
161 channel.notify_uptime_alert(
162 resource=resource, event=new_event, user=request.user, request=request
163 )
164 elif event_name == "chart_alert":
148 165 event = EventService.by_type_and_status(
149 event_types=(Event.types['chart_alert'],),
150 status_types=(Event.statuses['active'],)).first()
151 channel.notify_chart_alert(resource=event.resource,
152 event=event,
153 user=request.user,
154 request=request)
155 elif event_name == 'daily_digest':
156 since_when = datetime.datetime.utcnow() - datetime.timedelta(
157 hours=8)
158 filter_settings = {'resource': [resource.resource_id],
159 'tags': [{'name': 'type',
160 'value': ['error'], 'op': None}],
161 'type': 'error', 'start_date': since_when}
166 event_types=(Event.types["chart_alert"],),
167 status_types=(Event.statuses["active"],),
168 ).first()
169 channel.notify_chart_alert(
170 resource=event.resource, event=event, user=request.user, request=request
171 )
172 elif event_name == "daily_digest":
173 since_when = datetime.datetime.utcnow() - datetime.timedelta(hours=8)
174 filter_settings = {
175 "resource": [resource.resource_id],
176 "tags": [{"name": "type", "value": ["error"], "op": None}],
177 "type": "error",
178 "start_date": since_when,
179 }
162 180
163 181 reports = ReportGroupService.get_trending(
164 request, filter_settings=filter_settings, limit=50)
165 channel.send_digest(resource=resource,
182 request, filter_settings=filter_settings, limit=50
183 )
184 channel.send_digest(
185 resource=resource,
166 186 user=request.user,
167 187 request=request,
168 188 since_when=datetime.datetime.utcnow(),
169 reports=reports)
189 reports=reports,
190 )
170 191
171 return {'alert_channels': alert_channels,
172 'applications': dict([(app.resource_id, app.resource_name)
173 for app in applications.all()])}
192 return {
193 "alert_channels": alert_channels,
194 "applications": dict(
195 [(app.resource_id, app.resource_name) for app in applications.all()]
196 ),
197 }
174 198
175 199
176 @view_config(route_name='test', match_param='action=error',
177 renderer='string', permission='root_administration')
200 @view_config(
201 route_name="test",
202 match_param="action=error",
203 renderer="string",
204 permission="root_administration",
205 )
178 206 def error(request):
179 207 """
180 208 Raises an internal error with some test data for testing purposes
181 209 """
182 request.environ['appenlight.message'] = 'test message'
183 request.environ['appenlight.extra']['dupa'] = 'dupa'
184 request.environ['appenlight.extra']['message'] = 'message'
185 request.environ['appenlight.tags']['action'] = 'test_error'
186 request.environ['appenlight.tags']['count'] = 5
210 request.environ["appenlight.message"] = "test message"
211 request.environ["appenlight.extra"]["dupa"] = "dupa"
212 request.environ["appenlight.extra"]["message"] = "message"
213 request.environ["appenlight.tags"]["action"] = "test_error"
214 request.environ["appenlight.tags"]["count"] = 5
187 215 log.debug(chr(960))
188 log.debug('debug')
216 log.debug("debug")
189 217 log.info(chr(960))
190 log.info('INFO')
191 log.warning('warning')
218 log.info("INFO")
219 log.warning("warning")
192 220
193 @time_trace(name='error.foobar', min_duration=0.1)
221 @time_trace(name="error.foobar", min_duration=0.1)
194 222 def fooobar():
195 223 time.sleep(0.12)
196 224 return 1
@@ -198,24 +226,28 b' def error(request):'
198 226 fooobar()
199 227
200 228 def foobar(somearg):
201 raise Exception('test')
229 raise Exception("test")
202 230
203 231 client = redis.StrictRedis()
204 client.setex('testval', 10, 'foo')
205 request.environ['appenlight.force_send'] = 1
232 client.setex("testval", 10, "foo")
233 request.environ["appenlight.force_send"] = 1
206 234
207 235 # stats, result = get_local_storage(local_timing).get_thread_stats()
208 236 # import pprint
209 237 # pprint.pprint(stats)
210 238 # pprint.pprint(result)
211 239 # print 'entries', len(result)
212 request.environ['appenlight.username'] = 'ErgO'
213 raise Exception(chr(960) + '%s' % random.randint(1, 5))
240 request.environ["appenlight.username"] = "ErgO"
241 raise Exception(chr(960) + "%s" % random.randint(1, 5))
214 242 return {}
215 243
216 244
217 @view_config(route_name='test', match_param='action=task',
218 renderer='string', permission='root_administration')
245 @view_config(
246 route_name="test",
247 match_param="action=task",
248 renderer="string",
249 permission="root_administration",
250 )
219 251 def test_task(request):
220 252 """
221 253 Test erroneous celery task
@@ -223,11 +255,15 b' def test_task(request):'
223 255 import appenlight.celery.tasks
224 256
225 257 appenlight.celery.tasks.test_exception_task.delay()
226 return 'task sent'
258 return "task sent"
227 259
228 260
229 @view_config(route_name='test', match_param='action=task_retry',
230 renderer='string', permission='root_administration')
261 @view_config(
262 route_name="test",
263 match_param="action=task_retry",
264 renderer="string",
265 permission="root_administration",
266 )
231 267 def test_task_retry(request):
232 268 """
233 269 Test erroneous celery task with retry
@@ -235,80 +271,131 b' def test_task_retry(request):'
235 271 import appenlight.celery.tasks
236 272
237 273 appenlight.celery.tasks.test_retry_exception_task.delay()
238 return 'task sent'
274 return "task sent"
239 275
240 276
241 @view_config(route_name='test', match_param='action=celery_emails',
242 renderer='string', permission='root_administration')
277 @view_config(
278 route_name="test",
279 match_param="action=celery_emails",
280 renderer="string",
281 permission="root_administration",
282 )
243 283 def test_celery_emails(request):
244 284 import appenlight.celery.tasks
285
245 286 appenlight.celery.tasks.alerting.delay()
246 return 'task sent'
287 return "task sent"
247 288
248 289
249 @view_config(route_name='test', match_param='action=daily_digest',
250 renderer='string', permission='root_administration')
290 @view_config(
291 route_name="test",
292 match_param="action=daily_digest",
293 renderer="string",
294 permission="root_administration",
295 )
251 296 def test_celery_daily_digest(request):
252 297 import appenlight.celery.tasks
298
253 299 appenlight.celery.tasks.daily_digest.delay()
254 return 'task sent'
300 return "task sent"
255 301
256 302
257 @view_config(route_name='test', match_param='action=celery_alerting',
258 renderer='string', permission='root_administration')
303 @view_config(
304 route_name="test",
305 match_param="action=celery_alerting",
306 renderer="string",
307 permission="root_administration",
308 )
259 309 def test_celery_alerting(request):
260 310 import appenlight.celery.tasks
311
261 312 appenlight.celery.tasks.alerting()
262 return 'task sent'
313 return "task sent"
263 314
264 315
265 @view_config(route_name='test', match_param='action=logging',
266 renderer='string', permission='root_administration')
316 @view_config(
317 route_name="test",
318 match_param="action=logging",
319 renderer="string",
320 permission="root_administration",
321 )
267 322 def logs(request):
268 323 """
269 324 Test some in-app logging
270 325 """
271 326 log.debug(chr(960))
272 log.debug('debug')
327 log.debug("debug")
273 328 log.info(chr(960))
274 log.info('INFO')
275 log.warning('Matched GET /\xc4\x85\xc5\xbc\xc4\x87'
276 '\xc4\x99\xc4\x99\xc4\x85/summary')
277 log.warning('XXXXMatched GET /\xc4\x85\xc5\xbc\xc4'
278 '\x87\xc4\x99\xc4\x99\xc4\x85/summary')
279 log.warning('DUPA /ążćęęą')
329 log.info("INFO")
330 log.warning(
331 "Matched GET /\xc4\x85\xc5\xbc\xc4\x87" "\xc4\x99\xc4\x99\xc4\x85/summary"
332 )
333 log.warning(
334 "XXXXMatched GET /\xc4\x85\xc5\xbc\xc4" "\x87\xc4\x99\xc4\x99\xc4\x85/summary"
335 )
336 log.warning("DUPA /ążćęęą")
280 337 log.warning("g\u017ceg\u017c\u00f3\u0142ka")
281 log.error('TEST Lorem ipsum2',
282 extra={'user': 'ergo', 'commit': 'sog8ds0g7sdih12hh1j512h5k'})
283 log.fatal('TEST Lorem ipsum3')
284 log.warning('TEST Lorem ipsum',
285 extra={"action": 'purchase',
338 log.error(
339 "TEST Lorem ipsum2",
340 extra={"user": "ergo", "commit": "sog8ds0g7sdih12hh1j512h5k"},
341 )
342 log.fatal("TEST Lorem ipsum3")
343 log.warning(
344 "TEST Lorem ipsum",
345 extra={
346 "action": "purchase",
347 "price": random.random() * 100,
348 "quantity": random.randint(1, 99),
349 },
350 )
351 log.warning(
352 "test_pkey",
353 extra={
354 "action": "test_pkey",
355 "price": random.random() * 100,
356 "ae_primary_key": 1,
357 "quantity": random.randint(1, 99),
358 },
359 )
360 log.warning(
361 "test_pkey2",
362 extra={
363 "action": "test_pkey",
286 364 "price": random.random() * 100,
287 "quantity": random.randint(1, 99)})
288 log.warning('test_pkey',
289 extra={"action": 'test_pkey', "price": random.random() * 100,
290 'ae_primary_key': 1,
291 "quantity": random.randint(1, 99)})
292 log.warning('test_pkey2',
293 extra={"action": 'test_pkey', "price": random.random() * 100,
294 'ae_primary_key': 'b',
295 'ae_permanent': 't',
296 "quantity": random.randint(1, 99)})
297 log.warning('test_pkey3',
298 extra={"action": 'test_pkey', "price": random.random() * 100,
299 'ae_primary_key': 1,
300 "quantity": random.randint(1, 99)})
301 log.warning('test_pkey4',
302 extra={"action": 'test_pkey', "price": random.random() * 100,
303 'ae_primary_key': 'b',
304 'ae_permanent': True,
305 "quantity": random.randint(1, 99)})
306 request.environ['appenlight.force_send'] = 1
365 "ae_primary_key": "b",
366 "ae_permanent": "t",
367 "quantity": random.randint(1, 99),
368 },
369 )
370 log.warning(
371 "test_pkey3",
372 extra={
373 "action": "test_pkey",
374 "price": random.random() * 100,
375 "ae_primary_key": 1,
376 "quantity": random.randint(1, 99),
377 },
378 )
379 log.warning(
380 "test_pkey4",
381 extra={
382 "action": "test_pkey",
383 "price": random.random() * 100,
384 "ae_primary_key": "b",
385 "ae_permanent": True,
386 "quantity": random.randint(1, 99),
387 },
388 )
389 request.environ["appenlight.force_send"] = 1
307 390 return {}
308 391
309 392
310 @view_config(route_name='test', match_param='action=transaction',
311 renderer='string', permission='root_administration')
393 @view_config(
394 route_name="test",
395 match_param="action=transaction",
396 renderer="string",
397 permission="root_administration",
398 )
312 399 def transaction_test(request):
313 400 """
314 401 Test transactions
@@ -318,74 +405,92 b' def transaction_test(request):'
318 405 except:
319 406 request.tm.abort()
320 407 result = DBSession.execute("SELECT 1")
321 return 'OK'
408 return "OK"
322 409
323 410
324 @view_config(route_name='test', match_param='action=slow_request',
325 renderer='string', permission='root_administration')
411 @view_config(
412 route_name="test",
413 match_param="action=slow_request",
414 renderer="string",
415 permission="root_administration",
416 )
326 417 def slow_request(request):
327 418 """
328 419 Test a request that has some slow entries - including nested calls
329 420 """
330 421 users = DBSession.query(User).all()
331 422 import random
423
332 424 some_val = random.random()
333 425 import threading
426
334 427 t_id = id(threading.currentThread())
335 log.warning('slow_log %s %s ' % (some_val, t_id))
336 log.critical('tid %s' % t_id)
428 log.warning("slow_log %s %s " % (some_val, t_id))
429 log.critical("tid %s" % t_id)
337 430
338 @time_trace(name='baz_func %s' % some_val, min_duration=0.1)
431 @time_trace(name="baz_func %s" % some_val, min_duration=0.1)
339 432 def baz(arg):
340 433 time.sleep(0.32)
341 434 return arg
342 435
343 requests.get('http://ubuntu.com')
436 requests.get("http://ubuntu.com")
344 437
345 @time_trace(name='foo_func %s %s' % (some_val, t_id), min_duration=0.1)
438 @time_trace(name="foo_func %s %s" % (some_val, t_id), min_duration=0.1)
346 439 def foo(arg):
347 440 time.sleep(0.52)
348 log.warning('foo_func %s %s' % (some_val, t_id))
349 requests.get('http://ubuntu.com?test=%s' % some_val)
441 log.warning("foo_func %s %s" % (some_val, t_id))
442 requests.get("http://ubuntu.com?test=%s" % some_val)
350 443 return bar(arg)
351 444
352 @time_trace(name='bar_func %s %s' % (some_val, t_id), min_duration=0.1)
445 @time_trace(name="bar_func %s %s" % (some_val, t_id), min_duration=0.1)
353 446 def bar(arg):
354 log.warning('bar_func %s %s' % (some_val, t_id))
447 log.warning("bar_func %s %s" % (some_val, t_id))
355 448 time.sleep(1.52)
356 449 baz(arg)
357 450 baz(arg)
358 451 return baz(arg)
359 452
360 foo('a')
453 foo("a")
361 454 return {}
362 455
363 456
364 @view_config(route_name='test', match_param='action=styling',
365 renderer='appenlight:templates/tests/styling.jinja2',
366 permission='__no_permission_required__')
457 @view_config(
458 route_name="test",
459 match_param="action=styling",
460 renderer="appenlight:templates/tests/styling.jinja2",
461 permission="__no_permission_required__",
462 )
367 463 def styling(request):
368 464 """
369 465 Some styling test page
370 466 """
371 467 _ = str
372 request.session.flash(_(
373 'Your password got updated. '
374 'Next time log in with your new credentials.'))
375 request.session.flash(_(
376 'Something went wrong when we '
377 'tried to authorize you via external provider'),
378 'warning')
379 request.session.flash(_(
380 'Unfortunately there was a problem '
381 'processing your payment, please try again later.'),
382 'error')
468 request.session.flash(
469 _("Your password got updated. " "Next time log in with your new credentials.")
470 )
471 request.session.flash(
472 _(
473 "Something went wrong when we "
474 "tried to authorize you via external provider"
475 ),
476 "warning",
477 )
478 request.session.flash(
479 _(
480 "Unfortunately there was a problem "
481 "processing your payment, please try again later."
482 ),
483 "error",
484 )
383 485 return {}
384 486
385 487
386 @view_config(route_name='test', match_param='action=js_error',
387 renderer='appenlight:templates/tests/js_error.jinja2',
388 permission='__no_permission_required__')
488 @view_config(
489 route_name="test",
490 match_param="action=js_error",
491 renderer="appenlight:templates/tests/js_error.jinja2",
492 permission="__no_permission_required__",
493 )
389 494 def js(request):
390 495 """
391 496 Used for testing javascript client for error catching
@@ -393,9 +498,12 b' def js(request):'
393 498 return {}
394 499
395 500
396 @view_config(route_name='test', match_param='action=js_log',
397 renderer='appenlight:templates/tests/js_log.jinja2',
398 permission='__no_permission_required__')
501 @view_config(
502 route_name="test",
503 match_param="action=js_log",
504 renderer="appenlight:templates/tests/js_log.jinja2",
505 permission="__no_permission_required__",
506 )
399 507 def js_log(request):
400 508 """
401 509 Used for testing javascript client for logging
@@ -403,9 +511,12 b' def js_log(request):'
403 511 return {}
404 512
405 513
406 @view_config(route_name='test', match_param='action=log_requests',
407 renderer='string',
408 permission='__no_permission_required__')
514 @view_config(
515 route_name="test",
516 match_param="action=log_requests",
517 renderer="string",
518 permission="__no_permission_required__",
519 )
409 520 def log_requests(request):
410 521 """
411 522 Util view for printing json requests
@@ -413,13 +524,17 b' def log_requests(request):'
413 524 return {}
414 525
415 526
416 @view_config(route_name='test', match_param='action=url', renderer='string',
417 permission='__no_permission_required__')
527 @view_config(
528 route_name="test",
529 match_param="action=url",
530 renderer="string",
531 permission="__no_permission_required__",
532 )
418 533 def log_requests(request):
419 534 """
420 535 I have no fucking clue why I needed that ;-)
421 536 """
422 return request.route_url('reports', _app_url='https://appenlight.com')
537 return request.route_url("reports", _app_url="https://appenlight.com")
423 538
424 539
425 540 class TestClass(object):
@@ -430,16 +545,32 b' class TestClass(object):'
430 545 def __init__(self, request):
431 546 self.request = request
432 547
433 @view_config(route_name='test', match_param='action=test_a',
434 renderer='string', permission='root_administration')
435 @view_config(route_name='test', match_param='action=test_c',
436 renderer='string', permission='root_administration')
437 @view_config(route_name='test', match_param='action=test_d',
438 renderer='string', permission='root_administration')
548 @view_config(
549 route_name="test",
550 match_param="action=test_a",
551 renderer="string",
552 permission="root_administration",
553 )
554 @view_config(
555 route_name="test",
556 match_param="action=test_c",
557 renderer="string",
558 permission="root_administration",
559 )
560 @view_config(
561 route_name="test",
562 match_param="action=test_d",
563 renderer="string",
564 permission="root_administration",
565 )
439 566 def test_a(self):
440 return 'ok'
567 return "ok"
441 568
442 @view_config(route_name='test', match_param='action=test_b',
443 renderer='string', permission='root_administration')
569 @view_config(
570 route_name="test",
571 match_param="action=test_b",
572 renderer="string",
573 permission="root_administration",
574 )
444 575 def test_b(self):
445 return 'ok'
576 return "ok"
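One recurring artifact in this file's hunks: black reflows implicitly concatenated strings but never merges the literals themselves, which is why adjacent quoted fragments now sit side by side on a single line. Before/after sketch using the logging example from above (log is any configured logger):

import logging

log = logging.getLogger(__name__)

# Before black: one message split across lines via implicit concatenation.
log.warning('Matched GET /\xc4\x85\xc5\xbc\xc4\x87'
            '\xc4\x99\xc4\x99\xc4\x85/summary')

# After black: the call is exploded, but the two adjacent literals stay
# separate; black does not merge string constants.
log.warning(
    "Matched GET /\xc4\x85\xc5\xbc\xc4\x87" "\xc4\x99\xc4\x99\xc4\x85/summary"
)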
@@ -27,20 +27,23 b' from pyramid.view import view_config'
27 27 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
28 28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest
29 29 from pyramid.security import NO_PERMISSION_REQUIRED
30 from ziggurat_foundations.models.services.external_identity import \
31 ExternalIdentityService
30 from ziggurat_foundations.models.services.external_identity import (
31 ExternalIdentityService,
32 )
32 33 from ziggurat_foundations.models.services.user import UserService
33 34
34 35 from appenlight.lib import generate_random_string
35 36 from appenlight.lib.social import handle_social_data
36 from appenlight.lib.utils import channelstream_request, add_cors_headers, \
37 permission_tuple_to_dict
37 from appenlight.lib.utils import (
38 channelstream_request,
39 add_cors_headers,
40 permission_tuple_to_dict,
41 )
38 42 from appenlight.models import DBSession
39 43 from appenlight.models.alert_channels.email import EmailAlertChannel
40 44 from appenlight.models.alert_channel_action import AlertChannelAction
41 45 from appenlight.models.services.alert_channel import AlertChannelService
42 from appenlight.models.services.alert_channel_action import \
43 AlertChannelActionService
46 from appenlight.models.services.alert_channel_action import AlertChannelActionService
44 47 from appenlight.models.auth_token import AuthToken
45 48 from appenlight.models.report import REPORT_TYPE_MATRIX
46 49 from appenlight.models.user import User
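The import rewrites above follow the same rule: backslash continuations are replaced by black's parenthesized form with a trailing comma whenever the joined line would overflow. Reproducible with the same API sketch as before (19.x-style signature):

import black

src = (
    "from ziggurat_foundations.models.services.external_identity import \\\n"
    "    ExternalIdentityService\n"
)
print(black.format_str(src, mode=black.FileMode()))
# from ziggurat_foundations.models.services.external_identity import (
#     ExternalIdentityService,
# )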
@@ -53,33 +56,49 b' from webob.multidict import MultiDict'
53 56 log = logging.getLogger(__name__)
54 57
55 58
56 @view_config(route_name='users_no_id', renderer='json',
57 request_method="GET", permission='root_administration')
59 @view_config(
60 route_name="users_no_id",
61 renderer="json",
62 request_method="GET",
63 permission="root_administration",
64 )
58 65 def users_list(request):
59 66 """
60 67 Returns users list
61 68 """
62 props = ['user_name', 'id', 'first_name', 'last_name', 'email',
63 'last_login_date', 'status']
69 props = [
70 "user_name",
71 "id",
72 "first_name",
73 "last_name",
74 "email",
75 "last_login_date",
76 "status",
77 ]
64 78 users = UserService.all()
65 79 users_dicts = []
66 80 for user in users:
67 81 u_dict = user.get_dict(include_keys=props)
68 u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20)
82 u_dict["gravatar_url"] = UserService.gravatar_url(user, s=20)
69 83 users_dicts.append(u_dict)
70 84 return users_dicts
71 85
72 86
73 @view_config(route_name='users_no_id', renderer='json',
74 request_method="POST", permission='root_administration')
87 @view_config(
88 route_name="users_no_id",
89 renderer="json",
90 request_method="POST",
91 permission="root_administration",
92 )
75 93 def users_create(request):
76 94 """
77 95 Creates a new user
78 96 """
79 form = forms.UserCreateForm(MultiDict(request.safe_json_body or {}),
80 csrf_context=request)
97 form = forms.UserCreateForm(
98 MultiDict(request.safe_json_body or {}), csrf_context=request
99 )
81 100 if form.validate():
82 log.info('registering user')
101 log.info("registering user")
83 102 # probably not needed in the future since this requires root anyways
84 103 # lets keep this here in case we lower view permission in the future
85 104 # if request.registry.settings['appenlight.disable_registration']:
@@ -91,29 +110,42 b' def users_create(request):'
91 110 UserService.regenerate_security_code(user)
92 111 UserService.set_password(user, user.user_password)
93 112 user.status = 1 if form.status.data else 0
94 request.session.flash(_('User created'))
113 request.session.flash(_("User created"))
95 114 DBSession.flush()
96 return user.get_dict(exclude_keys=['security_code_date', 'notes',
97 'security_code', 'user_password'])
115 return user.get_dict(
116 exclude_keys=[
117 "security_code_date",
118 "notes",
119 "security_code",
120 "user_password",
121 ]
122 )
98 123 else:
99 124 return HTTPUnprocessableEntity(body=form.errors_json)
100 125
101 126
102 @view_config(route_name='users', renderer='json',
103 request_method="GET", permission='root_administration')
104 @view_config(route_name='users', renderer='json',
105 request_method="PATCH", permission='root_administration')
127 @view_config(
128 route_name="users",
129 renderer="json",
130 request_method="GET",
131 permission="root_administration",
132 )
133 @view_config(
134 route_name="users",
135 renderer="json",
136 request_method="PATCH",
137 permission="root_administration",
138 )
106 139 def users_update(request):
107 140 """
108 141 Updates user object
109 142 """
110 user = UserService.by_id(request.matchdict.get('user_id'))
143 user = UserService.by_id(request.matchdict.get("user_id"))
111 144 if not user:
112 145 return HTTPNotFound()
113 146 post_data = request.safe_json_body or {}
114 if request.method == 'PATCH':
115 form = forms.UserUpdateForm(MultiDict(post_data),
116 csrf_context=request)
147 if request.method == "PATCH":
148 form = forms.UserUpdateForm(MultiDict(post_data), csrf_context=request)
117 149 if form.validate():
118 150 form.populate_obj(user, ignore_none=True)
119 151 if form.user_password.data:
@@ -124,114 +156,148 b' def users_update(request):'
124 156 user.status = 0
125 157 else:
126 158 return HTTPUnprocessableEntity(body=form.errors_json)
127 return user.get_dict(exclude_keys=['security_code_date', 'notes',
128 'security_code', 'user_password'])
159 return user.get_dict(
160 exclude_keys=["security_code_date", "notes", "security_code", "user_password"]
161 )
129 162
130 163
131 @view_config(route_name='users_property',
132 match_param='key=resource_permissions',
133 renderer='json', permission='authenticated')
164 @view_config(
165 route_name="users_property",
166 match_param="key=resource_permissions",
167 renderer="json",
168 permission="authenticated",
169 )
134 170 def users_resource_permissions_list(request):
135 171 """
136 172 Get list of permissions assigned to specific resources
137 173 """
138 user = UserService.by_id(request.matchdict.get('user_id'))
174 user = UserService.by_id(request.matchdict.get("user_id"))
139 175 if not user:
140 176 return HTTPNotFound()
141 return [permission_tuple_to_dict(perm) for perm in
142 UserService.resources_with_possible_perms(user)]
177 return [
178 permission_tuple_to_dict(perm)
179 for perm in UserService.resources_with_possible_perms(user)
180 ]
143 181
144 182
145 @view_config(route_name='users', renderer='json',
146 request_method="DELETE", permission='root_administration')
183 @view_config(
184 route_name="users",
185 renderer="json",
186 request_method="DELETE",
187 permission="root_administration",
188 )
147 189 def users_DELETE(request):
148 190 """
149 191 Removes a user permanently from db - makes a check to see if after the
150 192 operation there will be at least one admin left
151 193 """
152 msg = _('There needs to be at least one administrator in the system')
153 user = UserService.by_id(request.matchdict.get('user_id'))
194 msg = _("There needs to be at least one administrator in the system")
195 user = UserService.by_id(request.matchdict.get("user_id"))
154 196 if user:
155 users = UserService.users_for_perms(['root_administration']).all()
197 users = UserService.users_for_perms(["root_administration"]).all()
156 198 if len(users) < 2 and user.id == users[0].id:
157 request.session.flash(msg, 'warning')
199 request.session.flash(msg, "warning")
158 200 else:
159 201 DBSession.delete(user)
160 request.session.flash(_('User removed'))
202 request.session.flash(_("User removed"))
161 203 return True
162 204 request.response.status = 422
163 205 return False
164 206
165 207
166 @view_config(route_name='users_self', renderer='json',
167 request_method="GET", permission='authenticated')
168 @view_config(route_name='users_self', renderer='json',
169 request_method="PATCH", permission='authenticated')
208 @view_config(
209 route_name="users_self",
210 renderer="json",
211 request_method="GET",
212 permission="authenticated",
213 )
214 @view_config(
215 route_name="users_self",
216 renderer="json",
217 request_method="PATCH",
218 permission="authenticated",
219 )
170 220 def users_self(request):
171 221 """
172 222 Updates user personal information
173 223 """
174 224
175 if request.method == 'PATCH':
225 if request.method == "PATCH":
176 226 form = forms.gen_user_profile_form()(
177 MultiDict(request.unsafe_json_body),
178 csrf_context=request)
227 MultiDict(request.unsafe_json_body), csrf_context=request
228 )
179 229 if form.validate():
180 230 form.populate_obj(request.user)
181 request.session.flash(_('Your profile got updated.'))
231 request.session.flash(_("Your profile got updated."))
182 232 else:
183 233 return HTTPUnprocessableEntity(body=form.errors_json)
184 234 return request.user.get_dict(
185 exclude_keys=['security_code_date', 'notes', 'security_code',
186 'user_password'],
187 extended_info=True)
235 exclude_keys=["security_code_date", "notes", "security_code", "user_password"],
236 extended_info=True,
237 )
188 238
189 239
190 @view_config(route_name='users_self_property',
191 match_param='key=external_identities', renderer='json',
192 request_method='GET', permission='authenticated')
240 @view_config(
241 route_name="users_self_property",
242 match_param="key=external_identities",
243 renderer="json",
244 request_method="GET",
245 permission="authenticated",
246 )
193 247 def users_external_identies(request):
194 248 user = request.user
195 identities = [{'provider': ident.provider_name,
196 'id': ident.external_user_name} for ident
197 in user.external_identities.all()]
249 identities = [
250 {"provider": ident.provider_name, "id": ident.external_user_name}
251 for ident in user.external_identities.all()
252 ]
198 253 return identities
199 254
200 255
201 @view_config(route_name='users_self_property',
202 match_param='key=external_identities', renderer='json',
203 request_method='DELETE', permission='authenticated')
256 @view_config(
257 route_name="users_self_property",
258 match_param="key=external_identities",
259 renderer="json",
260 request_method="DELETE",
261 permission="authenticated",
262 )
204 263 def users_external_identies_DELETE(request):
205 264 """
206 265 Unbinds external identities (google, twitter, etc.) from user account
207 266 """
208 267 user = request.user
209 268 for identity in user.external_identities.all():
210 log.info('found identity %s' % identity)
211 if (identity.provider_name == request.params.get('provider') and
212 identity.external_user_name == request.params.get('id')):
213 log.info('remove identity %s' % identity)
269 log.info("found identity %s" % identity)
270 if identity.provider_name == request.params.get(
271 "provider"
272 ) and identity.external_user_name == request.params.get("id"):
273 log.info("remove identity %s" % identity)
214 274 DBSession.delete(identity)
215 275 return True
216 276 return False
217 277
218 278
219 @view_config(route_name='users_self_property',
220 match_param='key=password', renderer='json',
221 request_method='PATCH', permission='authenticated')
279 @view_config(
280 route_name="users_self_property",
281 match_param="key=password",
282 renderer="json",
283 request_method="PATCH",
284 permission="authenticated",
285 )
222 286 def users_password(request):
223 287 """
224 288 Sets new password for user account
225 289 """
226 290 user = request.user
227 form = forms.ChangePasswordForm(MultiDict(request.unsafe_json_body),
228 csrf_context=request)
291 form = forms.ChangePasswordForm(
292 MultiDict(request.unsafe_json_body), csrf_context=request
293 )
229 294 form.old_password.user = user
230 295 if form.validate():
231 296 UserService.regenerate_security_code(user)
232 297 UserService.set_password(user, form.new_password.data)
233 msg = 'Your password got updated. ' \
234 'Next time log in with your new credentials.'
298 msg = (
299 "Your password got updated. " "Next time log in with your new credentials."
300 )
235 301 request.session.flash(_(msg))
236 302 return True
237 303 else:
@@ -239,35 +305,49 b' def users_password(request):'
239 305 return False
240 306
241 307
242 @view_config(route_name='users_self_property', match_param='key=websocket',
243 renderer='json', permission='authenticated')
308 @view_config(
309 route_name="users_self_property",
310 match_param="key=websocket",
311 renderer="json",
312 permission="authenticated",
313 )
244 314 def users_websocket(request):
245 315 """
246 316 Handle authorization of users trying to connect
247 317 """
248 318 # handle preflight request
249 319 user = request.user
250 if request.method == 'OPTIONS':
251 res = request.response.body('OK')
320 if request.method == "OPTIONS":
321 res = request.response.body("OK")
252 322 add_cors_headers(res)
253 323 return res
254 applications = UserService.resources_with_perms(user, ['view'], resource_types=['application'])
255 channels = ['app_%s' % app.resource_id for app in applications]
256 payload = {"username": user.user_name,
324 applications = UserService.resources_with_perms(
325 user, ["view"], resource_types=["application"]
326 )
327 channels = ["app_%s" % app.resource_id for app in applications]
328 payload = {
329 "username": user.user_name,
257 330 "conn_id": str(uuid.uuid4()),
258 "channels": channels
331 "channels": channels,
259 332 }
260 333 settings = request.registry.settings
261 334 response = channelstream_request(
262 settings['cometd.secret'], '/connect', payload,
263 servers=[request.registry.settings['cometd_servers']],
264 throw_exceptions=True)
335 settings["cometd.secret"],
336 "/connect",
337 payload,
338 servers=[request.registry.settings["cometd_servers"]],
339 throw_exceptions=True,
340 )
265 341 return payload
266 342
267 343
268 @view_config(route_name='users_self_property', request_method="GET",
269 match_param='key=alert_channels', renderer='json',
270 permission='authenticated')
344 @view_config(
345 route_name="users_self_property",
346 request_method="GET",
347 match_param="key=alert_channels",
348 renderer="json",
349 permission="authenticated",
350 )
271 351 def alert_channels(request):
272 352 """
273 353 Lists all available alert channels
@@ -276,8 +356,13 b' def alert_channels(request):'
276 356 return [c.get_dict(extended_info=True) for c in user.alert_channels]
277 357
278 358
279 @view_config(route_name='users_self_property', match_param='key=alert_actions',
280 request_method="GET", renderer='json', permission='authenticated')
359 @view_config(
360 route_name="users_self_property",
361 match_param="key=alert_actions",
362 request_method="GET",
363 renderer="json",
364 permission="authenticated",
365 )
281 366 def alert_actions(request):
282 367 """
283 368 Lists all available alert actions
@@ -286,41 +371,52 b' def alert_actions(request):'
286 371 return [r.get_dict(extended_info=True) for r in user.alert_actions]
287 372
288 373
289 @view_config(route_name='users_self_property', renderer='json',
290 match_param='key=alert_channels_rules', request_method='POST',
291 permission='authenticated')
374 @view_config(
375 route_name="users_self_property",
376 renderer="json",
377 match_param="key=alert_channels_rules",
378 request_method="POST",
379 permission="authenticated",
380 )
292 381 def alert_channels_rule_POST(request):
293 382 """
294 383 Creates new notification rule for specific alert channel
295 384 """
296 385 user = request.user
297 alert_action = AlertChannelAction(owner_id=request.user.id,
298 type='report')
386 alert_action = AlertChannelAction(owner_id=request.user.id, type="report")
299 387 DBSession.add(alert_action)
300 388 DBSession.flush()
301 389 return alert_action.get_dict()
302 390
303 391
304 @view_config(route_name='users_self_property', permission='authenticated',
305 match_param='key=alert_channels_rules',
306 renderer='json', request_method='DELETE')
392 @view_config(
393 route_name="users_self_property",
394 permission="authenticated",
395 match_param="key=alert_channels_rules",
396 renderer="json",
397 request_method="DELETE",
398 )
307 399 def alert_channels_rule_DELETE(request):
308 400 """
309 401 Removes specific alert channel rule
310 402 """
311 403 user = request.user
312 404 rule_action = AlertChannelActionService.by_owner_id_and_pkey(
313 user.id,
314 request.GET.get('pkey'))
405 user.id, request.GET.get("pkey")
406 )
315 407 if rule_action:
316 408 DBSession.delete(rule_action)
317 409 return True
318 410 return HTTPNotFound()
319 411
320 412
321 @view_config(route_name='users_self_property', permission='authenticated',
322 match_param='key=alert_channels_rules',
323 renderer='json', request_method='PATCH')
413 @view_config(
414 route_name="users_self_property",
415 permission="authenticated",
416 match_param="key=alert_channels_rules",
417 renderer="json",
418 request_method="PATCH",
419 )
324 420 def alert_channels_rule_PATCH(request):
325 421 """
326 422 Updates specific alert channel rule
@@ -328,41 +424,47 b' def alert_channels_rule_PATCH(request):'
328 424 user = request.user
329 425 json_body = request.unsafe_json_body
330 426
331 schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX)
427 schema = build_rule_schema(json_body["rule"], REPORT_TYPE_MATRIX)
332 428 try:
333 schema.deserialize(json_body['rule'])
429 schema.deserialize(json_body["rule"])
334 430 except colander.Invalid as exc:
335 431 return HTTPUnprocessableEntity(body=json.dumps(exc.asdict()))
336 432
337 433 rule_action = AlertChannelActionService.by_owner_id_and_pkey(
338 user.id,
339 request.GET.get('pkey'))
434 user.id, request.GET.get("pkey")
435 )
340 436
341 437 if rule_action:
342 rule_action.rule = json_body['rule']
343 rule_action.resource_id = json_body['resource_id']
344 rule_action.action = json_body['action']
438 rule_action.rule = json_body["rule"]
439 rule_action.resource_id = json_body["resource_id"]
440 rule_action.action = json_body["action"]
345 441 return rule_action.get_dict()
346 442 return HTTPNotFound()
347 443
348 444
349 @view_config(route_name='users_self_property', permission='authenticated',
350 match_param='key=alert_channels',
351 renderer='json', request_method='PATCH')
445 @view_config(
446 route_name="users_self_property",
447 permission="authenticated",
448 match_param="key=alert_channels",
449 renderer="json",
450 request_method="PATCH",
451 )
352 452 def alert_channels_PATCH(request):
353 453 user = request.user
354 channel_name = request.GET.get('channel_name')
355 channel_value = request.GET.get('channel_value')
454 channel_name = request.GET.get("channel_name")
455 channel_value = request.GET.get("channel_value")
356 456 # iterate over channels
357 457 channel = None
358 458 for channel in user.alert_channels:
359 if (channel.channel_name == channel_name and
360 channel.channel_value == channel_value):
459 if (
460 channel.channel_name == channel_name
461 and channel.channel_value == channel_value
462 ):
361 463 break
362 464 if not channel:
363 465 return HTTPNotFound()
364 466
365 allowed_keys = ['daily_digest', 'send_alerts']
467 allowed_keys = ["daily_digest", "send_alerts"]
366 468 for k, v in request.unsafe_json_body.items():
367 469 if k in allowed_keys:
368 470 setattr(channel, k, v)
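alert_channels_PATCH uses the same whitelist idiom as report_groups_PATCH earlier in this diff; if the pattern keeps spreading, a shared helper along these lines (hypothetical, not part of this commit) would remove the duplication:

def apply_patch(obj, json_body, allowed_keys):
    """Copy only whitelisted keys from a JSON payload onto a model object."""
    for k, v in json_body.items():
        if k in allowed_keys:
            setattr(obj, k, v)
    return obj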
@@ -371,67 +473,84 b' def alert_channels_PATCH(request):'
371 473 return channel.get_dict()
372 474
373 475
374 @view_config(route_name='users_self_property', permission='authenticated',
375 match_param='key=alert_channels',
376 request_method="POST", renderer='json')
476 @view_config(
477 route_name="users_self_property",
478 permission="authenticated",
479 match_param="key=alert_channels",
480 request_method="POST",
481 renderer="json",
482 )
377 483 def alert_channels_POST(request):
378 484 """
379 485 Creates a new email alert channel for user, sends a validation email
380 486 """
381 487 user = request.user
382 form = forms.EmailChannelCreateForm(MultiDict(request.unsafe_json_body),
383 csrf_context=request)
488 form = forms.EmailChannelCreateForm(
489 MultiDict(request.unsafe_json_body), csrf_context=request
490 )
384 491 if not form.validate():
385 492 return HTTPUnprocessableEntity(body=form.errors_json)
386 493
387 494 email = form.email.data.strip()
388 495 channel = EmailAlertChannel()
389 channel.channel_name = 'email'
496 channel.channel_name = "email"
390 497 channel.channel_value = email
391 498 security_code = generate_random_string(10)
392 channel.channel_json_conf = {'security_code': security_code}
499 channel.channel_json_conf = {"security_code": security_code}
393 500 user.alert_channels.append(channel)
394 501
395 email_vars = {'user': user,
396 'email': email,
397 'request': request,
398 'security_code': security_code,
399 'email_title': "AppEnlight :: "
400 "Please authorize your email"}
502 email_vars = {
503 "user": user,
504 "email": email,
505 "request": request,
506 "security_code": security_code,
507 "email_title": "AppEnlight :: " "Please authorize your email",
508 }
401 509
402 UserService.send_email(request, recipients=[email],
510 UserService.send_email(
511 request,
512 recipients=[email],
403 513 variables=email_vars,
404 template='/email_templates/authorize_email.jinja2')
405 request.session.flash(_('Your alert channel was '
406 'added to the system.'))
514 template="/email_templates/authorize_email.jinja2",
515 )
516 request.session.flash(_("Your alert channel was " "added to the system."))
407 517 request.session.flash(
408 _('You need to authorize your email channel, a message was '
409 'sent containing necessary information.'),
410 'warning')
518 _(
519 "You need to authorize your email channel, a message was "
520 "sent containing necessary information."
521 ),
522 "warning",
523 )
411 524 DBSession.flush()
412 525 channel.get_dict()
413 526
414 527
415 @view_config(route_name='section_view',
416 match_param=['section=user_section',
417 'view=alert_channels_authorize'],
418 renderer='string', permission='authenticated')
528 @view_config(
529 route_name="section_view",
530 match_param=["section=user_section", "view=alert_channels_authorize"],
531 renderer="string",
532 permission="authenticated",
533 )
419 534 def alert_channels_authorize(request):
420 535 """
421 536 Performs alert channel authorization based on auth code sent in email
422 537 """
423 538 user = request.user
424 539 for channel in user.alert_channels:
425 security_code = request.params.get('security_code', '')
426 if channel.channel_json_conf['security_code'] == security_code:
540 security_code = request.params.get("security_code", "")
541 if channel.channel_json_conf["security_code"] == security_code:
427 542 channel.channel_validated = True
428 request.session.flash(_('Your email was authorized.'))
429 return HTTPFound(location=request.route_url('/'))
543 request.session.flash(_("Your email was authorized."))
544 return HTTPFound(location=request.route_url("/"))
430 545
431 546
432 @view_config(route_name='users_self_property', request_method="DELETE",
433 match_param='key=alert_channels', renderer='json',
434 permission='authenticated')
547 @view_config(
548 route_name="users_self_property",
549 request_method="DELETE",
550 match_param="key=alert_channels",
551 renderer="json",
552 permission="authenticated",
553 )
435 554 def alert_channel_DELETE(request):
436 555 """
437 556 Removes alert channel from user's channels
@@ -439,20 +558,25 b' def alert_channel_DELETE(request):'
439 558 user = request.user
440 559 channel = None
441 560 for chan in user.alert_channels:
442 if (chan.channel_name == request.params.get('channel_name') and
443 chan.channel_value == request.params.get('channel_value')):
561 if chan.channel_name == request.params.get(
562 "channel_name"
563 ) and chan.channel_value == request.params.get("channel_value"):
444 564 channel = chan
445 565 break
446 566 if channel:
447 567 user.alert_channels.remove(channel)
448 request.session.flash(_('Your channel was removed.'))
568 request.session.flash(_("Your channel was removed."))
449 569 return True
450 570 return False
451 571
452 572
453 @view_config(route_name='users_self_property', permission='authenticated',
454 match_param='key=alert_channels_actions_binds',
455 renderer='json', request_method="POST")
573 @view_config(
574 route_name="users_self_property",
575 permission="authenticated",
576 match_param="key=alert_channels_actions_binds",
577 renderer="json",
578 request_method="POST",
579 )
456 580 def alert_channels_actions_binds_POST(request):
457 581 """
458 582 Adds alert action to users channels
@@ -460,12 +584,12 b' def alert_channels_actions_binds_POST(request):'
460 584 user = request.user
461 585 json_body = request.unsafe_json_body
462 586 channel = AlertChannelService.by_owner_id_and_pkey(
463 user.id,
464 json_body.get('channel_pkey'))
587 user.id, json_body.get("channel_pkey")
588 )
465 589
466 590 rule_action = AlertChannelActionService.by_owner_id_and_pkey(
467 user.id,
468 json_body.get('action_pkey'))
591 user.id, json_body.get("action_pkey")
592 )
469 593
470 594 if channel and rule_action:
471 595 if channel.pkey not in [c.pkey for c in rule_action.channels]:
@@ -474,21 +598,25 b' def alert_channels_actions_binds_POST(request):'
474 598 return HTTPUnprocessableEntity()
475 599
476 600
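
Note that the bind endpoint reads its parameters from the JSON request body (request.unsafe_json_body), not from the query string. A sketch under the same assumed-URL caveat:

    import requests

    # Hypothetical path; route_name="users_self_property",
    # key=alert_channels_actions_binds.
    url = "https://appenlight.example.com/users/self/alert_channels_actions_binds"

    resp = requests.post(
        url,
        json={"channel_pkey": 1, "action_pkey": 2},  # pkeys obtained from earlier calls
        cookies={"session": "placeholder"},  # placeholder session cookie
    )
    # the view answers 422 (HTTPUnprocessableEntity) on the failure path
    # shown above, e.g. when a pkey does not resolve for the current user
    print(resp.status_code)
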
477 @view_config(route_name='users_self_property', request_method="DELETE",
478 match_param='key=alert_channels_actions_binds',
479 renderer='json', permission='authenticated')
601 @view_config(
602 route_name="users_self_property",
603 request_method="DELETE",
604 match_param="key=alert_channels_actions_binds",
605 renderer="json",
606 permission="authenticated",
607 )
480 608 def alert_channels_actions_binds_DELETE(request):
481 609 """
482 610 Removes an alert action from the user's channels
483 611 """
484 612 user = request.user
485 613 channel = AlertChannelService.by_owner_id_and_pkey(
486 user.id,
487 request.GET.get('channel_pkey'))
614 user.id, request.GET.get("channel_pkey")
615 )
488 616
489 617 rule_action = AlertChannelActionService.by_owner_id_and_pkey(
490 user.id,
491 request.GET.get('action_pkey'))
618 user.id, request.GET.get("action_pkey")
619 )
492 620
493 621 if channel and rule_action:
494 622 if channel.pkey in [c.pkey for c in rule_action.channels]:
@@ -497,18 +625,19 b' def alert_channels_actions_binds_DELETE(request):'
497 625 return HTTPUnprocessableEntity()
498 626
499 627
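
The unbind counterpart takes the same two pkeys but from the query string (request.GET) rather than a JSON body, a sensible split given how unevenly DELETE request bodies are supported. Sketch under the same URL assumption:

    import requests

    url = "https://appenlight.example.com/users/self/alert_channels_actions_binds"  # assumed

    resp = requests.delete(
        url,
        params={"channel_pkey": 1, "action_pkey": 2},
        cookies={"session": "placeholder"},  # placeholder session cookie
    )
    print(resp.status_code)  # 422 on the failure path shown above
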
500 @view_config(route_name='social_auth_abort',
501 renderer='string', permission=NO_PERMISSION_REQUIRED)
628 @view_config(
629 route_name="social_auth_abort", renderer="string", permission=NO_PERMISSION_REQUIRED
630 )
502 631 def oauth_abort(request):
503 632 """
504 633 Handles problems with authorization via velruse
505 634 """
506 635
507 636
508 @view_config(route_name='social_auth', permission=NO_PERMISSION_REQUIRED)
637 @view_config(route_name="social_auth", permission=NO_PERMISSION_REQUIRED)
509 638 def social_auth(request):
510 639 # Get the internal provider name URL variable.
511 provider_name = request.matchdict.get('provider')
640 provider_name = request.matchdict.get("provider")
512 641
513 642 # Start the login procedure.
514 643 adapter = WebObAdapter(request, request.response)
@@ -523,12 +652,17 b' def social_auth(request):'
523 652
524 653 def handle_auth_error(request, result):
525 654 # Login procedure finished with an error.
526 request.session.pop('zigg.social_auth', None)
527 request.session.flash(_('Something went wrong when we tried to '
528 'authorize you via external provider. '
529 'Please try again.'), 'warning')
655 request.session.pop("zigg.social_auth", None)
656 request.session.flash(
657 _(
658 "Something went wrong when we tried to "
659 "authorize you via external provider. "
660 "Please try again."
661 ),
662 "warning",
663 )
530 664
531 return HTTPFound(location=request.route_url('/'))
665 return HTTPFound(location=request.route_url("/"))
532 666
533 667
534 668 def handle_auth_success(request, result):
@@ -539,140 +673,169 b' def handle_auth_success(request, result):'
539 673 result.user.update()
540 674
541 675 social_data = {
542 'user': {'data': result.user.data},
543 'credentials': result.user.credentials
676 "user": {"data": result.user.data},
677 "credentials": result.user.credentials,
544 678 }
545 679 # normalize data
546 social_data['user']['id'] = result.user.id
547 user_name = result.user.username or ''
680 social_data["user"]["id"] = result.user.id
681 user_name = result.user.username or ""
548 682 # use the e-mail address as the username for Google accounts
549 if (social_data['credentials'].provider_name == 'google' and
550 result.user.email):
683 if social_data["credentials"].provider_name == "google" and result.user.email:
551 684 user_name = result.user.email
552 social_data['user']['user_name'] = user_name
553 social_data['user']['email'] = result.user.email or ''
685 social_data["user"]["user_name"] = user_name
686 social_data["user"]["email"] = result.user.email or ""
554 687
555 request.session['zigg.social_auth'] = social_data
688 request.session["zigg.social_auth"] = social_data
556 689 # user is logged in, so bind their external identity to the account
557 690 if request.user:
558 691 handle_social_data(request, request.user, social_data)
559 request.session.pop('zigg.social_auth', None)
560 return HTTPFound(location=request.route_url('/'))
692 request.session.pop("zigg.social_auth", None)
693 return HTTPFound(location=request.route_url("/"))
561 694 else:
562 695 user = ExternalIdentityService.user_by_external_id_and_provider(
563 social_data['user']['id'],
564 social_data['credentials'].provider_name
696 social_data["user"]["id"], social_data["credentials"].provider_name
565 697 )
566 698 # fix legacy accounts with wrong google ID
567 if not user and social_data['credentials'].provider_name == 'google':
699 if not user and social_data["credentials"].provider_name == "google":
568 700 user = ExternalIdentityService.user_by_external_id_and_provider(
569 social_data['user']['email'],
570 social_data['credentials'].provider_name)
701 social_data["user"]["email"], social_data["credentials"].provider_name
702 )
571 703
572 704 # the user's external identity is already present in our db
573 705 if user:
574 706 handle_social_data(request, user, social_data)
575 707 headers = security.remember(request, user.id)
576 request.session.pop('zigg.social_auth', None)
577 return HTTPFound(location=request.route_url('/'), headers=headers)
708 request.session.pop("zigg.social_auth", None)
709 return HTTPFound(location=request.route_url("/"), headers=headers)
578 710 else:
579 msg = 'You need to finish registration ' \
580 'process to bind your external identity to your account ' \
581 'or sign in to existing account'
711 msg = (
712 "You need to finish registration "
713 "process to bind your external identity to your account "
714 "or sign in to existing account"
715 )
582 716 request.session.flash(msg)
583 return HTTPFound(location=request.route_url('register'))
717 return HTTPFound(location=request.route_url("register"))
584 718
585 719
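
The lookup-with-fallback in handle_auth_success deserves a second look: legacy rows were keyed by e-mail address instead of the numeric Google ID, so a miss on the ID triggers a retry by e-mail. A self-contained sketch of just that logic, with a dict standing in for ExternalIdentityService.user_by_external_id_and_provider and a flattened social_data shape for brevity:

    # The dict fakes ExternalIdentityService.user_by_external_id_and_provider.
    FAKE_IDENTITIES = {
        ("alice@example.com", "google"): "alice",  # legacy row keyed by e-mail
        ("1234567890", "github"): "bob",  # modern row keyed by provider ID
    }

    def lookup(external_id, provider):
        return FAKE_IDENTITIES.get((external_id, provider))

    def resolve_user(social_data):
        uid = social_data["user"]["id"]
        provider = social_data["provider_name"]  # simplified from credentials.provider_name
        user = lookup(uid, provider)
        # fix legacy accounts stored under the e-mail instead of the google ID
        if not user and provider == "google":
            user = lookup(social_data["user"]["email"], provider)
        return user

    print(
        resolve_user(
            {
                "user": {"id": "999", "email": "alice@example.com"},
                "provider_name": "google",
            }
        )
    )  # -> "alice", found via the e-mail fallback
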
586 @view_config(route_name='section_view', permission='authenticated',
587 match_param=['section=users_section', 'view=search_users'],
588 renderer='json')
720 @view_config(
721 route_name="section_view",
722 permission="authenticated",
723 match_param=["section=users_section", "view=search_users"],
724 renderer="json",
725 )
589 726 def search_users(request):
590 727 """
591 728 Returns a list of users for autocomplete
592 729 """
593 730 user = request.user
594 731 items_returned = []
595 like_condition = request.params.get('user_name', '') + '%'
732 like_condition = request.params.get("user_name", "") + "%"
596 733 # if an e-mail address was passed, append that user first
597 found_user = UserService.by_email(request.params.get('user_name', ''))
734 found_user = UserService.by_email(request.params.get("user_name", ""))
598 735 if found_user:
599 name = '{} {}'.format(found_user.first_name, found_user.last_name)
600 items_returned.append({'user': found_user.user_name, 'name': name})
736 name = "{} {}".format(found_user.first_name, found_user.last_name)
737 items_returned.append({"user": found_user.user_name, "name": name})
601 738 for found_user in UserService.user_names_like(like_condition).limit(20):
602 name = '{} {}'.format(found_user.first_name, found_user.last_name)
603 items_returned.append({'user': found_user.user_name, 'name': name})
739 name = "{} {}".format(found_user.first_name, found_user.last_name)
740 items_returned.append({"user": found_user.user_name, "name": name})
604 741 return items_returned
605 742
606 743
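
search_users does a simple prefix match (user_name + "%") capped at 20 rows, and pushes an exact e-mail hit to the top of the list. Example call, with the path assumed as before:

    import requests

    # Hypothetical path for section=users_section / view=search_users.
    url = "https://appenlight.example.com/section/users_section/search_users"

    resp = requests.get(
        url,
        params={"user_name": "ali"},
        cookies={"session": "placeholder"},  # placeholder session cookie
    )
    print(resp.json())  # e.g. [{"user": "alice", "name": "Alice Doe"}, ...]
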
607 @view_config(route_name='users_self_property', match_param='key=auth_tokens',
608 request_method="GET", renderer='json', permission='authenticated')
609 @view_config(route_name='users_property', match_param='key=auth_tokens',
610 request_method="GET", renderer='json', permission='authenticated')
744 @view_config(
745 route_name="users_self_property",
746 match_param="key=auth_tokens",
747 request_method="GET",
748 renderer="json",
749 permission="authenticated",
750 )
751 @view_config(
752 route_name="users_property",
753 match_param="key=auth_tokens",
754 request_method="GET",
755 renderer="json",
756 permission="authenticated",
757 )
611 758 def auth_tokens_list(request):
612 759 """
613 760 Lists all of the user's auth tokens
614 761 """
615 if request.matched_route.name == 'users_self_property':
762 if request.matched_route.name == "users_self_property":
616 763 user = request.user
617 764 else:
618 user = UserService.by_id(request.matchdict.get('user_id'))
765 user = UserService.by_id(request.matchdict.get("user_id"))
619 766 if not user:
620 767 return HTTPNotFound()
621 768 return [c.get_dict() for c in user.auth_tokens]
622 769
623 770
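
The token list is registered twice: users_self_property serves the current user, while users_property looks another user up by user_id (returning 404 when the id is unknown). A sketch of both, paths assumed:

    import requests

    session = requests.Session()
    session.cookies.set("session", "placeholder")  # hypothetical auth cookie

    # own tokens (route_name="users_self_property"); path is an assumption
    own = session.get("https://appenlight.example.com/users/self/auth_tokens")

    # another user's tokens (route_name="users_property"); 404 if id 42 is unknown
    other = session.get("https://appenlight.example.com/users/42/auth_tokens")
    print(own.json(), other.status_code)
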
624 @view_config(route_name='users_self_property', match_param='key=auth_tokens',
625 request_method="POST", renderer='json',
626 permission='authenticated')
627 @view_config(route_name='users_property', match_param='key=auth_tokens',
628 request_method="POST", renderer='json',
629 permission='authenticated')
771 @view_config(
772 route_name="users_self_property",
773 match_param="key=auth_tokens",
774 request_method="POST",
775 renderer="json",
776 permission="authenticated",
777 )
778 @view_config(
779 route_name="users_property",
780 match_param="key=auth_tokens",
781 request_method="POST",
782 renderer="json",
783 permission="authenticated",
784 )
630 785 def auth_tokens_POST(request):
631 786 """
632 787 Creates a new auth token for the user
633 788 """
634 if request.matched_route.name == 'users_self_property':
789 if request.matched_route.name == "users_self_property":
635 790 user = request.user
636 791 else:
637 user = UserService.by_id(request.matchdict.get('user_id'))
792 user = UserService.by_id(request.matchdict.get("user_id"))
638 793 if not user:
639 794 return HTTPNotFound()
640 795
641 796 req_data = request.safe_json_body or {}
642 if not req_data.get('expires'):
643 req_data.pop('expires', None)
797 if not req_data.get("expires"):
798 req_data.pop("expires", None)
644 799 form = forms.AuthTokenCreateForm(MultiDict(req_data), csrf_context=request)
645 800 if not form.validate():
646 801 return HTTPUnprocessableEntity(body=form.errors_json)
647 802 token = AuthToken()
648 803 form.populate_obj(token)
649 804 if token.expires:
650 interval = h.time_deltas.get(token.expires)['delta']
805 interval = h.time_deltas.get(token.expires)["delta"]
651 806 token.expires = datetime.datetime.utcnow() + interval
652 807 user.auth_tokens.append(token)
653 808 DBSession.flush()
654 809 return token.get_dict()
655 810
656 811
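
On creation, an empty or missing "expires" is dropped before validation, so the token never expires; otherwise the value is resolved through h.time_deltas and turned into an absolute UTC timestamp. A sketch of the request; both field names are guesses, since AuthTokenCreateForm and the accepted keys of h.time_deltas are not shown in this diff, and a CSRF token may also be required (the form is built with csrf_context=request):

    import requests

    url = "https://appenlight.example.com/users/self/auth_tokens"  # assumed path

    resp = requests.post(
        url,
        json={"description": "CI token", "expires": "30d"},  # both values are guesses
        cookies={"session": "placeholder"},  # placeholder session cookie
    )
    # 422 with form.errors_json on validation failure; otherwise the
    # new token serialized via token.get_dict()
    print(resp.status_code, resp.json())
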
657 @view_config(route_name='users_self_property', match_param='key=auth_tokens',
658 request_method="DELETE", renderer='json',
659 permission='authenticated')
660 @view_config(route_name='users_property', match_param='key=auth_tokens',
661 request_method="DELETE", renderer='json',
662 permission='authenticated')
812 @view_config(
813 route_name="users_self_property",
814 match_param="key=auth_tokens",
815 request_method="DELETE",
816 renderer="json",
817 permission="authenticated",
818 )
819 @view_config(
820 route_name="users_property",
821 match_param="key=auth_tokens",
822 request_method="DELETE",
823 renderer="json",
824 permission="authenticated",
825 )
663 826 def auth_tokens_DELETE(request):
664 827 """
665 828 Removes an auth token from the user's tokens
666 829 """
667 if request.matched_route.name == 'users_self_property':
830 if request.matched_route.name == "users_self_property":
668 831 user = request.user
669 832 else:
670 user = UserService.by_id(request.matchdict.get('user_id'))
833 user = UserService.by_id(request.matchdict.get("user_id"))
671 834 if not user:
672 835 return HTTPNotFound()
673 836
674 837 for token in user.auth_tokens:
675 if token.token == request.params.get('token'):
838 if token.token == request.params.get("token"):
676 839 user.auth_tokens.remove(token)
677 840 return True
678 841 return False
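
Deletion matches on the token string itself, passed as a query parameter, and answers JSON true/false just like the channel delete above. A final sketch, path assumed:

    import requests

    url = "https://appenlight.example.com/users/self/auth_tokens"  # assumed path

    resp = requests.delete(
        url,
        params={"token": "the-token-string"},  # hypothetical token value
        cookies={"session": "placeholder"},  # placeholder session cookie
    )
    print(resp.json())  # True if the token was found and removed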