Note: the change set is too big for the diff viewer; the diff below is truncated.
--- a/setup.py
+++ b/setup.py
@@ -4,12 +4,12 @@ import re
 from setuptools import setup, find_packages
 
 here = os.path.abspath(os.path.dirname(__file__))
-README = open(os.path.join(here, 'README.rst')).read()
-CHANGES = open(os.path.join(here, 'CHANGELOG.rst')).read()
+README = open(os.path.join(here, "README.rst")).read()
+CHANGES = open(os.path.join(here, "CHANGELOG.rst")).read()
 
-REQUIREMENTS = open(os.path.join(here, 'requirements.txt')).readlines()
+REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines()
 
-compiled = re.compile('([^=><]*).*')
+compiled = re.compile("([^=><]*).*")
 
 
 def parse_req(req):
@@ -21,7 +21,8 @@ requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]
 
 def _get_meta_var(name, data, callback_handler=None):
     import re
-    matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
+
+    matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
     if matches:
         if not callable(callback_handler):
             callback_handler = lambda v: v
@@ -29,21 +30,22 @@ def _get_meta_var(name, data, callback_handler=None):
         return callback_handler(eval(matches.groups()[0]))
 
 
-with open(os.path.join(here, 'src', 'appenlight', '__init__.py'), 'r') as _meta:
+with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta:
     _metadata = _meta.read()
 
-with open(os.path.join(here, 'VERSION'), 'r') as _meta_version:
+with open(os.path.join(here, "VERSION"), "r") as _meta_version:
     __version__ = _meta_version.read().strip()
 
-__license__ = _get_meta_var('__license__', _metadata)
-__author__ = _get_meta_var('__author__', _metadata)
-__url__ = _get_meta_var('__url__', _metadata)
+__license__ = _get_meta_var("__license__", _metadata)
+__author__ = _get_meta_var("__author__", _metadata)
+__url__ = _get_meta_var("__url__", _metadata)
 
-found_packages = find_packages('src')
-found_packages.append('appenlight.migrations.versions')
-setup(name='appenlight',
-      description='appenlight',
-      long_description=README + '\n\n' + CHANGES,
-      classifiers=[
-          "Programming Language :: Python",
-          "Framework :: Pylons",
+found_packages = find_packages("src")
+found_packages.append("appenlight.migrations.versions")
+setup(
+    name="appenlight",
+    description="appenlight",
+    long_description=README + "\n\n" + CHANGES,
+    classifiers=[
+        "Programming Language :: Python",
+        "Framework :: Pylons",
@@ -54,28 +56,34 @@ setup(name='appenlight',
-      license=__license__,
-      author=__author__,
-      url=__url__,
-      keywords='web wsgi bfg pylons pyramid',
-      package_dir={'': 'src'},
-      packages=found_packages,
-      include_package_data=True,
-      zip_safe=False,
-      test_suite='appenlight',
-      install_requires=requires,
-      extras_require={
-          "dev": ["coverage", "pytest", "pyramid", "tox", "mock", "pytest-mock", "webtest"],
-          "lint": ["black"],
-      },
-      entry_points={
-          'paste.app_factory': [
-              'main = appenlight:main'
-          ],
-          'console_scripts': [
-              'appenlight-cleanup = appenlight.scripts.cleanup:main',
-              'appenlight-initializedb = appenlight.scripts.initialize_db:main',
-              'appenlight-migratedb = appenlight.scripts.migratedb:main',
-              'appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main',
-              'appenlight-static = appenlight.scripts.static:main',
-              'appenlight-make-config = appenlight.scripts.make_config:main',
-          ]
-      }
-)
+    license=__license__,
+    author=__author__,
+    url=__url__,
+    keywords="web wsgi bfg pylons pyramid",
+    package_dir={"": "src"},
+    packages=found_packages,
+    include_package_data=True,
+    zip_safe=False,
+    test_suite="appenlight",
+    install_requires=requires,
+    extras_require={
+        "dev": [
+            "coverage",
+            "pytest",
+            "pyramid",
+            "tox",
+            "mock",
+            "pytest-mock",
+            "webtest",
+        ],
+        "lint": ["black"],
+    },
+    entry_points={
+        "paste.app_factory": ["main = appenlight:main"],
+        "console_scripts": [
+            "appenlight-cleanup = appenlight.scripts.cleanup:main",
+            "appenlight-initializedb = appenlight.scripts.initialize_db:main",
+            "appenlight-migratedb = appenlight.scripts.migratedb:main",
+            "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main",
+            "appenlight-static = appenlight.scripts.static:main",
+            "appenlight-make-config = appenlight.scripts.make_config:main",
+        ],
+    },
+)
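Aside: the "([^=><]*).*" pattern reformatted in the first hunk is what parse_req uses to reduce each requirements.txt line to a bare package name. The body of parse_req sits outside these hunks, so the sketch below is an illustration under that assumption, not verbatim source:

    import re

    compiled = re.compile("([^=><]*).*")

    def parse_req(req):
        # capture everything up to the first version comparator (=, >, <)
        return compiled.search(req).group(1).strip()

    print(parse_req("redis>=2.10.0\n"))   # -> redis
    print(parse_req("pyramid==1.10\n"))   # -> pyramid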
--- a/src/appenlight/__init__.py
+++ b/src/appenlight/__init__.py
@@ -38,15 +38,17 @@ from redlock import Redlock
 from sqlalchemy import engine_from_config
 
 from appenlight.celery import configure_celery
-from appenlight.lib.configurator import (CythonCompatConfigurator,
-                                         register_appenlight_plugin)
+from appenlight.lib.configurator import (
+    CythonCompatConfigurator,
+    register_appenlight_plugin,
+)
 from appenlight.lib import cache_regions
 from appenlight.lib.ext_json import json
 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
 
-__license__ = 'Apache 2.0'
-__author__ = 'RhodeCode GmbH'
-__url__ = 'http://rhodecode.com'
+__license__ = "Apache 2.0"
+__author__ = "RhodeCode GmbH"
+__url__ = "http://rhodecode.com"
 __version__ = pkg_resources.get_distribution("appenlight").parsed_version
 
 json_renderer = JSON(serializer=json.dumps, indent=4)
@@ -59,7 +61,7 @@ def datetime_adapter(obj, request):
 
 
 def all_permissions_adapter(obj, request):
-    return '__all_permissions__'
+    return "__all_permissions__"
 
 
 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
@@ -70,91 +72,109 @@ def main(global_config, **settings):
     """ This function returns a Pyramid WSGI application.
     """
     auth_tkt_policy = AuthTktAuthenticationPolicy(
-        settings['authtkt.secret'],
-        hashalg='sha512',
+        settings["authtkt.secret"],
+        hashalg="sha512",
         callback=groupfinder,
         max_age=2592000,
-        secure=asbool(settings.get('authtkt.secure', 'false')))
-    auth_token_policy = AuthTokenAuthenticationPolicy(
-        callback=groupfinder
+        secure=asbool(settings.get("authtkt.secure", "false")),
     )
+    auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder)
     authorization_policy = ACLAuthorizationPolicy()
     authentication_policy = AuthenticationStackPolicy()
-    authentication_policy.add_policy('auth_tkt', auth_tkt_policy)
-    authentication_policy.add_policy('auth_token', auth_token_policy)
+    authentication_policy.add_policy("auth_tkt", auth_tkt_policy)
+    authentication_policy.add_policy("auth_token", auth_token_policy)
     # set crypto key
-    encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
+    encryption.ENCRYPTION_SECRET = settings.get("encryption_secret")
     # import this later so encyption key can be monkeypatched
     from appenlight.models import DBSession, register_datastores
 
     # registration
-    settings['appenlight.disable_registration'] = asbool(
-        settings.get('appenlight.disable_registration'))
+    settings["appenlight.disable_registration"] = asbool(
+        settings.get("appenlight.disable_registration")
+    )
 
     # update config with cometd info
-    settings['cometd_servers'] = {'server': settings['cometd.server'],
-                                  'secret': settings['cometd.secret']}
+    settings["cometd_servers"] = {
+        "server": settings["cometd.server"],
+        "secret": settings["cometd.secret"],
+    }
 
     # Create the Pyramid Configurator.
-    settings['_mail_url'] = settings['mailing.app_url']
+    settings["_mail_url"] = settings["mailing.app_url"]
     config = CythonCompatConfigurator(
         settings=settings,
         authentication_policy=authentication_policy,
         authorization_policy=authorization_policy,
-        root_factory='appenlight.security.RootFactory',
-        default_permission='view')
+        root_factory="appenlight.security.RootFactory",
+        default_permission="view",
+    )
     # custom registry variables
 
     # resource type information
-    config.registry.resource_types = ['resource', 'application']
+    config.registry.resource_types = ["resource", "application"]
     # plugin information
     config.registry.appenlight_plugins = {}
 
-    config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN')
-    config.add_view_deriver('appenlight.predicates.csrf_view',
-                            name='csrf_view')
+    config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN")
+    config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view")
 
     # later, when config is available
-    dogpile_config = {'url': settings['redis.url'],
-                      "redis_expiration_time": 86400,
-                      "redis_distributed_lock": True}
+    dogpile_config = {
+        "url": settings["redis.url"],
+        "redis_expiration_time": 86400,
+        "redis_distributed_lock": True,
+    }
     cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
     config.registry.cache_regions = cache_regions.regions
-    engine = engine_from_config(settings, 'sqlalchemy.',
-                                json_serializer=json.dumps)
+    engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps)
     DBSession.configure(bind=engine)
 
     # json rederer that serializes datetime
-    config.add_renderer('json', json_renderer)
-    config.add_request_method('appenlight.lib.request.es_conn', 'es_conn', property=True)
-    config.add_request_method('appenlight.lib.request.get_user', 'user',
-                              reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.get_csrf_token',
-                              'csrf_token', reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.safe_json_body',
-                              'safe_json_body', reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.unsafe_json_body',
-                              'unsafe_json_body', reify=True, property=True)
-    config.add_request_method('appenlight.lib.request.add_flash_to_headers',
-                              'add_flash_to_headers')
-    config.add_request_method('appenlight.lib.request.get_authomatic',
-                              'authomatic', reify=True)
+    config.add_renderer("json", json_renderer)
+    config.add_request_method(
+        "appenlight.lib.request.es_conn", "es_conn", property=True
+    )
+    config.add_request_method(
+        "appenlight.lib.request.get_user", "user", reify=True, property=True
+    )
+    config.add_request_method(
+        "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True
+    )
+    config.add_request_method(
+        "appenlight.lib.request.safe_json_body",
+        "safe_json_body",
+        reify=True,
+        property=True,
+    )
+    config.add_request_method(
+        "appenlight.lib.request.unsafe_json_body",
+        "unsafe_json_body",
+        reify=True,
+        property=True,
+    )
+    config.add_request_method(
+        "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers"
+    )
+    config.add_request_method(
+        "appenlight.lib.request.get_authomatic", "authomatic", reify=True
+    )
 
-    config.include('pyramid_redis_sessions')
-    config.include('pyramid_tm')
-    config.include('pyramid_jinja2')
-    config.include('pyramid_mailer')
-    config.include('appenlight_client.ext.pyramid_tween')
-    config.include('ziggurat_foundations.ext.pyramid.sign_in')
-    es_server_list = aslist(settings['elasticsearch.nodes'])
-    redis_url = settings['redis.url']
-    log.warning('Elasticsearch server list: {}'.format(es_server_list))
-    log.warning('Redis server: {}'.format(redis_url))
+    config.include("pyramid_redis_sessions")
+    config.include("pyramid_tm")
+    config.include("pyramid_jinja2")
+    config.include("pyramid_mailer")
+    config.include("appenlight_client.ext.pyramid_tween")
+    config.include("ziggurat_foundations.ext.pyramid.sign_in")
+    es_server_list = aslist(settings["elasticsearch.nodes"])
+    redis_url = settings["redis.url"]
+    log.warning("Elasticsearch server list: {}".format(es_server_list))
+    log.warning("Redis server: {}".format(redis_url))
     config.registry.es_conn = Elasticsearch(es_server_list)
     config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)
 
-    config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url']],
-                                            retry_count=0, retry_delay=0)
+    config.registry.redis_lockmgr = Redlock(
+        [settings["redis.redlock.url"]], retry_count=0, retry_delay=0
+    )
     # mailer bw compat
     config.registry.mailer = config.registry.getUtility(IMailer)
 
@@ -163,47 +183,56 @@ def main(global_config, **settings):
     config.set_session_factory(session_factory)
 
     # Configure renderers and event subscribers
-    config.add_jinja2_extension('jinja2.ext.loopcontrols')
-    config.add_jinja2_search_path('appenlight:templates')
+    config.add_jinja2_extension("jinja2.ext.loopcontrols")
+    config.add_jinja2_search_path("appenlight:templates")
     # event subscribers
-    config.add_subscriber("appenlight.subscribers.application_created",
-                          "pyramid.events.ApplicationCreated")
-    config.add_subscriber("appenlight.subscribers.add_renderer_globals",
-                          "pyramid.events.BeforeRender")
-    config.add_subscriber('appenlight.subscribers.new_request',
-                          'pyramid.events.NewRequest')
-    config.add_view_predicate('context_type_class',
-                              'appenlight.predicates.contextTypeClass')
+    config.add_subscriber(
+        "appenlight.subscribers.application_created",
+        "pyramid.events.ApplicationCreated",
+    )
+    config.add_subscriber(
+        "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender"
+    )
+    config.add_subscriber(
+        "appenlight.subscribers.new_request", "pyramid.events.NewRequest"
+    )
+    config.add_view_predicate(
+        "context_type_class", "appenlight.predicates.contextTypeClass"
+    )
 
-    register_datastores(es_conn=config.registry.es_conn,
-                        redis_conn=config.registry.redis_conn,
-                        redis_lockmgr=config.registry.redis_lockmgr)
+    register_datastores(
+        es_conn=config.registry.es_conn,
+        redis_conn=config.registry.redis_conn,
+        redis_lockmgr=config.registry.redis_lockmgr,
+    )
 
     # base stuff and scan
 
     # need to ensure webassets exists otherwise config.override_asset()
     # throws exception
-    if not os.path.exists(settings['webassets.dir']):
-        os.mkdir(settings['webassets.dir'])
-    config.add_static_view(path='appenlight:webassets',
-                           name='static', cache_max_age=3600)
-    config.override_asset(to_override='appenlight:webassets/',
-                          override_with=settings['webassets.dir'])
+    if not os.path.exists(settings["webassets.dir"]):
+        os.mkdir(settings["webassets.dir"])
+    config.add_static_view(
+        path="appenlight:webassets", name="static", cache_max_age=3600
+    )
+    config.override_asset(
+        to_override="appenlight:webassets/", override_with=settings["webassets.dir"]
+    )
 
-    config.include('appenlight.views')
-    config.include('appenlight.views.admin')
-    config.scan(ignore=['appenlight.migrations', 'appenlight.scripts',
-                        'appenlight.tests'])
+    config.include("appenlight.views")
+    config.include("appenlight.views.admin")
+    config.scan(
+        ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"]
+    )
 
-    config.add_directive('register_appenlight_plugin',
-                         register_appenlight_plugin)
+    config.add_directive("register_appenlight_plugin", register_appenlight_plugin)
 
-    for entry_point in iter_entry_points(group='appenlight.plugins'):
+    for entry_point in iter_entry_points(group="appenlight.plugins"):
         plugin = entry_point.load()
         plugin.includeme(config)
 
     # include other appenlight plugins explictly if needed
-    includes = aslist(settings.get('appenlight.includes', []))
+    includes = aslist(settings.get("appenlight.includes", []))
     for inc in includes:
         config.include(inc)
 
@@ -211,8 +240,8 @@ def main(global_config, **settings):
 
     def pre_commit():
         jinja_env = config.get_jinja2_environment()
-        jinja_env.filters['tojson'] = json.dumps
-        jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe
+        jinja_env.filters["tojson"] = json.dumps
+        jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe
 
     config.action(None, pre_commit, order=PHASE3_CONFIG + 999)
 
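Aside: the json_renderer wiring in this file uses Pyramid's standard renderer-adapter hook: an adapter receives the otherwise unserializable object plus the request and returns something JSON-safe. A minimal, self-contained sketch (the adapter body here is an assumption; the diff only shows the function signature):

    import datetime
    from pyramid.renderers import JSON

    json_renderer = JSON(indent=4)

    def datetime_adapter(obj, request):
        # emit datetimes as ISO 8601 strings instead of raising TypeError
        return obj.isoformat()

    json_renderer.add_adapter(datetime.datetime, datetime_adapter)
    # config.add_renderer("json", json_renderer) then exposes it to views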
--- a/src/appenlight/celery/__init__.py
+++ b/src/appenlight/celery/__init__.py
@@ -34,15 +34,23 @@ from appenlight_client.ext.celery import register_signals
 
 log = logging.getLogger(__name__)
 
-register('date_json', json_dumps, json_loads,
-         content_type='application/x-date_json',
-         content_encoding='utf-8')
+register(
+    "date_json",
+    json_dumps,
+    json_loads,
+    content_type="application/x-date_json",
+    content_encoding="utf-8",
+)
 
 celery = Celery()
 
-celery.user_options['preload'].add(
-    Option('--ini', dest='ini', default=None,
-           help='Specifies pyramid configuration file location.')
+celery.user_options["preload"].add(
+    Option(
+        "--ini",
+        dest="ini",
+        default=None,
+        help="Specifies pyramid configuration file location.",
+    )
 )
 
 
@@ -51,19 +59,21 @@ def on_preload_parsed(options, **kwargs):
     """
     This actually configures celery from pyramid config file
     """
-    celery.conf['INI_PYRAMID'] = options['ini']
+    celery.conf["INI_PYRAMID"] = options["ini"]
     import appenlight_client.client as e_client
-    ini_location = options['ini']
+
+    ini_location = options["ini"]
     if not ini_location:
-        raise Exception('You need to pass pyramid ini location using '
-                        '--ini=filename.ini argument to the worker')
+        raise Exception(
+            "You need to pass pyramid ini location using "
+            "--ini=filename.ini argument to the worker"
+        )
     env = bootstrap(ini_location[0])
-    api_key = env['request'].registry.settings['appenlight.api_key']
-    tr_config = env['request'].registry.settings.get(
-        'appenlight.transport_config')
-    CONFIG = e_client.get_config({'appenlight.api_key': api_key})
+    api_key = env["request"].registry.settings["appenlight.api_key"]
+    tr_config = env["request"].registry.settings.get("appenlight.transport_config")
+    CONFIG = e_client.get_config({"appenlight.api_key": api_key})
     if tr_config:
-        CONFIG['appenlight.transport_config'] = tr_config
+        CONFIG["appenlight.transport_config"] = tr_config
     APPENLIGHT_CLIENT = e_client.Client(CONFIG)
     # log.addHandler(APPENLIGHT_CLIENT.log_handler)
     register_signals(APPENLIGHT_CLIENT)
@@ -71,101 +81,101 @@ def on_preload_parsed(options, **kwargs):
 
 
 celery_config = {
-    'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
-    'CELERYD_TASK_TIME_LIMIT': 60,
-    'CELERYD_MAX_TASKS_PER_CHILD': 1000,
-    'CELERY_IGNORE_RESULT': True,
-    'CELERY_ACCEPT_CONTENT': ['date_json'],
-    'CELERY_TASK_SERIALIZER': 'date_json',
-    'CELERY_RESULT_SERIALIZER': 'date_json',
-    'BROKER_URL': None,
-    'CELERYD_CONCURRENCY': None,
-    'CELERY_TIMEZONE': None,
-    'CELERYBEAT_SCHEDULE': {
-        'alerting_reports': {
-            'task': 'appenlight.celery.tasks.alerting_reports',
-            'schedule': timedelta(seconds=60)
-        },
-        'close_alerts': {
-            'task': 'appenlight.celery.tasks.close_alerts',
-            'schedule': timedelta(seconds=60)
-        }
-    }
+    "CELERY_IMPORTS": ["appenlight.celery.tasks"],
+    "CELERYD_TASK_TIME_LIMIT": 60,
+    "CELERYD_MAX_TASKS_PER_CHILD": 1000,
+    "CELERY_IGNORE_RESULT": True,
+    "CELERY_ACCEPT_CONTENT": ["date_json"],
+    "CELERY_TASK_SERIALIZER": "date_json",
+    "CELERY_RESULT_SERIALIZER": "date_json",
+    "BROKER_URL": None,
+    "CELERYD_CONCURRENCY": None,
+    "CELERY_TIMEZONE": None,
+    "CELERYBEAT_SCHEDULE": {
+        "alerting_reports": {
+            "task": "appenlight.celery.tasks.alerting_reports",
+            "schedule": timedelta(seconds=60),
+        },
+        "close_alerts": {
+            "task": "appenlight.celery.tasks.close_alerts",
+            "schedule": timedelta(seconds=60),
+        },
+    },
 }
 celery.config_from_object(celery_config)
 
 
 def configure_celery(pyramid_registry):
     settings = pyramid_registry.settings
-    celery_config['BROKER_URL'] = settings['celery.broker_url']
-    celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
-    celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
+    celery_config["BROKER_URL"] = settings["celery.broker_url"]
+    celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"]
+    celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"]
 
-    notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
+    notifications_seconds = int(
+        settings.get("tasks.notifications_reports.interval", 60)
+    )
 
-    celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
-        'task': 'appenlight.celery.tasks.notifications_reports',
-        'schedule': timedelta(seconds=notifications_seconds)
+    celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = {
+        "task": "appenlight.celery.tasks.notifications_reports",
+        "schedule": timedelta(seconds=notifications_seconds),
     }
 
-    celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
-        'task': 'appenlight.celery.tasks.daily_digest',
-        'schedule': crontab(minute=1, hour='4,12,20')
+    celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = {
+        "task": "appenlight.celery.tasks.daily_digest",
+        "schedule": crontab(minute=1, hour="4,12,20"),
     }
 
-    if asbool(settings.get('celery.always_eager')):
-        celery_config['CELERY_ALWAYS_EAGER'] = True
-        celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
+    if asbool(settings.get("celery.always_eager")):
+        celery_config["CELERY_ALWAYS_EAGER"] = True
+        celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True
 
     for plugin in pyramid_registry.appenlight_plugins.values():
-        if plugin.get('celery_tasks'):
-            celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
-        if plugin.get('celery_beats'):
-            for name, config in plugin['celery_beats']:
-                celery_config['CELERYBEAT_SCHEDULE'][name] = config
+        if plugin.get("celery_tasks"):
+            celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"])
+        if plugin.get("celery_beats"):
+            for name, config in plugin["celery_beats"]:
+                celery_config["CELERYBEAT_SCHEDULE"][name] = config
     celery.config_from_object(celery_config)
 
 
 @task_prerun.connect
 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         env = celery.pyramid
-        env = prepare(registry=env['request'].registry)
-        proper_base_url = env['request'].registry.settings['mailing.app_url']
-        tmp_req = Request.blank('/', base_url=proper_base_url)
+        env = prepare(registry=env["request"].registry)
+        proper_base_url = env["request"].registry.settings["mailing.app_url"]
+        tmp_req = Request.blank("/", base_url=proper_base_url)
         # ensure tasks generate url for right domain from config
-        env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
-        env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
-        env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
-        env['request'].environ['wsgi.url_scheme'] = \
-            tmp_req.environ['wsgi.url_scheme']
+        env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"]
+        env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"]
+        env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"]
+        env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"]
     get_current_request().tm.begin()
 
 
 @task_success.connect
 def task_success_signal(result, **kwargs):
     get_current_request().tm.commit()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()
 
 
 @task_retry.connect
 def task_retry_signal(request, reason, einfo, **kwargs):
     get_current_request().tm.abort()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()
 
 
 @task_failure.connect
-def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
-                        **kwaargs):
+def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs):
     get_current_request().tm.abort()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()
 
 
 @task_revoked.connect
 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
     get_current_request().tm.abort()
-    if hasattr(celery, 'pyramid'):
+    if hasattr(celery, "pyramid"):
         celery.pyramid["closer"]()
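Aside: the register("date_json", ...) call reformatted above installs a custom Kombu serializer so task payloads can carry datetime values. A rough round-trip sketch, assuming kombu 4.x's kombu.serialization API (the .encode() step is only needed here because json_loads expects bytes, as the transport would deliver):

    from datetime import datetime
    from kombu.serialization import register, dumps, loads

    from appenlight.lib.ext_json import json_dumps, json_loads

    register(
        "date_json",
        json_dumps,
        json_loads,
        content_type="application/x-date_json",
        content_encoding="utf-8",
    )

    content_type, encoding, payload = dumps(
        {"when": datetime.utcnow()}, serializer="date_json"
    )
    restored = loads(payload.encode("utf-8"), content_type, encoding)
    # restored["when"] is a datetime again, not a plain string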
--- a/src/appenlight/lib/ext_json.py
+++ b/src/appenlight/lib/ext_json.py
@@ -17,38 +17,29 @@
 import json
 from datetime import datetime, date, timedelta
 
-DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
+DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
 
 
 class DateEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, datetime):
-            return {
-                '__type__': '__datetime__',
-                'iso': obj.strftime(DATE_FORMAT)
-            }
+            return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
         elif isinstance(obj, date):
-            return {
-                '__type__': '__date__',
-                'iso': obj.strftime(DATE_FORMAT)
-            }
+            return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)}
         elif isinstance(obj, timedelta):
-            return {
-                '__type__': '__timedelta__',
-                'seconds': obj.total_seconds()
-            }
+            return {"__type__": "__timedelta__", "seconds": obj.total_seconds()}
         else:
             return json.JSONEncoder.default(self, obj)
 
 
 def date_decoder(dct):
-    if '__type__' in dct:
-        if dct['__type__'] == '__datetime__':
-            return datetime.strptime(dct['iso'], DATE_FORMAT)
-        elif dct['__type__'] == '__date__':
-            return datetime.strptime(dct['iso'], DATE_FORMAT).date()
-        elif dct['__type__'] == '__timedelta__':
-            return timedelta(seconds=dct['seconds'])
+    if "__type__" in dct:
+        if dct["__type__"] == "__datetime__":
+            return datetime.strptime(dct["iso"], DATE_FORMAT)
+        elif dct["__type__"] == "__date__":
+            return datetime.strptime(dct["iso"], DATE_FORMAT).date()
+        elif dct["__type__"] == "__timedelta__":
+            return timedelta(seconds=dct["seconds"])
     return dct
 
 
@@ -57,4 +48,4 @@ def json_dumps(obj):
 
 
 def json_loads(obj):
-    return json.loads(obj.decode('utf8'), object_hook=date_decoder)
+    return json.loads(obj.decode("utf8"), object_hook=date_decoder)
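Aside: DateEncoder and date_decoder round-trip datetime, date, and timedelta values through tagged JSON objects. A quick usage sketch against the code in this file:

    import json
    from datetime import datetime, timedelta

    from appenlight.lib.ext_json import DateEncoder, date_decoder

    payload = {"at": datetime(2019, 3, 1, 12, 30), "window": timedelta(minutes=5)}
    encoded = json.dumps(payload, cls=DateEncoder)
    decoded = json.loads(encoded, object_hook=date_decoder)
    assert decoded["at"] == payload["at"]
    assert decoded["window"].total_seconds() == 300.0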
--- a/src/appenlight/celery/tasks.py
+++ b/src/appenlight/celery/tasks.py
@@ -51,9 +51,11 @@ from appenlight.lib.enums import ReportType
 
 log = get_task_logger(__name__)
 
-sample_boundries = list(range(100, 1000, 100)) + \
-                   list(range(1000, 10000, 1000)) + \
-                   list(range(10000, 100000, 5000))
+sample_boundries = (
+    list(range(100, 1000, 100))
+    + list(range(1000, 10000, 1000))
+    + list(range(10000, 100000, 5000))
+)
 
 
 def pick_sample(total_occurences, report_type=None):
@@ -70,9 +72,9 @@ def pick_sample(total_occurences, report_type=None):
 
 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
 def test_exception_task():
-    log.error('test celery log', extra={'location': 'celery'})
-    log.warning('test celery log', extra={'location': 'celery'})
-    raise Exception('Celery exception test')
+    log.error("test celery log", extra={"location": "celery"})
+    log.warning("test celery log", extra={"location": "celery"})
+    raise Exception("Celery exception test")
 
 
 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
@@ -81,9 +83,9 @@ def test_retry_exception_task():
         import time
 
         time.sleep(1.3)
-        log.error('test retry celery log', extra={'location': 'celery'})
-        log.warning('test retry celery log', extra={'location': 'celery'})
-        raise Exception('Celery exception test')
+        log.error("test retry celery log", extra={"location": "celery"})
+        log.warning("test retry celery log", extra={"location": "celery"})
+        raise Exception("Celery exception test")
     except Exception as exc:
         if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
             raise
@@ -92,7 +94,7 @@ def test_retry_exception_task():
 
 @celery.task(queue="reports", default_retry_delay=600, max_retries=144)
 def add_reports(resource_id, request_params, dataset, **kwargs):
-    proto_version = parse_proto(request_params.get('protocol_version', ''))
+    proto_version = parse_proto(request_params.get("protocol_version", ""))
     current_time = datetime.utcnow().replace(second=0, microsecond=0)
     try:
         # we will store solr docs here for single insert
@@ -114,22 +116,26 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
             report_group = ReportGroupService.by_hash_and_resource(
                 report.resource_id,
                 report.grouping_hash,
-                since_when=datetime.utcnow().date().replace(day=1)
+                since_when=datetime.utcnow().date().replace(day=1),
             )
-            occurences = report_data.get('occurences', 1)
+            occurences = report_data.get("occurences", 1)
             if not report_group:
                 # total reports will be +1 moment later
-                report_group = ReportGroup(grouping_hash=report.grouping_hash,
-                                           occurences=0, total_reports=0,
-                                           last_report=0,
-                                           priority=report.priority,
-                                           error=report.error,
-                                           first_timestamp=report.start_time)
+                report_group = ReportGroup(
+                    grouping_hash=report.grouping_hash,
+                    occurences=0,
+                    total_reports=0,
+                    last_report=0,
+                    priority=report.priority,
+                    error=report.error,
+                    first_timestamp=report.start_time,
+                )
                 report_group._skip_ft_index = True
                 report_group.report_type = report.report_type
             report.report_group_time = report_group.first_timestamp
-            add_sample = pick_sample(report_group.occurences,
-                                     report_type=report_group.report_type)
+            add_sample = pick_sample(
+                report_group.occurences, report_type=report_group.report_type
+            )
             if add_sample:
                 resource.report_groups.append(report_group)
                 report_group.reports.append(report)
@@ -144,28 +150,26 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
                 for s_call in slow_calls:
                     if s_call.partition_id not in es_slow_calls_docs:
                         es_slow_calls_docs[s_call.partition_id] = []
-                    es_slow_calls_docs[s_call.partition_id].append(
-                        s_call.es_doc())
+                    es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc())
                 # try generating new stat rows if needed
             else:
                 # required for postprocessing to not fail later
                 report.report_group = report_group
 
-            stat_row = ReportService.generate_stat_rows(
-                report, resource, report_group)
+            stat_row = ReportService.generate_stat_rows(report, resource, report_group)
             if stat_row.partition_id not in es_reports_stats_rows:
                 es_reports_stats_rows[stat_row.partition_id] = []
-            es_reports_stats_rows[stat_row.partition_id].append(
-                stat_row.es_doc())
+            es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc())
 
             # see if we should mark 10th occurence of report
             last_occurences_10 = int(math.floor(report_group.occurences / 10))
-            curr_occurences_10 = int(math.floor(
-                (report_group.occurences + report.occurences) / 10))
-            last_occurences_100 = int(math.floor(
-                report_group.occurences / 100))
-            curr_occurences_100 = int(math.floor(
-                (report_group.occurences + report.occurences) / 100))
+            curr_occurences_10 = int(
+                math.floor((report_group.occurences + report.occurences) / 10)
+            )
+            last_occurences_100 = int(math.floor(report_group.occurences / 100))
+            curr_occurences_100 = int(
+                math.floor((report_group.occurences + report.occurences) / 100)
+            )
             notify_occurences_10 = last_occurences_10 != curr_occurences_10
             notify_occurences_100 = last_occurences_100 != curr_occurences_100
             report_group.occurences = ReportGroup.occurences + occurences
@@ -178,39 +182,47 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
             if added_details:
                 report_group.total_reports = ReportGroup.total_reports + 1
                 report_group.last_report = report.id
-            report_group.set_notification_info(notify_10=notify_occurences_10,
-                                               notify_100=notify_occurences_100)
+            report_group.set_notification_info(
+                notify_10=notify_occurences_10, notify_100=notify_occurences_100
+            )
             DBSession.flush()
             report_group.get_report().notify_channel(report_group)
             if report_group.partition_id not in es_report_group_docs:
                 es_report_group_docs[report_group.partition_id] = []
             es_report_group_docs[report_group.partition_id].append(
-                report_group.es_doc())
+                report_group.es_doc()
+            )
 
-        action = 'REPORT'
-        log_msg = '%s: %s %s, client: %s, proto: %s' % (
+        action = "REPORT"
+        log_msg = "%s: %s %s, client: %s, proto: %s" % (
             action,
-            report_data.get('http_status', 'unknown'),
+            report_data.get("http_status", "unknown"),
             str(resource),
-            report_data.get('client'),
-            proto_version)
+            report_data.get("client"),
+            proto_version,
+        )
         log.info(log_msg)
         total_reports = len(dataset)
         redis_pipeline = Datastores.redis.pipeline(transaction=False)
-        key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
+        key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
         redis_pipeline.incr(key, total_reports)
         redis_pipeline.expire(key, 3600 * 24)
-        key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
-            resource.owner_user_id, current_time)
+        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
+            resource.owner_user_id, current_time
+        )
         redis_pipeline.incr(key, total_reports)
         redis_pipeline.expire(key, 3600)
-        key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format(
-            resource_id, current_time.replace(minute=0))
+        key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
+            resource_id, current_time.replace(minute=0)
+        )
         redis_pipeline.incr(key, total_reports)
         redis_pipeline.expire(key, 3600 * 24 * 7)
         redis_pipeline.sadd(
-            REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
-                current_time.replace(minute=0)), resource_id)
+            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
+                current_time.replace(minute=0)
+            ),
+            resource_id,
+        )
         redis_pipeline.execute()
 
         add_reports_es(es_report_group_docs, es_report_docs)
@@ -227,11 +239,11 @@ def add_reports(resource_id, request_params, dataset, **kwargs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_reports_es(report_group_docs, report_docs):
     for k, v in report_group_docs.items():
-        to_update = {'_index': k, '_type': 'report_group'}
+        to_update = {"_index": k, "_type": "report_group"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)
     for k, v in report_docs.items():
-        to_update = {'_index': k, '_type': 'report'}
+        to_update = {"_index": k, "_type": "report"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)
 
@@ -239,7 +251,7 @@ def add_reports_es(report_group_docs, report_docs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_reports_slow_calls_es(es_docs):
     for k, v in es_docs.items():
-        to_update = {'_index': k, '_type': 'log'}
+        to_update = {"_index": k, "_type": "log"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)
 
@@ -247,14 +259,14 @@ def add_reports_slow_calls_es(es_docs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_reports_stats_rows_es(es_docs):
     for k, v in es_docs.items():
-        to_update = {'_index': k, '_type': 'log'}
+        to_update = {"_index": k, "_type": "log"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)
 
 
 @celery.task(queue="logs", default_retry_delay=600, max_retries=144)
 def add_logs(resource_id, request_params, dataset, **kwargs):
-    proto_version = request_params.get('protocol_version')
+    proto_version = request_params.get("protocol_version")
     current_time = datetime.utcnow().replace(second=0, microsecond=0)
 
     try:
@@ -264,16 +276,15 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
         ns_pairs = []
         for entry in dataset:
             # gather pk and ns so we can remove older versions of row later
-            if entry['primary_key'] is not None:
-                ns_pairs.append({"pk": entry['primary_key'],
-                                 "ns": entry['namespace']})
+            if entry["primary_key"] is not None:
+                ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]})
             log_entry = Log()
             log_entry.set_data(entry, resource=resource)
             log_entry._skip_ft_index = True
             resource.logs.append(log_entry)
             DBSession.flush()
             # insert non pk rows first
-            if entry['primary_key'] is None:
+            if entry["primary_key"] is None:
                 es_docs[log_entry.partition_id].append(log_entry.es_doc())
 
         # 2nd pass to delete all log entries from db foe same pk/ns pair
@@ -282,7 +293,8 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
             es_docs = collections.defaultdict(list)
             es_docs_to_delete = collections.defaultdict(list)
             found_pkey_logs = LogService.query_by_primary_key_and_namespace(
-                list_of_pairs=ns_pairs)
+                list_of_pairs=ns_pairs
+            )
             log_dict = {}
             for log_entry in found_pkey_logs:
                 log_key = (log_entry.primary_key, log_entry.namespace)
@@ -299,51 +311,58 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
                     ids_to_delete.append(e.log_id)
                     es_docs_to_delete[e.partition_id].append(e.delete_hash)
 
-                es_docs_to_delete[log_entry.partition_id].append(
-                    log_entry.delete_hash)
+                es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash)
 
                 es_docs[log_entry.partition_id].append(log_entry.es_doc())
 
             if ids_to_delete:
-                query = DBSession.query(Log).filter(
-                    Log.log_id.in_(ids_to_delete))
+                query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete))
                 query.delete(synchronize_session=False)
             if es_docs_to_delete:
                 # batch this to avoid problems with default ES bulk limits
                 for es_index in es_docs_to_delete.keys():
                     for batch in in_batches(es_docs_to_delete[es_index], 20):
-                        query = {"query": {'terms': {'delete_hash': batch}}}
+                        query = {"query": {"terms": {"delete_hash": batch}}}
 
                         try:
                             Datastores.es.transport.perform_request(
-                                "DELETE", '/{}/{}/_query'.format(es_index, 'log'), body=query)
+                                "DELETE",
+                                "/{}/{}/_query".format(es_index, "log"),
+                                body=query,
+                            )
                         except elasticsearch.exceptions.NotFoundError as exc:
-                            msg = 'skipping index {}'.format(es_index)
+                            msg = "skipping index {}".format(es_index)
                             log.info(msg)
 
         total_logs = len(dataset)
 
-        log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
+        log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % (
             str(resource),
             total_logs,
-            proto_version)
+            proto_version,
+        )
         log.info(log_msg)
         # mark_changed(session)
         redis_pipeline = Datastores.redis.pipeline(transaction=False)
-        key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
+        key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
        redis_pipeline.incr(key, total_logs)
         redis_pipeline.expire(key, 3600 * 24)
-        key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
-            resource.owner_user_id, current_time)
+        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
+            resource.owner_user_id, current_time
+        )
         redis_pipeline.incr(key, total_logs)
         redis_pipeline.expire(key, 3600)
-        key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format(
-            resource_id, current_time.replace(minute=0))
+        key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format(
+            resource_id, current_time.replace(minute=0)
+        )
         redis_pipeline.incr(key, total_logs)
         redis_pipeline.expire(key, 3600 * 24 * 7)
         redis_pipeline.sadd(
-            REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
-                current_time.replace(minute=0)), resource_id)
+            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
+                current_time.replace(minute=0)
+            ),
+            resource_id,
+        )
         redis_pipeline.execute()
         add_logs_es(es_docs)
         return True
@@ -357,7 +376,7 @@ def add_logs(resource_id, request_params, dataset, **kwargs):
 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
 def add_logs_es(es_docs):
     for k, v in es_docs.items():
-        to_update = {'_index': k, '_type': 'log'}
+        to_update = {"_index": k, "_type": "log"}
         [i.update(to_update) for i in v]
         elasticsearch.helpers.bulk(Datastores.es, v)
 
@@ -371,45 +390,51 b' def add_metrics(resource_id, request_params, dataset, proto_version):' | |||||
371 | es_docs = [] |
|
390 | es_docs = [] | |
372 | rows = [] |
|
391 | rows = [] | |
373 | for metric in dataset: |
|
392 | for metric in dataset: | |
374 |
tags = dict(metric[ |
|
393 | tags = dict(metric["tags"]) | |
375 |
server_n = tags.get( |
|
394 | server_n = tags.get("server_name", metric["server_name"]).lower() | |
376 |
tags[ |
|
395 | tags["server_name"] = server_n or "unknown" | |
377 | new_metric = Metric( |
|
396 | new_metric = Metric( | |
378 |
timestamp=metric[ |
|
397 | timestamp=metric["timestamp"], | |
379 | resource_id=resource.resource_id, |
|
398 | resource_id=resource.resource_id, | |
380 |
namespace=metric[ |
|
399 | namespace=metric["namespace"], | |
381 |
tags=tags |
|
400 | tags=tags, | |
|
401 | ) | |||
382 | rows.append(new_metric) |
|
402 | rows.append(new_metric) | |
383 | es_docs.append(new_metric.es_doc()) |
|
403 | es_docs.append(new_metric.es_doc()) | |
384 | session = DBSession() |
|
404 | session = DBSession() | |
385 | session.bulk_save_objects(rows) |
|
405 | session.bulk_save_objects(rows) | |
386 | session.flush() |
|
406 | session.flush() | |
387 |
|
407 | |||
388 |
action = |
|
408 | action = "METRICS" | |
389 |
metrics_msg = |
|
409 | metrics_msg = "%s: %s, metrics: %s, proto:%s" % ( | |
390 | action, |
|
410 | action, | |
391 | str(resource), |
|
411 | str(resource), | |
392 | len(dataset), |
|
412 | len(dataset), | |
393 | proto_version |
|
413 | proto_version, | |
394 | ) |
|
414 | ) | |
395 | log.info(metrics_msg) |
|
415 | log.info(metrics_msg) | |
396 |
|
416 | |||
397 | mark_changed(session) |
|
417 | mark_changed(session) | |
398 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
418 | redis_pipeline = Datastores.redis.pipeline(transaction=False) | |
399 |
key = REDIS_KEYS[ |
|
419 | key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time) | |
400 | redis_pipeline.incr(key, len(rows)) |
|
420 | redis_pipeline.incr(key, len(rows)) | |
401 | redis_pipeline.expire(key, 3600 * 24) |
|
421 | redis_pipeline.expire(key, 3600 * 24) | |
402 |
key = REDIS_KEYS[ |
|
422 | key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format( | |
403 |
resource.owner_user_id, current_time |
|
423 | resource.owner_user_id, current_time | |
|
424 | ) | |||
404 | redis_pipeline.incr(key, len(rows)) |
|
425 | redis_pipeline.incr(key, len(rows)) | |
405 | redis_pipeline.expire(key, 3600) |
|
426 | redis_pipeline.expire(key, 3600) | |
406 |
key = REDIS_KEYS[ |
|
427 | key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format( | |
407 |
resource_id, current_time.replace(minute=0) |
|
428 | resource_id, current_time.replace(minute=0) | |
|
429 | ) | |||
408 | redis_pipeline.incr(key, len(rows)) |
|
430 | redis_pipeline.incr(key, len(rows)) | |
409 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
431 | redis_pipeline.expire(key, 3600 * 24 * 7) | |
410 | redis_pipeline.sadd( |
|
432 | redis_pipeline.sadd( | |
411 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format( |
|
433 | REDIS_KEYS["apps_that_got_new_data_per_hour"].format( | |
412 | current_time.replace(minute=0)), resource_id) |
|
434 | current_time.replace(minute=0) | |
|
435 | ), | |||
|
436 | resource_id, | |||
|
437 | ) | |||
413 | redis_pipeline.execute() |
|
438 | redis_pipeline.execute() | |
414 | add_metrics_es(es_docs) |
|
439 | add_metrics_es(es_docs) | |
415 | return True |
|
440 | return True | |
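The counter bookkeeping above batches several commands through one non-transactional pipeline. A sketch of the idea with redis-py; the key name and TTL are illustrative:

    import redis

    r = redis.StrictRedis()
    pipe = r.pipeline(transaction=False)  # batch round-trips, no MULTI/EXEC
    key = "counters:metrics_per_minute:2019-01-01T00:00"
    pipe.incr(key, 10)           # bump by the number of rows written
    pipe.expire(key, 3600 * 24)  # counters age out on their own
    pipe.execute()

transaction=False trades atomicity for fewer round-trips, which is a reasonable fit for best-effort counters like these.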
@@ -423,8 +448,8 b' def add_metrics(resource_id, request_params, dataset, proto_version):' | |||||
423 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
448 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
424 | def add_metrics_es(es_docs): |
|
449 | def add_metrics_es(es_docs): | |
425 | for doc in es_docs: |
|
450 | for doc in es_docs: | |
426 | partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d') |
|
451 | partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d") | |
427 | Datastores.es.index(partition, 'log', doc) |
|
452 | Datastores.es.index(partition, "log", doc) | |
428 |
|
453 | |||
429 |
|
454 | |||
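The partition name is derived from each document's own timestamp, so a day's metrics land in one index and old data can be dropped by deleting whole indices. A self-contained sketch of the naming rule shown above:

    from datetime import datetime

    doc = {"timestamp": datetime(2019, 1, 1, 12, 30)}
    partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d")
    assert partition == "rcae_m_2019_01_01"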
430 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
455 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) | |
@@ -435,10 +460,12 b' def check_user_report_notifications(resource_id):' | |||||
435 | application = ApplicationService.by_id(resource_id) |
|
460 | application = ApplicationService.by_id(resource_id) | |
436 | if not application: |
|
461 | if not application: | |
437 | return |
|
462 | return | |
438 | error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
463 | error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format( | |
439 | ReportType.error, resource_id) |
|
464 | ReportType.error, resource_id | |
440 | slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
465 | ) | |
441 | ReportType.slow, resource_id) |
|
466 | slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format( | |
|
467 | ReportType.slow, resource_id | |||
|
468 | ) | |||
442 | error_group_ids = Datastores.redis.smembers(error_key) |
|
469 | error_group_ids = Datastores.redis.smembers(error_key) | |
443 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
470 | slow_group_ids = Datastores.redis.smembers(slow_key) | |
444 | Datastores.redis.delete(error_key) |
|
471 | Datastores.redis.delete(error_key) | |
@@ -448,8 +475,7 b' def check_user_report_notifications(resource_id):' | |||||
448 | group_ids = err_gids + slow_gids |
|
475 | group_ids = err_gids + slow_gids | |
449 | occurence_dict = {} |
|
476 | occurence_dict = {} | |
450 | for g_id in group_ids: |
|
477 | for g_id in group_ids: | |
451 | key = REDIS_KEYS['counters']['report_group_occurences'].format( |
|
478 | key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id) | |
452 | g_id) |
|
|||
453 | val = Datastores.redis.get(key) |
|
479 | val = Datastores.redis.get(key) | |
454 | Datastores.redis.delete(key) |
|
480 | Datastores.redis.delete(key) | |
455 | if val: |
|
481 | if val: | |
@@ -460,14 +486,23 b' def check_user_report_notifications(resource_id):' | |||||
460 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
486 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) | |
461 |
|
487 | |||
462 | ApplicationService.check_for_groups_alert( |
|
488 | ApplicationService.check_for_groups_alert( | |
463 | application, 'alert', report_groups=report_groups, |
|
489 | application, | |
464 | occurence_dict=occurence_dict) |
|
490 | "alert", | |
465 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) |
|
491 | report_groups=report_groups, | |
|
492 | occurence_dict=occurence_dict, | |||
|
493 | ) | |||
|
494 | users = set( | |||
|
495 | [p.user for p in ResourceService.users_for_perm(application, "view")] | |||
|
496 | ) | |||
466 | report_groups = report_groups.all() |
|
497 | report_groups = report_groups.all() | |
467 | for user in users: |
|
498 | for user in users: | |
468 | UserService.report_notify(user, request, application, |
|
499 | UserService.report_notify( | |
|
500 | user, | |||
|
501 | request, | |||
|
502 | application, | |||
469 | report_groups=report_groups, |
|
503 | report_groups=report_groups, | |
470 | occurence_dict=occurence_dict) |
|
504 | occurence_dict=occurence_dict, | |
|
505 | ) | |||
471 | for group in report_groups: |
|
506 | for group in report_groups: | |
472 | # marks report_groups as notified |
|
507 | # marks report_groups as notified | |
473 | if not group.notified: |
|
508 | if not group.notified: | |
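The notification check drains a redis set: read everything queued since the last run, then delete the key so the next window starts empty. A sketch of that drain with an illustrative key name:

    import redis

    r = redis.StrictRedis()
    key = "reports_to_notify:error:some_app"
    group_ids = r.smembers(key)  # ids accumulated since the last run
    r.delete(key)                # reset the window
    ids = [int(g) for g in group_ids]

Note the read and the delete are two separate commands, so ids added in between can be lost; that is a trade-off this kind of best-effort notification loop typically accepts.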
@@ -485,12 +520,12 b' def check_alerts(resource_id):' | |||||
485 | application = ApplicationService.by_id(resource_id) |
|
520 | application = ApplicationService.by_id(resource_id) | |
486 | if not application: |
|
521 | if not application: | |
487 | return |
|
522 | return | |
488 | error_key = REDIS_KEYS[ |
|
523 | error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format( | |
489 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
524 | ReportType.error, resource_id | |
490 | ReportType.error, resource_id) |
|
525 | ) | |
491 | slow_key = REDIS_KEYS[ |
|
526 | slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format( | |
492 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
527 | ReportType.slow, resource_id | |
493 | ReportType.slow, resource_id) |
|
528 | ) | |
494 | error_group_ids = Datastores.redis.smembers(error_key) |
|
529 | error_group_ids = Datastores.redis.smembers(error_key) | |
495 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
530 | slow_group_ids = Datastores.redis.smembers(slow_key) | |
496 | Datastores.redis.delete(error_key) |
|
531 | Datastores.redis.delete(error_key) | |
@@ -500,9 +535,9 b' def check_alerts(resource_id):' | |||||
500 | group_ids = err_gids + slow_gids |
|
535 | group_ids = err_gids + slow_gids | |
501 | occurence_dict = {} |
|
536 | occurence_dict = {} | |
502 | for g_id in group_ids: |
|
537 | for g_id in group_ids: | |
503 | key = REDIS_KEYS['counters'][ |
|
538 | key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format( | |
504 | 'report_group_occurences_alerting'].format( |
|
539 | g_id | |
505 | g_id) |
|
540 | ) | |
506 | val = Datastores.redis.get(key) |
|
541 | val = Datastores.redis.get(key) | |
507 | Datastores.redis.delete(key) |
|
542 | Datastores.redis.delete(key) | |
508 | if val: |
|
543 | if val: | |
@@ -513,8 +548,12 b' def check_alerts(resource_id):' | |||||
513 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
548 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) | |
514 |
|
549 | |||
515 | ApplicationService.check_for_groups_alert( |
|
550 | ApplicationService.check_for_groups_alert( | |
516 | application, 'alert', report_groups=report_groups, |
|
551 | application, | |
517 | occurence_dict=occurence_dict, since_when=since_when) |
|
552 | "alert", | |
|
553 | report_groups=report_groups, | |||
|
554 | occurence_dict=occurence_dict, | |||
|
555 | since_when=since_when, | |||
|
556 | ) | |||
518 | except Exception as exc: |
|
557 | except Exception as exc: | |
519 | print_traceback(log) |
|
558 | print_traceback(log) | |
520 | raise |
|
559 | raise | |
@@ -522,21 +561,21 b' def check_alerts(resource_id):' | |||||
522 |
|
561 | |||
523 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
562 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) | |
524 | def close_alerts(): |
|
563 | def close_alerts(): | |
525 | log.warning('Checking alerts') |
|
564 | log.warning("Checking alerts") | |
526 | since_when = datetime.utcnow() |
|
565 | since_when = datetime.utcnow() | |
527 | try: |
|
566 | try: | |
528 | event_types = [Event.types['error_report_alert'], |
|
567 | event_types = [ | |
529 | Event.types['slow_report_alert']] |
|
568 | Event.types["error_report_alert"], | |
530 | statuses = [Event.statuses['active']] |
|
569 | Event.types["slow_report_alert"], | |
|
570 | ] | |||
|
571 | statuses = [Event.statuses["active"]] | |||
531 | # get events older than 5 min |
|
572 | # get events older than 5 min | |
532 | events = EventService.by_type_and_status( |
|
573 | events = EventService.by_type_and_status( | |
533 | event_types, |
|
574 | event_types, statuses, older_than=(since_when - timedelta(minutes=5)) | |
534 | statuses, |
|
575 | ) | |
535 | older_than=(since_when - timedelta(minutes=5))) |
|
|||
536 | for event in events: |
|
576 | for event in events: | |
537 | # see if we can close them |
|
577 | # see if we can close them | |
538 | event.validate_or_close( |
|
578 | event.validate_or_close(since_when=(since_when - timedelta(minutes=1))) | |
539 | since_when=(since_when - timedelta(minutes=1))) |
|
|||
540 | except Exception as exc: |
|
579 | except Exception as exc: | |
541 | print_traceback(log) |
|
580 | print_traceback(log) | |
542 | raise |
|
581 | raise | |
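close_alerts uses two time windows: only events active for at least five minutes are considered, and each is closed only if it has been quiet for the last minute. A sketch of just the cutoff arithmetic, with the service calls (from the diff) left as comments:

    from datetime import datetime, timedelta

    since_when = datetime.utcnow()
    older_than = since_when - timedelta(minutes=5)   # event must be at least this old
    quiet_since = since_when - timedelta(minutes=1)  # and silent for this long
    # events = EventService.by_type_and_status(event_types, statuses, older_than=older_than)
    # for event in events:
    #     event.validate_or_close(since_when=quiet_since)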
@@ -545,12 +584,18 b' def close_alerts():' | |||||
545 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) |
|
584 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) | |
546 | def update_tag_counter(tag_name, tag_value, count): |
|
585 | def update_tag_counter(tag_name, tag_value, count): | |
547 | try: |
|
586 | try: | |
548 | query = DBSession.query(Tag).filter(Tag.name == tag_name).filter( |
|
587 | query = ( | |
549 | sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value), |
|
588 | DBSession.query(Tag) | |
550 | sa.types.TEXT)) |
|
589 | .filter(Tag.name == tag_name) | |
551 | query.update({'times_seen': Tag.times_seen + count, |
|
590 | .filter( | |
552 | 'last_timestamp': datetime.utcnow()}, |
|
591 | sa.cast(Tag.value, sa.types.TEXT) | |
553 | synchronize_session=False) |
|
592 | == sa.cast(json.dumps(tag_value), sa.types.TEXT) | |
|
593 | ) | |||
|
594 | ) | |||
|
595 | query.update( | |||
|
596 | {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()}, | |||
|
597 | synchronize_session=False, | |||
|
598 | ) | |||
554 | session = DBSession() |
|
599 | session = DBSession() | |
555 | mark_changed(session) |
|
600 | mark_changed(session) | |
556 | return True |
|
601 | return True | |
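Tag.value is a JSON column, so the lookup above casts both sides to TEXT to get a workable equality test, then issues one bulk UPDATE without syncing the session. A sketch of the same technique, assuming a mapped Tag model like the one in the diff:

    import json
    import sqlalchemy as sa

    def bump_tag(session, tag_name, tag_value, count):
        # Tag is the mapped model from the diff; this assumes it is importable
        query = (
            session.query(Tag)
            .filter(Tag.name == tag_name)
            .filter(
                sa.cast(Tag.value, sa.types.TEXT)
                == sa.cast(json.dumps(tag_value), sa.types.TEXT)
            )
        )
        # emits a plain UPDATE; the identity map is not refreshed
        query.update({"times_seen": Tag.times_seen + count}, synchronize_session=False)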
@@ -566,8 +611,8 b' def update_tag_counters():' | |||||
566 | """ |
|
611 | """ | |
567 | Sets task to update counters for application tags |
|
612 | Sets task to update counters for application tags | |
568 | """ |
|
613 | """ | |
569 | tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1) |
|
614 | tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1) | |
570 | Datastores.redis.delete(REDIS_KEYS['seen_tag_list']) |
|
615 | Datastores.redis.delete(REDIS_KEYS["seen_tag_list"]) | |
571 | c = collections.Counter(tags) |
|
616 | c = collections.Counter(tags) | |
572 | for t_json, count in c.items(): |
|
617 | for t_json, count in c.items(): | |
573 | tag_info = json.loads(t_json) |
|
618 | tag_info = json.loads(t_json) | |
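The redis list holds JSON-serialized tags, and collections.Counter collapses duplicates so each unique tag triggers a single counter-update task with the summed count. A runnable sketch of that fold; the sample payloads are illustrative:

    import collections
    import json

    tags = [b'{"name": "type", "value": "error"}',
            b'{"name": "type", "value": "error"}']
    for t_json, count in collections.Counter(tags).items():
        tag_info = json.loads(t_json)
        # update_tag_counter.delay(tag_info["name"], tag_info["value"], count)
        assert count == 2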
@@ -580,28 +625,34 b' def daily_digest():' | |||||
580 | Sends daily digest with top 50 error reports |
|
625 | Sends daily digest with top 50 error reports | |
581 | """ |
|
626 | """ | |
582 | request = get_current_request() |
|
627 | request = get_current_request() | |
583 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
628 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"]) | |
584 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
629 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"]) | |
585 | since_when = datetime.utcnow() - timedelta(hours=8) |
|
630 | since_when = datetime.utcnow() - timedelta(hours=8) | |
586 | log.warning('Generating daily digests') |
|
631 | log.warning("Generating daily digests") | |
587 | for resource_id in apps: |
|
632 | for resource_id in apps: | |
588 | resource_id = resource_id.decode('utf8') |
|
633 | resource_id = resource_id.decode("utf8") | |
589 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
634 | end_date = datetime.utcnow().replace(microsecond=0, second=0) | |
590 | filter_settings = {'resource': [resource_id], |
|
635 | filter_settings = { | |
591 | 'tags': [{'name': 'type', |
|
636 | "resource": [resource_id], | |
592 | 'value': ['error'], 'op': None}], |
|
637 | "tags": [{"name": "type", "value": ["error"], "op": None}], | |
593 | 'type': 'error', 'start_date': since_when, |
|
638 | "type": "error", | |
594 | 'end_date': end_date} |
|
639 | "start_date": since_when, | |
|
640 | "end_date": end_date, | |||
|
641 | } | |||
595 |
|
642 | |||
596 | reports = ReportGroupService.get_trending( |
|
643 | reports = ReportGroupService.get_trending( | |
597 | request, filter_settings=filter_settings, limit=50) |
|
644 | request, filter_settings=filter_settings, limit=50 | |
|
645 | ) | |||
598 |
|
646 | |||
599 | application = ApplicationService.by_id(resource_id) |
|
647 | application = ApplicationService.by_id(resource_id) | |
600 | if application: |
|
648 | if application: | |
601 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) |
|
649 | users = set( | |
|
650 | [p.user for p in ResourceService.users_for_perm(application, "view")] | |||
|
651 | ) | |||
602 | for user in users: |
|
652 | for user in users: | |
603 | user.send_digest(request, application, reports=reports, |
|
653 | user.send_digest( | |
604 | since_when=since_when) |
|
654 | request, application, reports=reports, since_when=since_when | |
|
655 | ) | |||
605 |
|
656 | |||
606 |
|
657 | |||
607 | @celery.task(queue="default") |
|
658 | @celery.task(queue="default") | |
@@ -610,11 +661,12 b' def notifications_reports():' | |||||
610 | Loop that checks redis for info and then issues new tasks to celery to |
|
661 | Loop that checks redis for info and then issues new tasks to celery to | |
611 | issue notifications |
|
662 | issue notifications | |
612 | """ |
|
663 | """ | |
613 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
664 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"]) | |
614 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
665 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"]) | |
615 | for app in apps: |
|
666 | for app in apps: | |
616 | log.warning('Notify for app: %s' % app) |
|
667 | log.warning("Notify for app: %s" % app) | |
617 | check_user_report_notifications.delay(app.decode('utf8')) |
|
668 | check_user_report_notifications.delay(app.decode("utf8")) | |
|
669 | ||||
618 |
|
670 | |||
619 | @celery.task(queue="default") |
|
671 | @celery.task(queue="default") | |
620 | def alerting_reports(): |
|
672 | def alerting_reports(): | |
@@ -624,34 +676,33 b' def alerting_reports():' | |||||
624 | - which applications should have new alerts opened |
|
676 | - which applications should have new alerts opened | |
625 | """ |
|
677 | """ | |
626 |
|
678 | |||
627 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
679 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"]) | |
628 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
680 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"]) | |
629 | for app in apps: |
|
681 | for app in apps: | |
630 | log.warning('Notify for app: %s' % app) |
|
682 | log.warning("Notify for app: %s" % app) | |
631 | check_alerts.delay(app.decode('utf8')) |
|
683 | check_alerts.delay(app.decode("utf8")) | |
632 |
|
684 | |||
633 |
|
685 | |||
634 | @celery.task(queue="default", soft_time_limit=3600 * 4, |
|
686 | @celery.task( | |
635 | hard_time_limit=3600 * 4, max_retries=144) |
|
687 | queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144 | |
|
688 | ) | |||
636 | def logs_cleanup(resource_id, filter_settings): |
|
689 | def logs_cleanup(resource_id, filter_settings): | |
637 | request = get_current_request() |
|
690 | request = get_current_request() | |
638 | request.tm.begin() |
|
691 | request.tm.begin() | |
639 | es_query = { |
|
692 | es_query = { | |
640 | "query": { |
|
693 | "query": { | |
641 | "filtered": { |
|
694 | "filtered": {"filter": {"and": [{"term": {"resource_id": resource_id}}]}} | |
642 | "filter": { |
|
|||
643 | "and": [{"term": {"resource_id": resource_id}}] |
|
|||
644 | } |
|
|||
645 | } |
|
|||
646 | } |
|
695 | } | |
647 | } |
|
696 | } | |
648 |
|
697 | |||
649 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) |
|
698 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) | |
650 | if filter_settings['namespace']: |
|
699 | if filter_settings["namespace"]: | |
651 | query = query.filter(Log.namespace == filter_settings['namespace'][0]) |
|
700 | query = query.filter(Log.namespace == filter_settings["namespace"][0]) | |
652 | es_query['query']['filtered']['filter']['and'].append( |
|
701 | es_query["query"]["filtered"]["filter"]["and"].append( | |
653 | {"term": {"namespace": filter_settings['namespace'][0]}} |
|
702 | {"term": {"namespace": filter_settings["namespace"][0]}} | |
654 | ) |
|
703 | ) | |
655 | query.delete(synchronize_session=False) |
|
704 | query.delete(synchronize_session=False) | |
656 | request.tm.commit() |
|
705 | request.tm.commit() | |
657 | Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format('rcae_l_*', 'log'), body=es_query) |
|
706 | Datastores.es.transport.perform_request( | |
|
707 | "DELETE", "/{}/{}/_query".format("rcae_l_*", "log"), body=es_query | |||
|
708 | ) |
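logs_cleanup deletes the same slice of data twice, once from SQL and once from Elasticsearch, keyed by the same resource_id filter so the two stores stay in step. A sketch of the paired filters; note the /_query endpoint is the old ES 1.x delete-by-query API this codebase targets, while later versions moved to _delete_by_query:

    resource_id = 42  # illustrative
    es_query = {
        "query": {
            "filtered": {"filter": {"and": [{"term": {"resource_id": resource_id}}]}}
        }
    }
    # DBSession.query(Log).filter(Log.resource_id == resource_id).delete(synchronize_session=False)
    # Datastores.es.transport.perform_request(
    #     "DELETE", "/{}/{}/_query".format("rcae_l_*", "log"), body=es_query
    # )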
@@ -14,6 +14,7 b'' | |||||
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
|
17 | ||||
17 | def filter_callable(structure, section=None): |
|
18 | def filter_callable(structure, section=None): | |
18 | structure['SOMEVAL'] = '***REMOVED***' |
|
19 | structure["SOMEVAL"] = "***REMOVED***" | |
19 | return structure |
|
20 | return structure |
@@ -43,7 +43,7 b' _ = str' | |||||
43 | strip_filter = lambda x: x.strip() if x else None |
|
43 | strip_filter = lambda x: x.strip() if x else None | |
44 | uppercase_filter = lambda x: x.upper() if x else None |
|
44 | uppercase_filter = lambda x: x.upper() if x else None | |
45 |
|
45 | |||
46 | FALSE_VALUES = ('false', '', False, None) |
|
46 | FALSE_VALUES = ("false", "", False, None) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | class CSRFException(Exception): |
|
49 | class CSRFException(Exception): | |
@@ -51,11 +51,14 b' class CSRFException(Exception):' | |||||
51 |
|
51 | |||
52 |
|
52 | |||
53 | class ReactorForm(SecureForm): |
|
53 | class ReactorForm(SecureForm): | |
54 | def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None, |
|
54 | def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs): | |
55 | **kwargs): |
|
55 | super(ReactorForm, self).__init__( | |
56 | super(ReactorForm, self).__init__(formdata=formdata, obj=obj, |
|
56 | formdata=formdata, | |
|
57 | obj=obj, | |||
57 | prefix=prefix, |
|
58 | prefix=prefix, | |
58 | csrf_context=csrf_context, **kwargs) |
|
59 | csrf_context=csrf_context, | |
|
60 | **kwargs | |||
|
61 | ) | |||
59 | self._csrf_context = csrf_context |
|
62 | self._csrf_context = csrf_context | |
60 |
|
63 | |||
61 | def generate_csrf_token(self, csrf_context): |
|
64 | def generate_csrf_token(self, csrf_context): | |
@@ -63,14 +66,14 b' class ReactorForm(SecureForm):' | |||||
63 |
|
66 | |||
64 | def validate_csrf_token(self, field): |
|
67 | def validate_csrf_token(self, field): | |
65 | request = self._csrf_context or pyramid.threadlocal.get_current_request() |
|
68 | request = self._csrf_context or pyramid.threadlocal.get_current_request() | |
66 | is_from_auth_token = 'auth:auth_token' in request.effective_principals |
|
69 | is_from_auth_token = "auth:auth_token" in request.effective_principals | |
67 | if is_from_auth_token: |
|
70 | if is_from_auth_token: | |
68 | return True |
|
71 | return True | |
69 |
|
72 | |||
70 | if field.data != field.current_token: |
|
73 | if field.data != field.current_token: | |
71 | # try to save the day by using token from angular |
|
74 | # try to save the day by using token from angular | |
72 | if request.headers.get('X-XSRF-TOKEN') != field.current_token: |
|
75 | if request.headers.get("X-XSRF-TOKEN") != field.current_token: | |
73 | raise CSRFException('Invalid CSRF token') |
|
76 | raise CSRFException("Invalid CSRF token") | |
74 |
|
77 | |||
75 | @property |
|
78 | @property | |
76 | def errors_dict(self): |
|
79 | def errors_dict(self): | |
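validate_csrf_token accepts the token from either the form field or the X-XSRF-TOKEN header that the Angular client sends. A condensed sketch of that decision, with `request` standing in for a Pyramid request object:

    def token_is_valid(field_data, current_token, request):
        if field_data == current_token:
            return True  # normal form submission
        if request.headers.get("X-XSRF-TOKEN") == current_token:
            return True  # header fallback used by the JS client
        raise CSRFException("Invalid CSRF token")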
@@ -105,45 +108,47 b' class ReactorForm(SecureForm):' | |||||
105 |
|
108 | |||
106 | class SignInForm(ReactorForm): |
|
109 | class SignInForm(ReactorForm): | |
107 | came_from = wtforms.HiddenField() |
|
110 | came_from = wtforms.HiddenField() | |
108 | sign_in_user_name = wtforms.StringField(_('User Name')) |
|
111 | sign_in_user_name = wtforms.StringField(_("User Name")) | |
109 | sign_in_user_password = wtforms.PasswordField(_('Password')) |
|
112 | sign_in_user_password = wtforms.PasswordField(_("Password")) | |
110 |
|
113 | |||
111 | ignore_labels = ['submit'] |
|
114 | ignore_labels = ["submit"] | |
112 | css_classes = {'submit': 'btn btn-primary'} |
|
115 | css_classes = {"submit": "btn btn-primary"} | |
113 |
|
116 | |||
114 | html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'}, |
|
117 | html_attrs = { | |
115 | 'sign_in_user_password': { |
|
118 | "sign_in_user_name": {"placeholder": "Your login"}, | |
116 | 'placeholder': 'Your password'}} |
|
119 | "sign_in_user_password": {"placeholder": "Your password"}, | |
|
120 | } | |||
117 |
|
121 | |||
118 |
|
122 | |||
119 | from wtforms.widgets import html_params, HTMLString |
|
123 | from wtforms.widgets import html_params, HTMLString | |
120 |
|
124 | |||
121 |
|
125 | |||
122 | def select_multi_checkbox(field, ul_class='set', **kwargs): |
|
126 | def select_multi_checkbox(field, ul_class="set", **kwargs): | |
123 | """Render a multi-checkbox widget""" |
|
127 | """Render a multi-checkbox widget""" | |
124 | kwargs.setdefault('type', 'checkbox') |
|
128 | kwargs.setdefault("type", "checkbox") | |
125 | field_id = kwargs.pop('id', field.id) |
|
129 | field_id = kwargs.pop("id", field.id) | |
126 | html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)] |
|
130 | html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)] | |
127 | for value, label, checked in field.iter_choices(): |
|
131 | for value, label, checked in field.iter_choices(): | |
128 | choice_id = '%s-%s' % (field_id, value) |
|
132 | choice_id = "%s-%s" % (field_id, value) | |
129 | options = dict(kwargs, name=field.name, value=value, id=choice_id) |
|
133 | options = dict(kwargs, name=field.name, value=value, id=choice_id) | |
130 | if checked: |
|
134 | if checked: | |
131 | options['checked'] = 'checked' |
|
135 | options["checked"] = "checked" | |
132 | html.append('<li><input %s /> ' % html_params(**options)) |
|
136 | html.append("<li><input %s /> " % html_params(**options)) | |
133 | html.append('<label for="%s">%s</label></li>' % (choice_id, label)) |
|
137 | html.append('<label for="%s">%s</label></li>' % (choice_id, label)) | |
134 | html.append('</ul>') |
|
138 | html.append("</ul>") | |
135 | return HTMLString(''.join(html)) |
|
139 | return HTMLString("".join(html)) | |
136 |
|
140 | |||
137 |
|
141 | |||
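A sketch of wiring the widget into a field; the form class and choices here are illustrative:

    import wtforms

    class ExampleForm(wtforms.Form):
        applications = wtforms.SelectMultipleField(
            "Applications",
            choices=[("1", "app one"), ("2", "app two")],
            widget=select_multi_checkbox,  # renders the <ul>/<li> checkbox list above
        )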
138 | def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs): |
|
142 | def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs): | |
139 | """Render a button widget""" |
|
143 | """Render a button widget""" | |
140 | kwargs.setdefault('type', 'button') |
|
144 | kwargs.setdefault("type", "button") | |
141 | field_id = kwargs.pop('id', field.id) |
|
145 | field_id = kwargs.pop("id", field.id) | |
142 | kwargs.setdefault('value', field.label.text) |
|
146 | kwargs.setdefault("value", field.label.text) | |
143 | html = ['<button %s>%s</button>' % (html_params(id=field_id, |
|
147 | html = [ | |
144 | class_=button_cls), |
|
148 | "<button %s>%s</button>" | |
145 | kwargs['value'],)] |
|
149 | % (html_params(id=field_id, class_=button_cls), kwargs["value"]) | |
146 | return HTMLString(''.join(html)) |
|
150 | ] | |
|
151 | return HTMLString("".join(html)) | |||
147 |
|
152 | |||
148 |
|
153 | |||
149 | def clean_whitespace(value): |
|
154 | def clean_whitespace(value): | |
@@ -157,33 +162,32 b' def found_username_validator(form, field):' | |||||
157 | # sets user to recover in email validator |
|
162 | # sets user to recover in email validator | |
158 | form.field_user = user |
|
163 | form.field_user = user | |
159 | if not user: |
|
164 | if not user: | |
160 | raise wtforms.ValidationError('This username does not exist') |
|
165 | raise wtforms.ValidationError("This username does not exist") | |
161 |
|
166 | |||
162 |
|
167 | |||
163 | def found_username_email_validator(form, field): |
|
168 | def found_username_email_validator(form, field): | |
164 | user = UserService.by_email(field.data) |
|
169 | user = UserService.by_email(field.data) | |
165 | if not user: |
|
170 | if not user: | |
166 | raise wtforms.ValidationError('Email is incorrect') |
|
171 | raise wtforms.ValidationError("Email is incorrect") | |
167 |
|
172 | |||
168 |
|
173 | |||
169 | def unique_username_validator(form, field): |
|
174 | def unique_username_validator(form, field): | |
170 | user = UserService.by_user_name(field.data) |
|
175 | user = UserService.by_user_name(field.data) | |
171 | if user: |
|
176 | if user: | |
172 | raise wtforms.ValidationError('This username already exists in system') |
|
177 | raise wtforms.ValidationError("This username already exists in system") | |
173 |
|
178 | |||
174 |
|
179 | |||
175 | def unique_groupname_validator(form, field): |
|
180 | def unique_groupname_validator(form, field): | |
176 | group = GroupService.by_group_name(field.data) |
|
181 | group = GroupService.by_group_name(field.data) | |
177 | mod_group = getattr(form, '_modified_group', None) |
|
182 | mod_group = getattr(form, "_modified_group", None) | |
178 | if group and (not mod_group or mod_group.id != group.id): |
|
183 | if group and (not mod_group or mod_group.id != group.id): | |
179 | raise wtforms.ValidationError( |
|
184 | raise wtforms.ValidationError("This group name already exists in system") | |
180 | 'This group name already exists in system') |
|
|||
181 |
|
185 | |||
182 |
|
186 | |||
183 | def unique_email_validator(form, field): |
|
187 | def unique_email_validator(form, field): | |
184 | user = UserService.by_email(field.data) |
|
188 | user = UserService.by_email(field.data) | |
185 | if user: |
|
189 | if user: | |
186 | raise wtforms.ValidationError('This email already exists in system') |
|
190 | raise wtforms.ValidationError("This email already exists in system") | |
187 |
|
191 | |||
188 |
|
192 | |||
189 | def email_validator(form, field): |
|
193 | def email_validator(form, field): | |
@@ -196,145 +200,168 b' def email_validator(form, field):' | |||||
196 |
|
200 | |||
197 | def unique_alert_email_validator(form, field): |
|
201 | def unique_alert_email_validator(form, field): | |
198 | q = DBSession.query(AlertChannel) |
|
202 | q = DBSession.query(AlertChannel) | |
199 | q = q.filter(AlertChannel.channel_name == 'email') |
|
203 | q = q.filter(AlertChannel.channel_name == "email") | |
200 | q = q.filter(AlertChannel.channel_value == field.data) |
|
204 | q = q.filter(AlertChannel.channel_value == field.data) | |
201 | email = q.first() |
|
205 | email = q.first() | |
202 | if email: |
|
206 | if email: | |
203 | raise wtforms.ValidationError( |
|
207 | raise wtforms.ValidationError("This email already exists in alert system") | |
204 | 'This email already exists in alert system') |
|
|||
205 |
|
208 | |||
206 |
|
209 | |||
207 | def blocked_email_validator(form, field): |
|
210 | def blocked_email_validator(form, field): | |
208 | blocked_emails = [ |
|
211 | blocked_emails = [ | |
209 | 'goood-mail.org', |
|
212 | "goood-mail.org", | |
210 | 'shoeonlineblog.com', |
|
213 | "shoeonlineblog.com", | |
211 | 'louboutinemart.com', |
|
214 | "louboutinemart.com", | |
212 | 'guccibagshere.com', |
|
215 | "guccibagshere.com", | |
213 | 'nikeshoesoutletforsale.com', |
|
216 | "nikeshoesoutletforsale.com", | |
214 | ] |
|
217 | ] | |
215 | data = field.data or '' |
|
218 | data = field.data or "" | |
216 | domain = data.split('@')[-1] |
|
219 | domain = data.split("@")[-1] | |
217 | if domain in blocked_emails: |
|
220 | if domain in blocked_emails: | |
218 | raise wtforms.ValidationError('Don\'t spam') |
|
221 | raise wtforms.ValidationError("Don't spam") | |
219 |
|
222 | |||
220 |
|
223 | |||
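The validator's rule reduces to: take the part after the last "@" and refuse known spam domains. The same check as a standalone sketch:

    def is_blocked(address, blocked=("guccibagshere.com",)):
        domain = (address or "").split("@")[-1]
        return domain in blocked

    assert is_blocked("x@guccibagshere.com")
    assert not is_blocked("x@example.org")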
221 | def old_password_validator(form, field): |
|
224 | def old_password_validator(form, field): | |
222 | if not UserService.check_password(field.user, field.data or ''): |
|
225 | if not UserService.check_password(field.user, field.data or ""): | |
223 | raise wtforms.ValidationError('You need to enter correct password') |
|
226 | raise wtforms.ValidationError("You need to enter correct password") | |
224 |
|
227 | |||
225 |
|
228 | |||
226 | class UserRegisterForm(ReactorForm): |
|
229 | class UserRegisterForm(ReactorForm): | |
227 | user_name = wtforms.StringField( |
|
230 | user_name = wtforms.StringField( | |
228 | _('User Name'), |
|
231 | _("User Name"), | |
229 | filters=[strip_filter], |
|
232 | filters=[strip_filter], | |
230 | validators=[ |
|
233 | validators=[ | |
231 | wtforms.validators.Length(min=2, max=30), |
|
234 | wtforms.validators.Length(min=2, max=30), | |
232 | wtforms.validators.Regexp( |
|
235 | wtforms.validators.Regexp( | |
233 | re.compile(r'^[\.\w-]+$', re.UNICODE), |
|
236 | re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used" | |
234 | message="Invalid characters used"), |
|
237 | ), | |
235 | unique_username_validator, |
|
238 | unique_username_validator, | |
236 | wtforms.validators.DataRequired() |
|
239 | wtforms.validators.DataRequired(), | |
237 | ]) |
|
240 | ], | |
|
241 | ) | |||
238 |
|
242 | |||
239 | user_password = wtforms.PasswordField(_('User Password'), |
|
243 | user_password = wtforms.PasswordField( | |
|
244 | _("User Password"), | |||
240 | filters=[strip_filter], |
|
245 | filters=[strip_filter], | |
241 | validators=[ |
|
246 | validators=[ | |
242 | wtforms.validators.Length(min=4), |
|
247 | wtforms.validators.Length(min=4), | |
243 | wtforms.validators.DataRequired() |
|
248 | wtforms.validators.DataRequired(), | |
244 | ]) |
|
249 | ], | |
|
250 | ) | |||
245 |
|
251 | |||
246 | email = wtforms.StringField(_('Email Address'), |
|
252 | email = wtforms.StringField( | |
|
253 | _("Email Address"), | |||
247 | filters=[strip_filter], |
|
254 | filters=[strip_filter], | |
248 | validators=[email_validator, |
|
255 | validators=[ | |
|
256 | email_validator, | |||
249 | unique_email_validator, |
|
257 | unique_email_validator, | |
250 | blocked_email_validator, |
|
258 | blocked_email_validator, | |
251 | wtforms.validators.DataRequired()]) |
|
259 | wtforms.validators.DataRequired(), | |
252 | first_name = wtforms.HiddenField(_('First Name')) |
|
260 | ], | |
253 | last_name = wtforms.HiddenField(_('Last Name')) |
|
261 | ) | |
|
262 | first_name = wtforms.HiddenField(_("First Name")) | |||
|
263 | last_name = wtforms.HiddenField(_("Last Name")) | |||
254 |
|
264 | |||
255 | ignore_labels = ['submit'] |
|
265 | ignore_labels = ["submit"] | |
256 | css_classes = {'submit': 'btn btn-primary'} |
|
266 | css_classes = {"submit": "btn btn-primary"} | |
257 |
|
267 | |||
258 | html_attrs = {'user_name': {'placeholder': 'Your login'}, |
|
268 | html_attrs = { | |
259 | 'user_password': {'placeholder': 'Your password'}, |
|
269 | "user_name": {"placeholder": "Your login"}, | |
260 | 'email': {'placeholder': 'Your email'}} |
|
270 | "user_password": {"placeholder": "Your password"}, | |
|
271 | "email": {"placeholder": "Your email"}, | |||
|
272 | } | |||
261 |
|
273 | |||
262 |
|
274 | |||
263 | class UserCreateForm(UserRegisterForm): |
|
275 | class UserCreateForm(UserRegisterForm): | |
264 | status = wtforms.BooleanField('User status', |
|
276 | status = wtforms.BooleanField("User status", false_values=FALSE_VALUES) | |
265 | false_values=FALSE_VALUES) |
|
|||
266 |
|
277 | |||
267 |
|
278 | |||
268 | class UserUpdateForm(UserCreateForm): |
|
279 | class UserUpdateForm(UserCreateForm): | |
269 | user_name = None |
|
280 | user_name = None | |
270 | user_password = wtforms.PasswordField(_('User Password'), |
|
281 | user_password = wtforms.PasswordField( | |
|
282 | _("User Password"), | |||
271 | filters=[strip_filter], |
|
283 | filters=[strip_filter], | |
272 | validators=[ |
|
284 | validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()], | |
273 | wtforms.validators.Length(min=4), |
|
285 | ) | |
274 | wtforms.validators.Optional() |
|
286 | email = wtforms.StringField( | |
275 | ]) |
|
287 | _("Email Address"), | |
276 | email = wtforms.StringField(_('Email Address'), |
|
|||
277 | filters=[strip_filter], |
|
288 | filters=[strip_filter], | |
278 | validators=[email_validator, |
|
289 | validators=[email_validator, wtforms.validators.DataRequired()], | |
279 | wtforms.validators.DataRequired()]) |
|
290 | ) | |
280 |
|
291 | |||
281 |
|
292 | |||
282 | class LostPasswordForm(ReactorForm): |
|
293 | class LostPasswordForm(ReactorForm): | |
283 | email = wtforms.StringField(_('Email Address'), |
|
294 | email = wtforms.StringField( | |
|
295 | _("Email Address"), | |||
284 | filters=[strip_filter], |
|
296 | filters=[strip_filter], | |
285 | validators=[email_validator, |
|
297 | validators=[ | |
|
298 | email_validator, | |||
286 | found_username_email_validator, |
|
299 | found_username_email_validator, | |
287 | wtforms.validators.DataRequired()]) |
|
300 | wtforms.validators.DataRequired(), | |
|
301 | ], | |||
|
302 | ) | |||
288 |
|
303 | |||
289 | submit = wtforms.SubmitField(_('Reset password')) |
|
304 | submit = wtforms.SubmitField(_("Reset password")) | |
290 | ignore_labels = ['submit'] |
|
305 | ignore_labels = ["submit"] | |
291 | css_classes = {'submit': 'btn btn-primary'} |
|
306 | css_classes = {"submit": "btn btn-primary"} | |
292 |
|
307 | |||
293 |
|
308 | |||
294 | class ChangePasswordForm(ReactorForm): |
|
309 | class ChangePasswordForm(ReactorForm): | |
295 | old_password = wtforms.PasswordField( |
|
310 | old_password = wtforms.PasswordField( | |
296 | 'Old Password', |
|
311 | "Old Password", | |
297 | filters=[strip_filter], |
|
312 | filters=[strip_filter], | |
298 | validators=[old_password_validator, |
|
313 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
299 | wtforms.validators.DataRequired()]) |
|
314 | ) | |
300 |
|
315 | |||
301 | new_password = wtforms.PasswordField( |
|
316 | new_password = wtforms.PasswordField( | |
302 | 'New Password', |
|
317 | "New Password", | |
303 | filters=[strip_filter], |
|
318 | filters=[strip_filter], | |
304 | validators=[wtforms.validators.Length(min=4), |
|
319 | validators=[ | |
305 | wtforms.validators.DataRequired()]) |
|
320 | wtforms.validators.Length(min=4), | |
|
321 | wtforms.validators.DataRequired(), | |||
|
322 | ], | |||
|
323 | ) | |||
306 | new_password_confirm = wtforms.PasswordField( |
|
324 | new_password_confirm = wtforms.PasswordField( | |
307 | 'Confirm Password', |
|
325 | "Confirm Password", | |
308 | filters=[strip_filter], |
|
326 | filters=[strip_filter], | |
309 | validators=[wtforms.validators.EqualTo('new_password'), |
|
327 | validators=[ | |
310 | wtforms.validators.DataRequired()]) |
|
328 | wtforms.validators.EqualTo("new_password"), | |
311 | submit = wtforms.SubmitField('Change Password') |
|
329 | wtforms.validators.DataRequired(), | |
312 | ignore_labels = ['submit'] |
|
330 | ], | |
313 | css_classes = {'submit': 'btn btn-primary'} |
|
331 | ) | |
|
332 | submit = wtforms.SubmitField("Change Password") | |||
|
333 | ignore_labels = ["submit"] | |||
|
334 | css_classes = {"submit": "btn btn-primary"} | |||
314 |
|
335 | |||
315 |
|
336 | |||
316 | class CheckPasswordForm(ReactorForm): |
|
337 | class CheckPasswordForm(ReactorForm): | |
317 | password = wtforms.PasswordField( |
|
338 | password = wtforms.PasswordField( | |
318 | 'Password', |
|
339 | "Password", | |
319 | filters=[strip_filter], |
|
340 | filters=[strip_filter], | |
320 | validators=[old_password_validator, |
|
341 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
321 | wtforms.validators.DataRequired()]) |
|
342 | ) | |
322 |
|
343 | |||
323 |
|
344 | |||
324 | class NewPasswordForm(ReactorForm): |
|
345 | class NewPasswordForm(ReactorForm): | |
325 | new_password = wtforms.PasswordField( |
|
346 | new_password = wtforms.PasswordField( | |
326 | 'New Password', |
|
347 | "New Password", | |
327 | filters=[strip_filter], |
|
348 | filters=[strip_filter], | |
328 | validators=[wtforms.validators.Length(min=4), |
|
349 | validators=[ | |
329 | wtforms.validators.DataRequired()]) |
|
350 | wtforms.validators.Length(min=4), | |
|
351 | wtforms.validators.DataRequired(), | |||
|
352 | ], | |||
|
353 | ) | |||
330 | new_password_confirm = wtforms.PasswordField( |
|
354 | new_password_confirm = wtforms.PasswordField( | |
331 | 'Confirm Password', |
|
355 | "Confirm Password", | |
332 | filters=[strip_filter], |
|
356 | filters=[strip_filter], | |
333 | validators=[wtforms.validators.EqualTo('new_password'), |
|
357 | validators=[ | |
334 | wtforms.validators.DataRequired()]) |
|
358 | wtforms.validators.EqualTo("new_password"), | |
335 | submit = wtforms.SubmitField('Set Password') |
|
359 | wtforms.validators.DataRequired(), | |
336 | ignore_labels = ['submit'] |
|
360 | ], | |
337 | css_classes = {'submit': 'btn btn-primary'} |
|
361 | ) | |
|
362 | submit = wtforms.SubmitField("Set Password") | |||
|
363 | ignore_labels = ["submit"] | |||
|
364 | css_classes = {"submit": "btn btn-primary"} | |||
338 |
|
365 | |||
339 |
|
366 | |||
340 | class CORSTextAreaField(wtforms.StringField): |
|
367 | class CORSTextAreaField(wtforms.StringField): | |
@@ -342,261 +369,290 b' class CORSTextAreaField(wtforms.StringField):' | |||||
342 | This field represents an HTML ``<textarea>`` and can be used to take |
|
369 | This field represents an HTML ``<textarea>`` and can be used to take | |
343 | multi-line input. |
|
370 | multi-line input. | |
344 | """ |
|
371 | """ | |
|
372 | ||||
345 | widget = wtforms.widgets.TextArea() |
|
373 | widget = wtforms.widgets.TextArea() | |
346 |
|
374 | |||
347 | def process_formdata(self, valuelist): |
|
375 | def process_formdata(self, valuelist): | |
348 | self.data = [] |
|
376 | self.data = [] | |
349 | if valuelist: |
|
377 | if valuelist: | |
350 | data = [x.strip() for x in valuelist[0].split('\n')] |
|
378 | data = [x.strip() for x in valuelist[0].split("\n")] | |
351 | for d in data: |
|
379 | for d in data: | |
352 | if not d: |
|
380 | if not d: | |
353 | continue |
|
381 | continue | |
354 | if d.startswith('www.'): |
|
382 | if d.startswith("www."): | |
355 | d = d[4:] |
|
383 | d = d[4:] | |
356 | if data: |
|
384 | if data: | |
357 | self.data.append(d) |
|
385 | self.data.append(d) | |
358 | else: |
|
386 | else: | |
359 | self.data = [] |
|
387 | self.data = [] | |
360 | self.data = '\n'.join(self.data) |
|
388 | self.data = "\n".join(self.data) | |
361 |
|
389 | |||
362 |
|
390 | |||
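process_formdata above normalizes the textarea into one domain per line: blanks are dropped and a leading "www." is stripped. The same rule as a standalone sketch:

    def normalize_domains(raw):
        out = []
        for d in (x.strip() for x in raw.split("\n")):
            if not d:
                continue
            if d.startswith("www."):
                d = d[4:]
            out.append(d)
        return "\n".join(out)

    assert normalize_domains("www.example.com\n\nfoo.org") == "example.com\nfoo.org"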
363 | class ApplicationCreateForm(ReactorForm): |
|
391 | class ApplicationCreateForm(ReactorForm): | |
364 | resource_name = wtforms.StringField( |
|
392 | resource_name = wtforms.StringField( | |
365 | _('Application name'), |
|
393 | _("Application name"), | |
366 | filters=[strip_filter], |
|
394 | filters=[strip_filter], | |
367 | validators=[wtforms.validators.Length(min=1), |
|
395 | validators=[ | |
368 | wtforms.validators.DataRequired()]) |
|
396 | wtforms.validators.Length(min=1), | |
|
397 | wtforms.validators.DataRequired(), | |||
|
398 | ], | |||
|
399 | ) | |||
369 |
|
400 | |||
370 | domains = CORSTextAreaField( |
|
401 | domains = CORSTextAreaField( | |
371 | _('Domain names for CORS headers '), |
|
402 | _("Domain names for CORS headers "), | |
372 | validators=[wtforms.validators.Length(min=1), |
|
403 | validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()], | |
373 | wtforms.validators.Optional()], |
|
404 | description="Required for Javascript error " | |
374 | description='Required for Javascript error ' |
|
405 | "tracking (one line one domain, skip http:// part)", | |
375 | 'tracking (one line one domain, skip http:// part)') |
|
406 | ) | |
376 |
|
407 | |||
377 | submit = wtforms.SubmitField(_('Create Application')) |
|
408 | submit = wtforms.SubmitField(_("Create Application")) | |
378 |
|
409 | |||
379 | ignore_labels = ['submit'] |
|
410 | ignore_labels = ["submit"] | |
380 | css_classes = {'submit': 'btn btn-primary'} |
|
411 | css_classes = {"submit": "btn btn-primary"} | |
381 | html_attrs = {'resource_name': {'placeholder': 'Application Name'}, |
|
412 | html_attrs = { | |
382 | 'uptime_url': {'placeholder': 'http://somedomain.com'}} |
|
413 | "resource_name": {"placeholder": "Application Name"}, | |
|
414 | "uptime_url": {"placeholder": "http://somedomain.com"}, | |||
|
415 | } | |||
383 |
|
416 | |||
384 |
|
417 | |||
385 | class ApplicationUpdateForm(ApplicationCreateForm): |
|
418 | class ApplicationUpdateForm(ApplicationCreateForm): | |
386 | default_grouping = wtforms.SelectField( |
|
419 | default_grouping = wtforms.SelectField( | |
387 | _('Default grouping for errors'), |
|
420 | _("Default grouping for errors"), | |
388 | choices=[('url_type', 'Error Type + location',), |
|
421 | choices=[ | |
389 | ('url_traceback', 'Traceback + location',), |
|
422 | ("url_type", "Error Type + location"), | |
390 | ('traceback_server', 'Traceback + Server',), ], |
|
423 | ("url_traceback", "Traceback + location"), | |
391 | default='url_traceback') |
|
424 | ("traceback_server", "Traceback + Server"), | |
|
425 | ], | |||
|
426 | default="url_traceback", | |||
|
427 | ) | |||
392 |
|
428 | |||
393 | error_report_threshold = wtforms.IntegerField( |
|
429 | error_report_threshold = wtforms.IntegerField( | |
394 | _('Alert on error reports'), |
|
430 | _("Alert on error reports"), | |
395 | validators=[ |
|
431 | validators=[ | |
396 | wtforms.validators.NumberRange(min=1), |
|
432 | wtforms.validators.NumberRange(min=1), | |
397 | wtforms.validators.DataRequired() |
|
433 | wtforms.validators.DataRequired(), | |
398 | ], |
|
434 | ], | |
399 | description='Application requires to send at least this amount of ' |
|
435 | description="Application requires to send at least this amount of " | |
400 | 'error reports per minute to open alert', |
|
436 | "error reports per minute to open alert", | |
401 | ) |
|
437 | ) | |
402 |
|
438 | |||
403 | slow_report_threshold = wtforms.IntegerField( |
|
439 | slow_report_threshold = wtforms.IntegerField( | |
404 | _('Alert on slow reports'), |
|
440 | _("Alert on slow reports"), | |
405 | validators=[wtforms.validators.NumberRange(min=1), |
|
441 | validators=[ | |
406 | wtforms.validators.DataRequired()], |
|
442 | wtforms.validators.NumberRange(min=1), | |
407 | description='Application requires to send at least this amount of ' |
|
443 | wtforms.validators.DataRequired(), | |
408 | 'slow reports per minute to open alert') |
|
444 | ], | |
|
445 | description="Application requires to send at least this amount of " | |||
|
446 | "slow reports per minute to open alert", | |||
|
447 | ) | |||
409 |
|
448 | |||
410 | allow_permanent_storage = wtforms.BooleanField( |
|
449 | allow_permanent_storage = wtforms.BooleanField( | |
411 | _('Permanent logs'), |
|
450 | _("Permanent logs"), | |
412 | false_values=FALSE_VALUES, |
|
451 | false_values=FALSE_VALUES, | |
413 | description=_( |
|
452 | description=_("Allow permanent storage of logs in separate DB partitions"), | |
414 | 'Allow permanent storage of logs in separate DB partitions')) |
|
453 | ) | |
415 |
|
454 | |||
416 | submit = wtforms.SubmitField(_('Create Application')) |
|
455 | submit = wtforms.SubmitField(_("Create Application")) | |
417 |
|
456 | |||
418 |
|
457 | |||
419 | class UserSearchSchemaForm(ReactorForm): |
|
458 | class UserSearchSchemaForm(ReactorForm): | |
420 | user_name = wtforms.StringField('User Name', |
|
459 | user_name = wtforms.StringField("User Name", filters=[strip_filter]) | |
421 | filters=[strip_filter], ) |
|
|||
422 |
|
460 | |||
423 | submit = wtforms.SubmitField(_('Search User')) |
|
461 | submit = wtforms.SubmitField(_("Search User")) | |
424 | ignore_labels = ['submit'] |
|
462 | ignore_labels = ["submit"] | |
425 | css_classes = {'submit': 'btn btn-primary'} |
|
463 | css_classes = {"submit": "btn btn-primary"} | |
426 |
|
464 | |||
427 | '<li class="user_exists"><span></span></li>' |
|
465 | '<li class="user_exists"><span></span></li>' | |
428 |
|
466 | |||
429 |
|
467 | |||
430 | class YesNoForm(ReactorForm): |
|
468 | class YesNoForm(ReactorForm): | |
431 | no = wtforms.SubmitField('No', default='') |
|
469 | no = wtforms.SubmitField("No", default="") | |
432 | yes = wtforms.SubmitField('Yes', default='') |
|
470 | yes = wtforms.SubmitField("Yes", default="") | |
433 | ignore_labels = ['submit'] |
|
471 | ignore_labels = ["submit"] | |
434 | css_classes = {'submit': 'btn btn-primary'} |
|
472 | css_classes = {"submit": "btn btn-primary"} | |
435 |
|
473 | |||
436 |
|
474 | |||
437 | status_codes = [('', 'All'), ('500', '500'), ('404', '404')] |
|
475 | status_codes = [("", "All"), ("500", "500"), ("404", "404")] | |
438 |
|
476 | |||
439 | priorities = [('', 'All')] |
|
477 | priorities = [("", "All")] | |
440 | for i in range(1, 11): |
|
478 | for i in range(1, 11): | |
441 | priorities.append((str(i), str(i),)) |
|
479 | priorities.append((str(i), str(i))) | |
442 |
|
480 | |||
443 | report_status_choices = [('', 'All',), |
|
481 | report_status_choices = [ | |
444 | ('never_reviewed', 'Never revieved',), |
|
482 | ("", "All"), | |
445 | ('reviewed', 'Revieved',), |
|
483 | ("never_reviewed", "Never revieved"), | |
446 | ('public', 'Public',), |
|
484 | ("reviewed", "Revieved"), | |
447 | ('fixed', 'Fixed',), ] |
|
485 | ("public", "Public"), | |
|
486 | ("fixed", "Fixed"), | |||
|
487 | ] | |||
448 |
|
488 | |||
449 |
|
489 | |||
450 | class ReportBrowserForm(ReactorForm): |
|
490 | class ReportBrowserForm(ReactorForm): | |
451 | applications = wtforms.SelectMultipleField('Applications', |
|
491 | applications = wtforms.SelectMultipleField( | |
452 | widget=select_multi_checkbox) |
|
492 | "Applications", widget=select_multi_checkbox | |
453 | http_status = wtforms.SelectField('HTTP Status', choices=status_codes) |
|
493 | ) | |
454 | priority = wtforms.SelectField('Priority', choices=priorities, default='') |
|
494 | http_status = wtforms.SelectField("HTTP Status", choices=status_codes) | |
455 | start_date = wtforms.DateField('Start Date') |
|
495 | priority = wtforms.SelectField("Priority", choices=priorities, default="") | |
456 | end_date = wtforms.DateField('End Date') |
|
496 | start_date = wtforms.DateField("Start Date") | |
457 | error = wtforms.StringField('Error') |
|
497 | end_date = wtforms.DateField("End Date") | |
458 | url_path = wtforms.StringField('URL Path') |
|
498 | error = wtforms.StringField("Error") | |
459 | url_domain = wtforms.StringField('URL Domain') |
|
499 | url_path = wtforms.StringField("URL Path") | |
460 | report_status = wtforms.SelectField('Report status', |
|
500 | url_domain = wtforms.StringField("URL Domain") | |
461 | choices=report_status_choices, |
|
501 | report_status = wtforms.SelectField( | |
462 | default='') |
|
502 | "Report status", choices=report_status_choices, default="" | |
463 | submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">' |
|
503 | ) | |
464 | '</span> Filter results', |
|
504 | submit = wtforms.SubmitField( | |
465 | widget=button_widget) |
|
505 | '<span class="glyphicon glyphicon-search">' "</span> Filter results", | |
466 |
|
506 | widget=button_widget, | ||
467 | ignore_labels = ['submit'] |
|
507 | ) | |
468 | css_classes = {'submit': 'btn btn-primary'} |
|
508 | ||
469 |
|
509 | ignore_labels = ["submit"] | ||
470 |
|
510 | css_classes = {"submit": "btn btn-primary"} | ||
471 | slow_report_status_choices = [('', 'All',), |
|
511 | ||
472 | ('never_reviewed', 'Never revieved',), |
|
512 | ||
473 | ('reviewed', 'Revieved',), |
|
513 | slow_report_status_choices = [ | |
474 | ('public', 'Public',), ] |
|
514 | ("", "All"), | |
|
515 | ("never_reviewed", "Never revieved"), | |||
|
516 | ("reviewed", "Revieved"), | |||
|
517 | ("public", "Public"), | |||
|
518 | ] | |||
475 |
|
519 | |||
476 |
|
520 | |||
477 | class BulkOperationForm(ReactorForm): |
|
521 | class BulkOperationForm(ReactorForm): | |
478 | applications = wtforms.SelectField('Applications') |
|
522 | applications = wtforms.SelectField("Applications") | |
479 | start_date = wtforms.DateField( |
|
523 | start_date = wtforms.DateField( | |
480 | 'Start Date', |
|
524 | "Start Date", | |
481 | default=lambda: datetime.datetime.utcnow() - datetime.timedelta( |
|
525 | default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90), | |
482 | days=90)) |
|
526 | ) | |
483 | end_date = wtforms.DateField('End Date') |
|
527 | end_date = wtforms.DateField("End Date") | |
484 | confirm = wtforms.BooleanField( |
|
528 | confirm = wtforms.BooleanField( | |
485 | 'Confirm operation', |
|
529 | "Confirm operation", validators=[wtforms.validators.DataRequired()] | |
486 | validators=[wtforms.validators.DataRequired()]) |
|
530 | ) | |
487 |
|
531 | |||
488 |
|
532 | |||
489 | class LogBrowserForm(ReactorForm): |
|
533 | class LogBrowserForm(ReactorForm): | |
490 | applications = wtforms.SelectMultipleField('Applications', |
|
534 | applications = wtforms.SelectMultipleField( | |
491 | widget=select_multi_checkbox) |
|
535 | "Applications", widget=select_multi_checkbox | |
492 | start_date = wtforms.DateField('Start Date') |
|
536 | ) | |
493 | log_level = wtforms.StringField('Log level') |
|
537 | start_date = wtforms.DateField("Start Date") | |
494 | message = wtforms.StringField('Message') |
|
538 | log_level = wtforms.StringField("Log level") | |
495 | namespace = wtforms.StringField('Namespace') |
|
539 | message = wtforms.StringField("Message") | |
|
540 | namespace = wtforms.StringField("Namespace") | |||
496 | submit = wtforms.SubmitField( |
|
541 | submit = wtforms.SubmitField( | |
497 | '<span class="glyphicon glyphicon-search"></span> Filter results', |
|
542 | '<span class="glyphicon glyphicon-search"></span> Filter results', | |
498 | widget=button_widget) |
|
543 | widget=button_widget, | |
499 | ignore_labels = ['submit'] |
|
544 | ) | |
500 | css_classes = {'submit': 'btn btn-primary'} |
|
545 | ignore_labels = ["submit"] | |
|
546 | css_classes = {"submit": "btn btn-primary"} | |||
501 |
|
547 | |||
502 |
|
548 | |||
503 | class CommentForm(ReactorForm): |
|
549 | class CommentForm(ReactorForm): | |
504 | body = wtforms.TextAreaField('Comment', validators=[ |
|
550 | body = wtforms.TextAreaField( | |
|
551 | "Comment", | |||
|
552 | validators=[ | |||
505 | wtforms.validators.Length(min=1), |
|
553 | wtforms.validators.Length(min=1), | |
506 | wtforms.validators.DataRequired() |
|
554 | wtforms.validators.DataRequired(), | |
507 | ]) |
|
555 | ], | |
508 | submit = wtforms.SubmitField('Comment', ) |
|
556 | ) | |
509 | ignore_labels = ['submit'] |
|
557 | submit = wtforms.SubmitField("Comment") | |
510 | css_classes = {'submit': 'btn btn-primary'} |
|
558 | ignore_labels = ["submit"] | |
|
559 | css_classes = {"submit": "btn btn-primary"} | |||
511 |
|
560 | |||
512 |
|
561 | |||
513 | class EmailChannelCreateForm(ReactorForm): |
|
562 | class EmailChannelCreateForm(ReactorForm): | |
514 | email = wtforms.StringField(_('Email Address'), |
|
563 | email = wtforms.StringField( | |
|
564 | _("Email Address"), | |||
515 | filters=[strip_filter], |
|
565 | filters=[strip_filter], | |
516 | validators=[email_validator, |
|
566 | validators=[ | |
|
567 | email_validator, | |||
517 | unique_alert_email_validator, |
|
568 | unique_alert_email_validator, | |
518 | wtforms.validators.DataRequired()]) |
|
569 | wtforms.validators.DataRequired(), | |
519 | submit = wtforms.SubmitField('Add email channel', ) |
|
570 | ], | |
520 | ignore_labels = ['submit'] |
|
571 | ) | |
521 | css_classes = {'submit': 'btn btn-primary'} |
|
572 | submit = wtforms.SubmitField("Add email channel") | |
|
573 | ignore_labels = ["submit"] | |||
|
574 | css_classes = {"submit": "btn btn-primary"} | |||
522 |
|
575 | |||
523 |
|
576 | |||
524 | def gen_user_profile_form(): |
|
577 | def gen_user_profile_form(): | |
525 | class UserProfileForm(ReactorForm): |
|
578 | class UserProfileForm(ReactorForm): | |
526 | email = wtforms.StringField( |
|
579 | email = wtforms.StringField( | |
527 | _('Email Address'), |
|
580 | _("Email Address"), | |
528 |
validators=[email_validator, wtforms.validators.DataRequired()] |
|
581 | validators=[email_validator, wtforms.validators.DataRequired()], | |
529 | first_name = wtforms.StringField(_('First Name')) |
|
582 | ) | |
530 |
|
|
583 | first_name = wtforms.StringField(_("First Name")) | |
531 |
|
|
584 | last_name = wtforms.StringField(_("Last Name")) | |
532 |
company_ |
|
585 | company_name = wtforms.StringField(_("Company Name")) | |
533 |
|
|
586 | company_address = wtforms.TextAreaField(_("Company Address")) | |
534 |
|
|
587 | zip_code = wtforms.StringField(_("ZIP code")) | |
535 | notifications = wtforms.BooleanField('Account notifications', |
|
588 | city = wtforms.StringField(_("City")) | |
536 | false_values=FALSE_VALUES) |
|
589 | notifications = wtforms.BooleanField( | |
537 | submit = wtforms.SubmitField(_('Update Account')) |
|
590 | "Account notifications", false_values=FALSE_VALUES | |
538 | ignore_labels = ['submit'] |
|
591 | ) | |
539 | css_classes = {'submit': 'btn btn-primary'} |
|
592 | submit = wtforms.SubmitField(_("Update Account")) | |
|
593 | ignore_labels = ["submit"] | |||
|
594 | css_classes = {"submit": "btn btn-primary"} | |||
540 |
|
595 | |||
541 | return UserProfileForm |
|
596 | return UserProfileForm | |
542 |
|
597 | |||
543 |
|
598 | |||
544 | class PurgeAppForm(ReactorForm): |
|
599 | class PurgeAppForm(ReactorForm): | |
545 | resource_id = wtforms.HiddenField( |
|
600 | resource_id = wtforms.HiddenField( | |
546 | 'App Id', |
|
601 | "App Id", validators=[wtforms.validators.DataRequired()] | |
547 | validators=[wtforms.validators.DataRequired()]) |
|
602 | ) | |
548 | days = wtforms.IntegerField( |
|
603 | days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()]) | |
549 | 'Days', |
|
|||
550 | validators=[wtforms.validators.DataRequired()]) |
|
|||
551 | password = wtforms.PasswordField( |
|
604 | password = wtforms.PasswordField( | |
552 | 'Admin Password', |
|
605 | "Admin Password", | |
553 |
validators=[old_password_validator, wtforms.validators.DataRequired()] |
|
606 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
554 | submit = wtforms.SubmitField(_('Purge Data')) |
|
607 | ) | |
555 | ignore_labels = ['submit'] |
|
608 | submit = wtforms.SubmitField(_("Purge Data")) | |
556 | css_classes = {'submit': 'btn btn-primary'} |
|
609 | ignore_labels = ["submit"] | |
|
610 | css_classes = {"submit": "btn btn-primary"} | |||
557 |
|
611 | |||
558 |
|
612 | |||
559 | class IntegrationRepoForm(ReactorForm): |
|
613 | class IntegrationRepoForm(ReactorForm): | |
560 | host_name = wtforms.StringField("Service Host", default='') |
|
614 | host_name = wtforms.StringField("Service Host", default="") | |
561 | user_name = wtforms.StringField( |
|
615 | user_name = wtforms.StringField( | |
562 | "User Name", |
|
616 | "User Name", | |
563 | filters=[strip_filter], |
|
617 | filters=[strip_filter], | |
564 | validators=[wtforms.validators.DataRequired(), |
|
618 | validators=[ | |
565 | wtforms.validators.Length(min=1)]) |
|
619 | wtforms.validators.DataRequired(), | |
|
620 | wtforms.validators.Length(min=1), | |||
|
621 | ], | |||
|
622 | ) | |||
566 | repo_name = wtforms.StringField( |
|
623 | repo_name = wtforms.StringField( | |
567 | "Repo Name", |
|
624 | "Repo Name", | |
568 | filters=[strip_filter], |
|
625 | filters=[strip_filter], | |
569 | validators=[wtforms.validators.DataRequired(), |
|
626 | validators=[ | |
570 | wtforms.validators.Length(min=1)]) |
|
627 | wtforms.validators.DataRequired(), | |
|
628 | wtforms.validators.Length(min=1), | |||
|
629 | ], | |||
|
630 | ) | |||
571 |
|
631 | |||
572 |
|
632 | |||
573 | class IntegrationBitbucketForm(IntegrationRepoForm): |
|
633 | class IntegrationBitbucketForm(IntegrationRepoForm): | |
574 | host_name = wtforms.StringField("Service Host", |
|
634 | host_name = wtforms.StringField("Service Host", default="https://bitbucket.org") | |
575 | default='https://bitbucket.org') |
|
|||
576 |
|
635 | |||
577 | def validate_user_name(self, field): |
|
636 | def validate_user_name(self, field): | |
578 | try: |
|
637 | try: | |
579 | request = pyramid.threadlocal.get_current_request() |
|
638 | request = pyramid.threadlocal.get_current_request() | |
580 | client = BitbucketIntegration.create_client( |
|
639 | client = BitbucketIntegration.create_client( | |
581 | request, |
|
640 | request, self.user_name.data, self.repo_name.data | |
582 | self.user_name.data, |
|
641 | ) | |
583 | self.repo_name.data) |
|
|||
584 | client.get_assignees() |
|
642 | client.get_assignees() | |
585 | except IntegrationException as e: |
|
643 | except IntegrationException as e: | |
586 | raise wtforms.validators.ValidationError(str(e)) |
|
644 | raise wtforms.validators.ValidationError(str(e)) | |
587 |
|
645 | |||
588 |
|
646 | |||
589 | class IntegrationGithubForm(IntegrationRepoForm): |
|
647 | class IntegrationGithubForm(IntegrationRepoForm): | |
590 | host_name = wtforms.StringField("Service Host", |
|
648 | host_name = wtforms.StringField("Service Host", default="https://github.com") | |
591 | default='https://github.com') |
|
|||
592 |
|
649 | |||
593 | def validate_user_name(self, field): |
|
650 | def validate_user_name(self, field): | |
594 | try: |
|
651 | try: | |
595 | request = pyramid.threadlocal.get_current_request() |
|
652 | request = pyramid.threadlocal.get_current_request() | |
596 | client = GithubIntegration.create_client( |
|
653 | client = GithubIntegration.create_client( | |
597 | request, |
|
654 | request, self.user_name.data, self.repo_name.data | |
598 | self.user_name.data, |
|
655 | ) | |
599 | self.repo_name.data) |
|
|||
600 | client.get_assignees() |
|
656 | client.get_assignees() | |
601 | except IntegrationException as e: |
|
657 | except IntegrationException as e: | |
602 | raise wtforms.validators.ValidationError(str(e)) |
|
658 | raise wtforms.validators.ValidationError(str(e)) | |
@@ -605,25 +661,28 b' class IntegrationGithubForm(IntegrationRepoForm):' | |||||
605 |
|
661 | |||
606 | def filter_rooms(data): |
|
662 | def filter_rooms(data): | |
607 | if data is not None: |
|
663 | if data is not None: | |
608 | rooms = data.split(',') |
|
664 | rooms = data.split(",") | |
609 | return ','.join([r.strip() for r in rooms]) |
|
665 | return ",".join([r.strip() for r in rooms]) | |
610 |
|
666 | |||
611 |
|
667 | |||
612 | class IntegrationCampfireForm(ReactorForm): |
|
668 | class IntegrationCampfireForm(ReactorForm): | |
613 | account = wtforms.StringField( |
|
669 | account = wtforms.StringField( | |
614 | 'Account', |
|
670 | "Account", | |
615 | filters=[strip_filter], |
|
671 | filters=[strip_filter], | |
616 | validators=[wtforms.validators.DataRequired()]) |
|
672 | validators=[wtforms.validators.DataRequired()], | |
|
673 | ) | |||
617 | api_token = wtforms.StringField( |
|
674 | api_token = wtforms.StringField( | |
618 | 'Api Token', |
|
675 | "Api Token", | |
619 | filters=[strip_filter], |
|
676 | filters=[strip_filter], | |
620 | validators=[wtforms.validators.DataRequired()]) |
|
677 | validators=[wtforms.validators.DataRequired()], | |
621 | rooms = wtforms.StringField('Room ID list', filters=[filter_rooms]) |
|
678 | ) | |
|
679 | rooms = wtforms.StringField("Room ID list", filters=[filter_rooms]) | |||
622 |
|
680 | |||
623 | def validate_api_token(self, field): |
|
681 | def validate_api_token(self, field): | |
624 | try: |
|
682 | try: | |
625 | client = CampfireIntegration.create_client(self.api_token.data, |
|
683 | client = CampfireIntegration.create_client( | |
626 | self.account.data) |
|
684 | self.api_token.data, self.account.data | |
|
685 | ) | |||
627 | client.get_account() |
|
686 | client.get_account() | |
628 | except IntegrationException as e: |
|
687 | except IntegrationException as e: | |
629 | raise wtforms.validators.ValidationError(str(e)) |
|
688 | raise wtforms.validators.ValidationError(str(e)) | |
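
wtforms applies a field's filters to the raw input before any validators run, which is why filter_rooms above can normalize whitespace once for every consumer. The helper in isolation:

    def filter_rooms(data):
        # mirrors the function above: trim each comma-separated entry
        if data is not None:
            return ",".join(r.strip() for r in data.split(","))

    assert filter_rooms(" 12, 34 ,56 ") == "12,34,56"
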
@@ -631,17 +690,18 b' class IntegrationCampfireForm(ReactorForm):' | |||||
631 | def validate_rooms(self, field): |
|
690 | def validate_rooms(self, field): | |
632 | if not field.data: |
|
691 | if not field.data: | |
633 | return |
|
692 | return | |
634 |
client = CampfireIntegration.create_client( |
|
693 | client = CampfireIntegration.create_client( | |
635 | self.account.data) |
|
694 | self.api_token.data, self.account.data | |
|
695 | ) | |||
636 |
|
696 | |||
637 | try: |
|
697 | try: | |
638 |
room_list = [r[ |
|
698 | room_list = [r["id"] for r in client.get_rooms()] | |
639 | except IntegrationException as e: |
|
699 | except IntegrationException as e: | |
640 | raise wtforms.validators.ValidationError(str(e)) |
|
700 | raise wtforms.validators.ValidationError(str(e)) | |
641 |
|
701 | |||
642 |
rooms = field.data.split( |
|
702 | rooms = field.data.split(",") | |
643 | if len(rooms) > 3: |
|
703 | if len(rooms) > 3: | |
644 |
msg = |
|
704 | msg = "You can use up to 3 room ids" | |
645 | raise wtforms.validators.ValidationError(msg) |
|
705 | raise wtforms.validators.ValidationError(msg) | |
646 | if rooms: |
|
706 | if rooms: | |
647 | for room_id in rooms: |
|
707 | for room_id in rooms: | |
@@ -649,75 +709,78 b' class IntegrationCampfireForm(ReactorForm):' | |||||
649 | msg = "Room %s doesn't exist" |
|
709 | msg = "Room %s doesn't exist" | |
650 | raise wtforms.validators.ValidationError(msg % room_id) |
|
710 | raise wtforms.validators.ValidationError(msg % room_id) | |
651 | if not room_id.strip().isdigit(): |
|
711 | if not room_id.strip().isdigit(): | |
652 |
msg = |
|
712 | msg = "You must use only integers for room ids" | |
653 | raise wtforms.validators.ValidationError(msg) |
|
713 | raise wtforms.validators.ValidationError(msg) | |
654 |
|
714 | |||
655 |
submit = wtforms.SubmitField(_( |
|
715 | submit = wtforms.SubmitField(_("Connect to Campfire")) | |
656 |
ignore_labels = [ |
|
716 | ignore_labels = ["submit"] | |
657 |
css_classes = { |
|
717 | css_classes = {"submit": "btn btn-primary"} | |
658 |
|
718 | |||
659 |
|
719 | |||
660 | def filter_rooms(data): |
|
720 | def filter_rooms(data): | |
661 | if data is not None: |
|
721 | if data is not None: | |
662 |
rooms = data.split( |
|
722 | rooms = data.split(",") | |
663 |
return |
|
723 | return ",".join([r.strip() for r in rooms]) | |
664 |
|
724 | |||
665 |
|
725 | |||
666 | class IntegrationHipchatForm(ReactorForm): |
|
726 | class IntegrationHipchatForm(ReactorForm): | |
667 | api_token = wtforms.StringField( |
|
727 | api_token = wtforms.StringField( | |
668 |
|
|
728 | "Api Token", | |
669 | filters=[strip_filter], |
|
729 | filters=[strip_filter], | |
670 |
validators=[wtforms.validators.DataRequired()] |
|
730 | validators=[wtforms.validators.DataRequired()], | |
|
731 | ) | |||
671 | rooms = wtforms.StringField( |
|
732 | rooms = wtforms.StringField( | |
672 |
|
|
733 | "Room ID list", | |
673 | filters=[filter_rooms], |
|
734 | filters=[filter_rooms], | |
674 |
validators=[wtforms.validators.DataRequired()] |
|
735 | validators=[wtforms.validators.DataRequired()], | |
|
736 | ) | |||
675 |
|
737 | |||
676 | def validate_rooms(self, field): |
|
738 | def validate_rooms(self, field): | |
677 | if not field.data: |
|
739 | if not field.data: | |
678 | return |
|
740 | return | |
679 | client = HipchatIntegration.create_client(self.api_token.data) |
|
741 | client = HipchatIntegration.create_client(self.api_token.data) | |
680 |
rooms = field.data.split( |
|
742 | rooms = field.data.split(",") | |
681 | if len(rooms) > 3: |
|
743 | if len(rooms) > 3: | |
682 |
msg = |
|
744 | msg = "You can use up to 3 room ids" | |
683 | raise wtforms.validators.ValidationError(msg) |
|
745 | raise wtforms.validators.ValidationError(msg) | |
684 | if rooms: |
|
746 | if rooms: | |
685 | for room_id in rooms: |
|
747 | for room_id in rooms: | |
686 | if not room_id.strip().isdigit(): |
|
748 | if not room_id.strip().isdigit(): | |
687 |
msg = |
|
749 | msg = "You must use only integers for room ids" | |
688 | raise wtforms.validators.ValidationError(msg) |
|
750 | raise wtforms.validators.ValidationError(msg) | |
689 | try: |
|
751 | try: | |
690 |
client.send( |
|
752 | client.send( | |
691 |
|
|
753 | { | |
|
754 | "message_format": "text", | |||
692 | "message": "testing for room existence", |
|
755 | "message": "testing for room existence", | |
693 | "from": "AppEnlight", |
|
756 | "from": "AppEnlight", | |
694 | "room_id": room_id, |
|
757 | "room_id": room_id, | |
695 | "color": "green" |
|
758 | "color": "green", | |
696 |
} |
|
759 | } | |
|
760 | ) | |||
697 | except IntegrationException as exc: |
|
761 | except IntegrationException as exc: | |
698 | msg = | 762 | msg = "Room id: %s exception: %s" | |
699 | raise wtforms.validators.ValidationError(msg % (room_id, | 763 | raise wtforms.validators.ValidationError(msg % (room_id, exc)) | |
700 | exc)) |
701 |
|
764 | |||
702 |
|
765 | |||
703 | class IntegrationFlowdockForm(ReactorForm): |
|
766 | class IntegrationFlowdockForm(ReactorForm): | |
704 |
api_token = wtforms.StringField( |
|
767 | api_token = wtforms.StringField( | |
|
768 | "API Token", | |||
705 |
|
|
769 | filters=[strip_filter], | |
706 | validators=[ |
|
770 | validators=[wtforms.validators.DataRequired()], | |
707 | wtforms.validators.DataRequired() |
|
771 | ) | |
708 | ], ) |
|
|||
709 |
|
772 | |||
710 | def validate_api_token(self, field): |
|
773 | def validate_api_token(self, field): | |
711 | try: |
|
774 | try: | |
712 | client = FlowdockIntegration.create_client(self.api_token.data) |
|
775 | client = FlowdockIntegration.create_client(self.api_token.data) | |
713 | registry = pyramid.threadlocal.get_current_registry() |
|
776 | registry = pyramid.threadlocal.get_current_registry() | |
714 | payload = { |
|
777 | payload = { | |
715 |
"source": registry.settings[ |
|
778 | "source": registry.settings["mailing.from_name"], | |
716 |
"from_address": registry.settings[ |
|
779 | "from_address": registry.settings["mailing.from_email"], | |
717 | "subject": "Integration test", |
|
780 | "subject": "Integration test", | |
718 | "content": "If you can see this it was successful", |
|
781 | "content": "If you can see this it was successful", | |
719 | "tags": ["appenlight"], |
|
782 | "tags": ["appenlight"], | |
720 |
"link": registry.settings[ |
|
783 | "link": registry.settings["mailing.app_url"], | |
721 | } |
|
784 | } | |
722 | client.send_to_inbox(payload) |
|
785 | client.send_to_inbox(payload) | |
723 | except IntegrationException as e: |
|
786 | except IntegrationException as e: | |
@@ -726,30 +789,35 b' class IntegrationFlowdockForm(ReactorForm):' | |||||
726 |
|
789 | |||
727 | class IntegrationSlackForm(ReactorForm): |
|
790 | class IntegrationSlackForm(ReactorForm): | |
728 | webhook_url = wtforms.StringField( |
|
791 | webhook_url = wtforms.StringField( | |
729 |
|
|
792 | "Reports webhook", | |
730 | filters=[strip_filter], |
|
793 | filters=[strip_filter], | |
731 |
validators=[wtforms.validators.DataRequired()] |
|
794 | validators=[wtforms.validators.DataRequired()], | |
|
795 | ) | |||
732 |
|
796 | |||
733 | def validate_webhook_url(self, field): |
|
797 | def validate_webhook_url(self, field): | |
734 | registry = pyramid.threadlocal.get_current_registry() |
|
798 | registry = pyramid.threadlocal.get_current_registry() | |
735 | client = SlackIntegration.create_client(field.data) |
|
799 | client = SlackIntegration.create_client(field.data) | |
736 |
link = "<%s|%s>" % ( |
|
800 | link = "<%s|%s>" % ( | |
737 |
|
|
801 | registry.settings["mailing.app_url"], | |
|
802 | registry.settings["mailing.from_name"], | |||
|
803 | ) | |||
738 | test_data = { |
|
804 | test_data = { | |
739 | "username": "AppEnlight", |
|
805 | "username": "AppEnlight", | |
740 | "icon_emoji": ":fire:", |
|
806 | "icon_emoji": ":fire:", | |
741 | "attachments": [ |
|
807 | "attachments": [ | |
742 | {"fallback": "Testing integration channel: %s" % link, |
|
808 | { | |
|
809 | "fallback": "Testing integration channel: %s" % link, | |||
743 | "pretext": "Testing integration channel: %s" % link, |
|
810 | "pretext": "Testing integration channel: %s" % link, | |
744 | "color": "good", |
|
811 | "color": "good", | |
745 | "fields": [ |
|
812 | "fields": [ | |
746 | { |
|
813 | { | |
747 | "title": "Status", |
|
814 | "title": "Status", | |
748 | "value": "Integration is working fine", |
|
815 | "value": "Integration is working fine", | |
749 | "short": False |
|
816 | "short": False, | |
750 | } |
|
817 | } | |
751 |
] |
|
818 | ], | |
752 |
|
|
819 | } | |
|
820 | ], | |||
753 | } |
|
821 | } | |
754 | try: |
|
822 | try: | |
755 | client.make_request(data=test_data) |
|
823 | client.make_request(data=test_data) | |
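
The Slack validator above pushes a test payload through SlackIntegration.create_client, whose transport is not part of this diff. A hedged sketch of the equivalent raw call, assuming a plain HTTP client such as requests:

    import json
    import requests  # assumption: the real client wrapper may differ

    def send_test_message(webhook_url):
        payload = {"username": "AppEnlight", "icon_emoji": ":fire:",
                   "text": "Testing integration channel"}
        resp = requests.post(webhook_url, data=json.dumps(payload), timeout=5)
        resp.raise_for_status()  # Slack answers 200 when the hook is valid
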
@@ -759,44 +827,52 b' class IntegrationSlackForm(ReactorForm):' | |||||
759 |
|
827 | |||
760 | class IntegrationWebhooksForm(ReactorForm): |
|
828 | class IntegrationWebhooksForm(ReactorForm): | |
761 | reports_webhook = wtforms.StringField( |
|
829 | reports_webhook = wtforms.StringField( | |
762 |
|
|
830 | "Reports webhook", | |
763 | filters=[strip_filter], |
|
831 | filters=[strip_filter], | |
764 |
validators=[wtforms.validators.DataRequired()] |
|
832 | validators=[wtforms.validators.DataRequired()], | |
|
833 | ) | |||
765 | alerts_webhook = wtforms.StringField( |
|
834 | alerts_webhook = wtforms.StringField( | |
766 |
|
|
835 | "Alerts webhook", | |
767 | filters=[strip_filter], |
|
836 | filters=[strip_filter], | |
768 |
validators=[wtforms.validators.DataRequired()] |
|
837 | validators=[wtforms.validators.DataRequired()], | |
769 | submit = wtforms.SubmitField(_('Setup webhooks')) |
|
838 | ) | |
770 | ignore_labels = ['submit'] |
|
839 | submit = wtforms.SubmitField(_("Setup webhooks")) | |
771 | css_classes = {'submit': 'btn btn-primary'} |
|
840 | ignore_labels = ["submit"] | |
|
841 | css_classes = {"submit": "btn btn-primary"} | |||
772 |
|
842 | |||
773 |
|
843 | |||
774 | class IntegrationJiraForm(ReactorForm): |
|
844 | class IntegrationJiraForm(ReactorForm): | |
775 | host_name = wtforms.StringField( |
|
845 | host_name = wtforms.StringField( | |
776 |
|
|
846 | "Server URL", | |
777 | filters=[strip_filter], |
|
847 | filters=[strip_filter], | |
778 |
validators=[wtforms.validators.DataRequired()] |
|
848 | validators=[wtforms.validators.DataRequired()], | |
|
849 | ) | |||
779 | user_name = wtforms.StringField( |
|
850 | user_name = wtforms.StringField( | |
780 |
|
|
851 | "Username", | |
781 | filters=[strip_filter], |
|
852 | filters=[strip_filter], | |
782 |
validators=[wtforms.validators.DataRequired()] |
|
853 | validators=[wtforms.validators.DataRequired()], | |
|
854 | ) | |||
783 | password = wtforms.PasswordField( |
|
855 | password = wtforms.PasswordField( | |
784 |
|
|
856 | "Password", | |
785 | filters=[strip_filter], |
|
857 | filters=[strip_filter], | |
786 |
validators=[wtforms.validators.DataRequired()] |
|
858 | validators=[wtforms.validators.DataRequired()], | |
|
859 | ) | |||
787 | project = wtforms.StringField( |
|
860 | project = wtforms.StringField( | |
788 |
|
|
861 | "Project key", | |
789 | filters=[uppercase_filter, strip_filter], |
|
862 | filters=[uppercase_filter, strip_filter], | |
790 |
validators=[wtforms.validators.DataRequired()] |
|
863 | validators=[wtforms.validators.DataRequired()], | |
|
864 | ) | |||
791 |
|
865 | |||
792 | def validate_project(self, field): |
|
866 | def validate_project(self, field): | |
793 | if not field.data: |
|
867 | if not field.data: | |
794 | return |
|
868 | return | |
795 | try: |
|
869 | try: | |
796 |
client = JiraClient( |
|
870 | client = JiraClient( | |
|
871 | self.user_name.data, | |||
797 |
|
|
872 | self.password.data, | |
798 |
|
|
873 | self.host_name.data, | |
799 |
|
|
874 | self.project.data, | |
|
875 | ) | |||
800 | except Exception as exc: |
|
876 | except Exception as exc: | |
801 | raise wtforms.validators.ValidationError(str(exc)) |
|
877 | raise wtforms.validators.ValidationError(str(exc)) | |
802 |
|
878 | |||
@@ -809,88 +885,97 b' class IntegrationJiraForm(ReactorForm):' | |||||
809 | def get_deletion_form(resource): |
|
885 | def get_deletion_form(resource): | |
810 | class F(ReactorForm): |
|
886 | class F(ReactorForm): | |
811 | application_name = wtforms.StringField( |
|
887 | application_name = wtforms.StringField( | |
812 |
|
|
888 | "Application Name", | |
813 | filters=[strip_filter], |
|
889 | filters=[strip_filter], | |
814 |
validators=[wtforms.validators.AnyOf([resource.resource_name])] |
|
890 | validators=[wtforms.validators.AnyOf([resource.resource_name])], | |
|
891 | ) | |||
815 | resource_id = wtforms.HiddenField(default=resource.resource_id) |
|
892 | resource_id = wtforms.HiddenField(default=resource.resource_id) | |
816 |
submit = wtforms.SubmitField(_( |
|
893 | submit = wtforms.SubmitField(_("Delete my application")) | |
817 |
ignore_labels = [ |
|
894 | ignore_labels = ["submit"] | |
818 |
css_classes = { |
|
895 | css_classes = {"submit": "btn btn-danger"} | |
819 |
|
896 | |||
820 | return F |
|
897 | return F | |
821 |
|
898 | |||
822 |
|
899 | |||
823 | class ChangeApplicationOwnerForm(ReactorForm): |
|
900 | class ChangeApplicationOwnerForm(ReactorForm): | |
824 | password = wtforms.PasswordField( |
|
901 | password = wtforms.PasswordField( | |
825 |
|
|
902 | "Password", | |
826 | filters=[strip_filter], |
|
903 | filters=[strip_filter], | |
827 | validators=[old_password_validator, |
|
904 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
828 | wtforms.validators.DataRequired()]) |
|
905 | ) | |
829 |
|
906 | |||
830 | user_name = wtforms.StringField( |
|
907 | user_name = wtforms.StringField( | |
831 |
|
|
908 | "New owners username", | |
832 | filters=[strip_filter], |
|
909 | filters=[strip_filter], | |
833 | validators=[found_username_validator, |
|
910 | validators=[found_username_validator, wtforms.validators.DataRequired()], | |
834 | wtforms.validators.DataRequired()]) |
|
911 | ) | |
835 |
submit = wtforms.SubmitField(_( |
|
912 | submit = wtforms.SubmitField(_("Transfer ownership of application")) | |
836 |
ignore_labels = [ |
|
913 | ignore_labels = ["submit"] | |
837 |
css_classes = { |
|
914 | css_classes = {"submit": "btn btn-danger"} | |
838 |
|
915 | |||
839 |
|
916 | |||
840 | def default_filename(): |
|
917 | def default_filename(): | |
841 |
return |
|
918 | return "Invoice %s" % datetime.datetime.utcnow().strftime("%Y/%m") | |
842 |
|
919 | |||
843 |
|
920 | |||
844 | class FileUploadForm(ReactorForm): |
|
921 | class FileUploadForm(ReactorForm): | |
845 |
title = wtforms.StringField( |
|
922 | title = wtforms.StringField( | |
|
923 | "File Title", | |||
846 |
|
|
924 | default=default_filename, | |
847 |
|
|
925 | validators=[wtforms.validators.DataRequired()], | |
848 | file = wtforms.FileField('File') |
|
926 | ) | |
|
927 | file = wtforms.FileField("File") | |||
849 |
|
928 | |||
850 | def validate_file(self, field): |
|
929 | def validate_file(self, field): | |
851 |
if not hasattr(field.data, |
|
930 | if not hasattr(field.data, "file"): | |
852 |
raise wtforms.ValidationError( |
|
931 | raise wtforms.ValidationError("File is missing") | |
853 |
|
932 | |||
854 |
submit = wtforms.SubmitField(_( |
|
933 | submit = wtforms.SubmitField(_("Upload")) | |
855 |
|
934 | |||
856 |
|
935 | |||
857 | def get_partition_deletion_form(es_indices, pg_indices): |
|
936 | def get_partition_deletion_form(es_indices, pg_indices): | |
858 | class F(ReactorForm): |
|
937 | class F(ReactorForm): | |
859 |
es_index = wtforms.SelectMultipleField( |
|
938 | es_index = wtforms.SelectMultipleField( | |
860 | choices=[(ix, '') for ix in |
|
939 | "Elasticsearch", choices=[(ix, "") for ix in es_indices] | |
861 | es_indices]) |
|
940 | ) | |
862 |
pg_index = wtforms.SelectMultipleField( |
|
941 | pg_index = wtforms.SelectMultipleField( | |
863 | choices=[(ix, '') for ix in |
|
942 | "pg", choices=[(ix, "") for ix in pg_indices] | |
864 | pg_indices]) |
|
943 | ) | |
865 |
confirm = wtforms.TextField( |
|
944 | confirm = wtforms.TextField( | |
|
945 | "Confirm", | |||
866 |
|
|
946 | filters=[uppercase_filter, strip_filter], | |
867 |
|
|
947 | validators=[ | |
868 |
|
|
948 | wtforms.validators.AnyOf(["CONFIRM"]), | |
869 |
|
|
949 | wtforms.validators.DataRequired(), | |
870 | ignore_labels = ['submit'] |
|
950 | ], | |
871 | css_classes = {'submit': 'btn btn-danger'} |
|
951 | ) | |
|
952 | ignore_labels = ["submit"] | |||
|
953 | css_classes = {"submit": "btn btn-danger"} | |||
872 |
|
954 | |||
873 | return F |
|
955 | return F | |
874 |
|
956 | |||
875 |
|
957 | |||
876 | class GroupCreateForm(ReactorForm): |
|
958 | class GroupCreateForm(ReactorForm): | |
877 | group_name = wtforms.StringField( |
|
959 | group_name = wtforms.StringField( | |
878 |
_( |
|
960 | _("Group Name"), | |
879 | filters=[strip_filter], |
|
961 | filters=[strip_filter], | |
880 | validators=[ |
|
962 | validators=[ | |
881 | wtforms.validators.Length(min=2, max=50), |
|
963 | wtforms.validators.Length(min=2, max=50), | |
882 | unique_groupname_validator, |
|
964 | unique_groupname_validator, | |
883 | wtforms.validators.DataRequired() |
|
965 | wtforms.validators.DataRequired(), | |
884 |
] |
|
966 | ], | |
885 | description = wtforms.StringField(_('Group description')) |
|
967 | ) | |
|
968 | description = wtforms.StringField(_("Group description")) | |||
886 |
|
969 | |||
887 |
|
970 | |||
888 |
time_choices = [(k, v[ |
|
971 | time_choices = [(k, v["label"]) for k, v in h.time_deltas.items()] | |
889 |
|
972 | |||
890 |
|
973 | |||
891 | class AuthTokenCreateForm(ReactorForm): |
|
974 | class AuthTokenCreateForm(ReactorForm): | |
892 |
description = wtforms.StringField(_( |
|
975 | description = wtforms.StringField(_("Token description")) | |
893 |
expires = wtforms.SelectField( |
|
976 | expires = wtforms.SelectField( | |
|
977 | "Expires", | |||
894 |
|
|
978 | coerce=lambda x: x, | |
895 |
|
|
979 | choices=time_choices, | |
896 |
|
|
980 | validators=[wtforms.validators.Optional()], | |
|
981 | ) |
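
get_deletion_form and get_partition_deletion_form in this hunk use the same trick: build the form class inside a function so validators can close over per-request values (here the resource name). Reduced to its core, as a sketch:

    import wtforms

    def make_confirm_form(expected_name):
        class F(wtforms.Form):
            application_name = wtforms.StringField(
                "Application Name",
                validators=[wtforms.validators.AnyOf([expected_name])],
            )
        return F

    FormCls = make_confirm_form("my-app")  # illustrative name
    assert FormCls(application_name="my-app").validate()
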
@@ -24,20 +24,20 b' from appenlight_client.exceptions import get_current_traceback' | |||||
24 |
|
24 | |||
25 |
|
25 | |||
26 | def generate_random_string(chars=10): |
|
26 | def generate_random_string(chars=10): | |
27 |
return |
|
27 | return "".join(random.sample(string.ascii_letters * 2 + string.digits, chars)) | |
28 | chars)) |
|
|||
29 |
|
28 | |||
30 |
|
29 | |||
31 | def to_integer_safe(input): |
|
30 | def to_integer_safe(input): | |
32 | try: |
|
31 | try: | |
33 | return int(input) |
|
32 | return int(input) | |
34 |
except (TypeError, ValueError |
|
33 | except (TypeError, ValueError): | |
35 | return None |
|
34 | return None | |
36 |
|
35 | |||
37 |
|
36 | |||
38 | def print_traceback(log): |
|
37 | def print_traceback(log): | |
39 |
traceback = get_current_traceback( |
|
38 | traceback = get_current_traceback( | |
40 | ignore_system_exceptions=True) |
|
39 | skip=1, show_hidden_frames=True, ignore_system_exceptions=True | |
|
40 | ) | |||
41 | exception_text = traceback.exception |
|
41 | exception_text = traceback.exception | |
42 | log.error(exception_text) |
|
42 | log.error(exception_text) | |
43 | log.error(traceback.plaintext) |
|
43 | log.error(traceback.plaintext) | |
@@ -45,6 +45,5 b' def print_traceback(log):' | |||||
45 |
|
45 | |||
46 |
|
46 | |||
47 | def get_callable(import_string): |
|
47 | def get_callable(import_string): | |
48 |
import_module, indexer_callable = import_string.split( |
|
48 | import_module, indexer_callable = import_string.split(":") | |
49 | return getattr(importlib.import_module(import_module), |
|
49 | return getattr(importlib.import_module(import_module), indexer_callable) | |
50 | indexer_callable) |
|
@@ -27,21 +27,18 b' log = logging.getLogger(__name__)' | |||||
27 |
|
27 | |||
28 | def rate_limiting(request, resource, section, to_increment=1): |
|
28 | def rate_limiting(request, resource, section, to_increment=1): | |
29 | tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0) |
|
29 | tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0) | |
30 |
key = REDIS_KEYS[ |
|
30 | key = REDIS_KEYS["rate_limits"][section].format(tsample, resource.resource_id) | |
31 | resource.resource_id) |
|
|||
32 | redis_pipeline = request.registry.redis_conn.pipeline() |
|
31 | redis_pipeline = request.registry.redis_conn.pipeline() | |
33 | redis_pipeline.incr(key, to_increment) |
|
32 | redis_pipeline.incr(key, to_increment) | |
34 | redis_pipeline.expire(key, 3600 * 24) |
|
33 | redis_pipeline.expire(key, 3600 * 24) | |
35 | results = redis_pipeline.execute() |
|
34 | results = redis_pipeline.execute() | |
36 | current_count = results[0] |
|
35 | current_count = results[0] | |
37 |
config = ConfigService.by_key_and_section(section, |
|
36 | config = ConfigService.by_key_and_section(section, "global") | |
38 | limit = config.value if config else 1000 |
|
37 | limit = config.value if config else 1000 | |
39 | if current_count > int(limit): |
|
38 | if current_count > int(limit): | |
40 |
log.info( |
|
39 | log.info("RATE LIMITING: {}: {}, {}".format(section, resource, current_count)) | |
41 | section, resource, current_count)) |
|
40 | abort_msg = "Rate limits are in effect for this application" | |
42 | abort_msg = 'Rate limits are in effect for this application' |
|
41 | raise HTTPTooManyRequests(abort_msg, headers={"X-AppEnlight": abort_msg}) | |
43 | raise HTTPTooManyRequests(abort_msg, |
|
|||
44 | headers={'X-AppEnlight': abort_msg}) |
|
|||
45 |
|
42 | |||
46 |
|
43 | |||
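
rate_limiting above is a fixed-window counter: one Redis key per application per minute, bumped and given a TTL in a single pipeline round trip. A condensed sketch with a redis-py style client (key name illustrative):

    import datetime

    def over_limit(redis_conn, resource_id, limit, to_increment=1):
        tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
        key = "appenlight:data:rate_limit:{}:{}".format(tsample.isoformat(), resource_id)
        pipe = redis_conn.pipeline()
        pipe.incr(key, to_increment)   # count events in this minute's bucket
        pipe.expire(key, 3600 * 24)    # old buckets expire on their own
        return pipe.execute()[0] > limit
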
47 | def check_cors(request, application, should_return=True): |
|
44 | def check_cors(request, application, should_return=True): | |
@@ -50,31 +47,34 b' def check_cors(request, application, should_return=True):' | |||||
50 | application, otherwise return 403 |
|
47 | application, otherwise return 403 | |
51 | """ |
|
48 | """ | |
52 | origin_found = False |
|
49 | origin_found = False | |
53 |
origin = request.headers.get( |
|
50 | origin = request.headers.get("Origin") | |
54 | if should_return: |
|
51 | if should_return: | |
55 |
log.info( |
|
52 | log.info("CORS for %s" % origin) | |
56 | if not origin: |
|
53 | if not origin: | |
57 | return False |
|
54 | return False | |
58 |
for domain in application.domains.split( |
|
55 | for domain in application.domains.split("\n"): | |
59 | if domain in origin: |
|
56 | if domain in origin: | |
60 | origin_found = True |
|
57 | origin_found = True | |
61 | if origin_found: |
|
58 | if origin_found: | |
62 |
request.response.headers.add( |
|
59 | request.response.headers.add("Access-Control-Allow-Origin", origin) | |
63 |
request.response.headers.add( |
|
60 | request.response.headers.add("XDomainRequestAllowed", "1") | |
64 |
request.response.headers.add( |
|
61 | request.response.headers.add( | |
65 | 'GET, POST, OPTIONS') |
|
62 | "Access-Control-Allow-Methods", "GET, POST, OPTIONS" | |
66 | request.response.headers.add('Access-Control-Allow-Headers', |
|
63 | ) | |
67 | 'Accept-Encoding, Accept-Language, ' |
|
64 | request.response.headers.add( | |
68 | 'Content-Type, ' |
|
65 | "Access-Control-Allow-Headers", | |
69 | 'Depth, User-Agent, X-File-Size, ' |
|
66 | "Accept-Encoding, Accept-Language, " | |
70 | 'X-Requested-With, If-Modified-Since, ' |
|
67 | "Content-Type, " | |
71 | 'X-File-Name, ' |
|
68 | "Depth, User-Agent, X-File-Size, " | |
72 | 'Cache-Control, Host, Pragma, Accept, ' |
|
69 | "X-Requested-With, If-Modified-Since, " | |
73 | 'Origin, Connection, ' |
|
70 | "X-File-Name, " | |
74 | 'Referer, Cookie, ' |
|
71 | "Cache-Control, Host, Pragma, Accept, " | |
75 | 'X-appenlight-public-api-key, ' |
|
72 | "Origin, Connection, " | |
76 | 'x-appenlight-public-api-key') |
|
73 | "Referer, Cookie, " | |
77 | request.response.headers.add('Access-Control-Max-Age', '86400') |
|
74 | "X-appenlight-public-api-key, " | |
|
75 | "x-appenlight-public-api-key", | |||
|
76 | ) | |||
|
77 | request.response.headers.add("Access-Control-Max-Age", "86400") | |||
78 | return request.response |
|
78 | return request.response | |
79 | else: |
|
79 | else: | |
80 | return HTTPForbidden() |
|
80 | return HTTPForbidden() |
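
check_cors above treats application.domains as a newline-separated allow list and matches by substring against the Origin header. The decision itself, extracted:

    def origin_allowed(origin, domains):
        # substring match, as in the view code above; an exact-host
        # comparison would be stricter
        return bool(origin) and any(d and d in origin for d in domains.split("\n"))

    assert origin_allowed("https://example.com", "example.com\nother.org")
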
@@ -42,23 +42,27 b' def hashgen(namespace, fn, to_str=compat.string_type):' | |||||
42 | """ |
|
42 | """ | |
43 |
|
43 | |||
44 | if namespace is None: |
|
44 | if namespace is None: | |
45 |
namespace = |
|
45 | namespace = "%s:%s" % (fn.__module__, fn.__name__) | |
46 | else: |
|
46 | else: | |
47 |
namespace = |
|
47 | namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace) | |
48 |
|
48 | |||
49 | args = inspect.getargspec(fn) |
|
49 | args = inspect.getargspec(fn) | |
50 |
has_self = args[0] and args[0][0] in ( |
|
50 | has_self = args[0] and args[0][0] in ("self", "cls") | |
51 |
|
51 | |||
52 | def generate_key(*args, **kw): |
|
52 | def generate_key(*args, **kw): | |
53 | if kw: |
|
53 | if kw: | |
54 | raise ValueError( |
|
54 | raise ValueError( | |
55 | "dogpile.cache's default key creation " |
|
55 | "dogpile.cache's default key creation " | |
56 |
"function does not accept keyword arguments." |
|
56 | "function does not accept keyword arguments." | |
|
57 | ) | |||
57 | if has_self: |
|
58 | if has_self: | |
58 | args = args[1:] |
|
59 | args = args[1:] | |
59 |
|
60 | |||
60 | return namespace + "|" + hashlib.sha1( |
|
61 | return ( | |
61 | " ".join(map(to_str, args)).encode('utf8')).hexdigest() |
|
62 | namespace | |
|
63 | + "|" | |||
|
64 | + hashlib.sha1(" ".join(map(to_str, args)).encode("utf8")).hexdigest() | |||
|
65 | ) | |||
62 |
|
66 | |||
63 | return generate_key |
|
67 | return generate_key | |
64 |
|
68 | |||
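
hashgen namespaces every cache key by module and function, then appends a sha1 of the stringified positional arguments. What a generated key looks like, re-created standalone:

    import hashlib

    def make_key(namespace, *args):
        digest = hashlib.sha1(" ".join(map(str, args)).encode("utf8")).hexdigest()
        return namespace + "|" + digest

    make_key("reports.models:get_report", 1337)
    # -> 'reports.models:get_report|<40-char sha1 hex digest>'
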
@@ -68,116 +72,97 b' class CacheRegions(object):' | |||||
68 | config_redis = {"arguments": settings} |
|
72 | config_redis = {"arguments": settings} | |
69 |
|
73 | |||
70 | self.redis_min_1 = make_region( |
|
74 | self.redis_min_1 = make_region( | |
71 | function_key_generator=hashgen, |
|
75 | function_key_generator=hashgen, key_mangler=key_mangler | |
72 | key_mangler=key_mangler).configure( |
|
76 | ).configure( | |
73 | "dogpile.cache.redis", |
|
77 | "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis) | |
74 | expiration_time=60, |
|
78 | ) | |
75 | **copy.deepcopy(config_redis)) |
|
|||
76 | self.redis_min_5 = make_region( |
|
79 | self.redis_min_5 = make_region( | |
77 | function_key_generator=hashgen, |
|
80 | function_key_generator=hashgen, key_mangler=key_mangler | |
78 | key_mangler=key_mangler).configure( |
|
81 | ).configure( | |
79 | "dogpile.cache.redis", |
|
82 | "dogpile.cache.redis", expiration_time=300, **copy.deepcopy(config_redis) | |
80 | expiration_time=300, |
|
83 | ) | |
81 | **copy.deepcopy(config_redis)) |
|
|||
82 |
|
84 | |||
83 | self.redis_min_10 = make_region( |
|
85 | self.redis_min_10 = make_region( | |
84 | function_key_generator=hashgen, |
|
86 | function_key_generator=hashgen, key_mangler=key_mangler | |
85 | key_mangler=key_mangler).configure( |
|
87 | ).configure( | |
86 | "dogpile.cache.redis", |
|
88 | "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis) | |
87 | expiration_time=60, |
|
89 | ) | |
88 | **copy.deepcopy(config_redis)) |
|
|||
89 |
|
90 | |||
90 | self.redis_min_60 = make_region( |
|
91 | self.redis_min_60 = make_region( | |
91 | function_key_generator=hashgen, |
|
92 | function_key_generator=hashgen, key_mangler=key_mangler | |
92 | key_mangler=key_mangler).configure( |
|
93 | ).configure( | |
93 | "dogpile.cache.redis", |
|
94 | "dogpile.cache.redis", expiration_time=3600, **copy.deepcopy(config_redis) | |
94 | expiration_time=3600, |
|
95 | ) | |
95 | **copy.deepcopy(config_redis)) |
|
|||
96 |
|
96 | |||
97 | self.redis_sec_1 = make_region( |
|
97 | self.redis_sec_1 = make_region( | |
98 | function_key_generator=hashgen, |
|
98 | function_key_generator=hashgen, key_mangler=key_mangler | |
99 | key_mangler=key_mangler).configure( |
|
99 | ).configure( | |
100 | "dogpile.cache.redis", |
|
100 | "dogpile.cache.redis", expiration_time=1, **copy.deepcopy(config_redis) | |
101 | expiration_time=1, |
|
101 | ) | |
102 | **copy.deepcopy(config_redis)) |
|
|||
103 |
|
102 | |||
104 | self.redis_sec_5 = make_region( |
|
103 | self.redis_sec_5 = make_region( | |
105 | function_key_generator=hashgen, |
|
104 | function_key_generator=hashgen, key_mangler=key_mangler | |
106 | key_mangler=key_mangler).configure( |
|
105 | ).configure( | |
107 | "dogpile.cache.redis", |
|
106 | "dogpile.cache.redis", expiration_time=5, **copy.deepcopy(config_redis) | |
108 | expiration_time=5, |
|
107 | ) | |
109 | **copy.deepcopy(config_redis)) |
|
|||
110 |
|
108 | |||
111 | self.redis_sec_30 = make_region( |
|
109 | self.redis_sec_30 = make_region( | |
112 | function_key_generator=hashgen, |
|
110 | function_key_generator=hashgen, key_mangler=key_mangler | |
113 | key_mangler=key_mangler).configure( |
|
111 | ).configure( | |
114 | "dogpile.cache.redis", |
|
112 | "dogpile.cache.redis", expiration_time=30, **copy.deepcopy(config_redis) | |
115 | expiration_time=30, |
|
113 | ) | |
116 | **copy.deepcopy(config_redis)) |
|
|||
117 |
|
114 | |||
118 | self.redis_day_1 = make_region( |
|
115 | self.redis_day_1 = make_region( | |
119 | function_key_generator=hashgen, |
|
116 | function_key_generator=hashgen, key_mangler=key_mangler | |
120 | key_mangler=key_mangler).configure( |
|
117 | ).configure( | |
121 | "dogpile.cache.redis", |
|
118 | "dogpile.cache.redis", expiration_time=86400, **copy.deepcopy(config_redis) | |
122 | expiration_time=86400, |
|
119 | ) | |
123 | **copy.deepcopy(config_redis)) |
|
|||
124 |
|
120 | |||
125 | self.redis_day_7 = make_region( |
|
121 | self.redis_day_7 = make_region( | |
126 | function_key_generator=hashgen, |
|
122 | function_key_generator=hashgen, key_mangler=key_mangler | |
127 | key_mangler=key_mangler).configure( |
|
123 | ).configure( | |
128 | "dogpile.cache.redis", |
|
124 | "dogpile.cache.redis", | |
129 | expiration_time=86400 * 7, |
|
125 | expiration_time=86400 * 7, | |
130 |
**copy.deepcopy(config_redis) |
|
126 | **copy.deepcopy(config_redis) | |
|
127 | ) | |||
131 |
|
128 | |||
132 | self.redis_day_30 = make_region( |
|
129 | self.redis_day_30 = make_region( | |
133 | function_key_generator=hashgen, |
|
130 | function_key_generator=hashgen, key_mangler=key_mangler | |
134 | key_mangler=key_mangler).configure( |
|
131 | ).configure( | |
135 | "dogpile.cache.redis", |
|
132 | "dogpile.cache.redis", | |
136 | expiration_time=86400 * 30, |
|
133 | expiration_time=86400 * 30, | |
137 |
**copy.deepcopy(config_redis) |
|
134 | **copy.deepcopy(config_redis) | |
|
135 | ) | |||
138 |
|
136 | |||
139 | self.memory_day_1 = make_region( |
|
137 | self.memory_day_1 = make_region( | |
140 | function_key_generator=hashgen, |
|
138 | function_key_generator=hashgen, key_mangler=key_mangler | |
141 | key_mangler=key_mangler).configure( |
|
139 | ).configure( | |
142 | "dogpile.cache.memory", |
|
140 | "dogpile.cache.memory", expiration_time=86400, **copy.deepcopy(config_redis) | |
143 | expiration_time=86400, |
|
141 | ) | |
144 | **copy.deepcopy(config_redis)) |
|
|||
145 |
|
142 | |||
146 | self.memory_sec_1 = make_region( |
|
143 | self.memory_sec_1 = make_region( | |
147 | function_key_generator=hashgen, |
|
144 | function_key_generator=hashgen, key_mangler=key_mangler | |
148 | key_mangler=key_mangler).configure( |
|
145 | ).configure("dogpile.cache.memory", expiration_time=1) | |
149 | "dogpile.cache.memory", |
|
|||
150 | expiration_time=1) |
|
|||
151 |
|
146 | |||
152 | self.memory_sec_5 = make_region( |
|
147 | self.memory_sec_5 = make_region( | |
153 | function_key_generator=hashgen, |
|
148 | function_key_generator=hashgen, key_mangler=key_mangler | |
154 | key_mangler=key_mangler).configure( |
|
149 | ).configure("dogpile.cache.memory", expiration_time=5) | |
155 | "dogpile.cache.memory", |
|
|||
156 | expiration_time=5) |
|
|||
157 |
|
150 | |||
158 | self.memory_min_1 = make_region( |
|
151 | self.memory_min_1 = make_region( | |
159 | function_key_generator=hashgen, |
|
152 | function_key_generator=hashgen, key_mangler=key_mangler | |
160 | key_mangler=key_mangler).configure( |
|
153 | ).configure("dogpile.cache.memory", expiration_time=60) | |
161 | "dogpile.cache.memory", |
|
|||
162 | expiration_time=60) |
|
|||
163 |
|
154 | |||
164 | self.memory_min_5 = make_region( |
|
155 | self.memory_min_5 = make_region( | |
165 | function_key_generator=hashgen, |
|
156 | function_key_generator=hashgen, key_mangler=key_mangler | |
166 | key_mangler=key_mangler).configure( |
|
157 | ).configure("dogpile.cache.memory", expiration_time=300) | |
167 | "dogpile.cache.memory", |
|
|||
168 | expiration_time=300) |
|
|||
169 |
|
158 | |||
170 | self.memory_min_10 = make_region( |
|
159 | self.memory_min_10 = make_region( | |
171 | function_key_generator=hashgen, |
|
160 | function_key_generator=hashgen, key_mangler=key_mangler | |
172 | key_mangler=key_mangler).configure( |
|
161 | ).configure("dogpile.cache.memory", expiration_time=600) | |
173 | "dogpile.cache.memory", |
|
|||
174 | expiration_time=600) |
|
|||
175 |
|
162 | |||
176 | self.memory_min_60 = make_region( |
|
163 | self.memory_min_60 = make_region( | |
177 | function_key_generator=hashgen, |
|
164 | function_key_generator=hashgen, key_mangler=key_mangler | |
178 | key_mangler=key_mangler).configure( |
|
165 | ).configure("dogpile.cache.memory", expiration_time=3600) | |
179 | "dogpile.cache.memory", |
|
|||
180 | expiration_time=3600) |
|
|||
181 |
|
166 | |||
182 |
|
167 | |||
183 | def get_region(region): |
|
168 | def get_region(region): |
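
Each attribute built in CacheRegions is an ordinary dogpile.cache region, so callers decorate functions with cache_on_arguments. A minimal usage sketch against a memory backend:

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=60)

    @region.cache_on_arguments()
    def expensive(x):
        return x * 2  # recomputed at most once per minute per argument

    expensive(21)  # computed
    expensive(21)  # served from the region
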
@@ -5,6 +5,7 b' from pyramid.config import Configurator' | |||||
5 |
|
5 | |||
6 | log = logging.getLogger(__name__) |
|
6 | log = logging.getLogger(__name__) | |
7 |
|
7 | |||
|
8 | ||||
8 | class InspectProxy(object): |
|
9 | class InspectProxy(object): | |
9 | """ |
|
10 | """ | |
10 | Proxy to the `inspect` module that allows us to use the pyramid include |
|
11 | Proxy to the `inspect` module that allows us to use the pyramid include | |
@@ -17,7 +18,7 b' class InspectProxy(object):' | |||||
17 | """ |
|
18 | """ | |
18 | if inspect.ismethod(cyfunction): |
|
19 | if inspect.ismethod(cyfunction): | |
19 | cyfunction = cyfunction.im_func |
|
20 | cyfunction = cyfunction.im_func | |
20 |
return getattr(cyfunction, |
|
21 | return getattr(cyfunction, "func_code") | |
21 |
|
22 | |||
22 | def getmodule(self, *args, **kwds): |
|
23 | def getmodule(self, *args, **kwds): | |
23 | """ |
|
24 | """ | |
@@ -40,14 +41,14 b' class InspectProxy(object):' | |||||
40 | """ |
|
41 | """ | |
41 | # Check if it's called to look up the source file that contains the |
|
42 | # Check if it's called to look up the source file that contains the | |
42 | # magic pyramid `includeme` callable. |
|
43 | # magic pyramid `includeme` callable. | |
43 |
if getattr(obj, |
|
44 | if getattr(obj, "__name__") == "includeme": | |
44 | try: |
|
45 | try: | |
45 | return inspect.getfile(obj) |
|
46 | return inspect.getfile(obj) | |
46 | except TypeError as e: |
|
47 | except TypeError as e: | |
47 | # Cython functions are not recognized as functions by the |
|
48 | # Cython functions are not recognized as functions by the | |
48 | # inspect module. We have to unpack the func_code attribute |
|
49 | # inspect module. We have to unpack the func_code attribute | |
49 | # ourself. |
|
50 | # ourself. | |
50 |
if |
|
51 | if "cyfunction" in e.message: | |
51 | obj = self._get_cyfunction_func_code(obj) |
|
52 | obj = self._get_cyfunction_func_code(obj) | |
52 | return inspect.getfile(obj) |
|
53 | return inspect.getfile(obj) | |
53 | raise |
|
54 | raise | |
@@ -60,33 +61,32 b' class CythonCompatConfigurator(Configurator):' | |||||
60 | Customized configurator to replace the inspect class attribute with |
|
61 | Customized configurator to replace the inspect class attribute with | |
61 | a custom one that is cython compatible. |
|
62 | a custom one that is cython compatible. | |
62 | """ |
|
63 | """ | |
|
64 | ||||
63 | inspect = InspectProxy() |
|
65 | inspect = InspectProxy() | |
64 |
|
66 | |||
65 |
|
67 | |||
66 | def register_appenlight_plugin(config, plugin_name, plugin_config): |
|
68 | def register_appenlight_plugin(config, plugin_name, plugin_config): | |
67 | def register(): |
|
69 | def register(): | |
68 |
log.warning( |
|
70 | log.warning("Registering plugin: {}".format(plugin_name)) | |
69 | if plugin_name not in config.registry.appenlight_plugins: |
|
71 | if plugin_name not in config.registry.appenlight_plugins: | |
70 | config.registry.appenlight_plugins[plugin_name] = { |
|
72 | config.registry.appenlight_plugins[plugin_name] = { | |
71 |
|
|
73 | "javascript": None, | |
72 |
|
|
74 | "static": None, | |
73 |
|
|
75 | "css": None, | |
74 |
|
|
76 | "celery_tasks": None, | |
75 |
|
|
77 | "celery_beats": None, | |
76 |
|
|
78 | "fulltext_indexer": None, | |
77 |
|
|
79 | "sqlalchemy_migrations": None, | |
78 |
|
|
80 | "default_values_setter": None, | |
79 |
|
|
81 | "header_html": None, | |
80 |
|
|
82 | "resource_types": [], | |
81 |
|
|
83 | "url_gen": None, | |
82 | } |
|
84 | } | |
83 | config.registry.appenlight_plugins[plugin_name].update( |
|
85 | config.registry.appenlight_plugins[plugin_name].update(plugin_config) | |
84 | plugin_config) |
|
|||
85 | # inform AE what kind of resource types we have available |
|
86 | # inform AE what kind of resource types we have available | |
86 | # so we can avoid failing when a plugin is removed but data |
|
87 | # so we can avoid failing when a plugin is removed but data | |
87 | # is still present in the db |
|
88 | # is still present in the db | |
88 |
if plugin_config.get( |
|
89 | if plugin_config.get("resource_types"): | |
89 | config.registry.resource_types.extend( |
|
90 | config.registry.resource_types.extend(plugin_config["resource_types"]) | |
90 | plugin_config['resource_types']) |
|
|||
91 |
|
91 | |||
92 |
config.action( |
|
92 | config.action("appenlight_plugin={}".format(plugin_name), register) |
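
Registration above is deferred through Pyramid's config.action: the discriminator string makes two plugins with the same name a configuration conflict instead of a silent overwrite. The idiom in isolation (plugin name illustrative):

    def includeme(config):
        def register():
            config.registry.settings.setdefault("my_plugin_loaded", True)

        # queued and executed at config.commit(), conflict-checked by key
        config.action("appenlight_plugin=my_plugin", register)
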
@@ -23,20 +23,20 b' ENCRYPTION_SECRET = None' | |||||
23 | def encrypt_fernet(value): |
|
23 | def encrypt_fernet(value): | |
24 | # avoid double encryption |
|
24 | # avoid double encryption | |
25 | # not sure if this is needed but it won't hurt too much to have this |
|
25 | # not sure if this is needed but it won't hurt too much to have this | |
26 | if value.startswith( | 26 | if value.startswith("enc$fernet$"): | |
27 | return value | 27 | return value | |
28 | f = Fernet(ENCRYPTION_SECRET) | 28 | f = Fernet(ENCRYPTION_SECRET) | |
29 | return | 29 | return "enc$fernet${}".format(f.encrypt(value.encode("utf8")).decode("utf8")) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | def decrypt_fernet(value): |
|
32 | def decrypt_fernet(value): | |
33 |
parts = value.split( |
|
33 | parts = value.split("$", 3) | |
34 | if not len(parts) == 3: |
|
34 | if not len(parts) == 3: | |
35 | # not encrypted values |
|
35 | # not encrypted values | |
36 | return value |
|
36 | return value | |
37 | else: |
|
37 | else: | |
38 | f = Fernet(ENCRYPTION_SECRET) |
|
38 | f = Fernet(ENCRYPTION_SECRET) | |
39 |
decrypted_data = f.decrypt(parts[2].encode( |
|
39 | decrypted_data = f.decrypt(parts[2].encode("utf8")).decode("utf8") | |
40 | return decrypted_data |
|
40 | return decrypted_data | |
41 |
|
41 | |||
42 |
|
42 |
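
The enc$fernet$ prefix only marks values that are already encrypted; the payload after the second $ is a standard Fernet token. A round trip with cryptography's API, sketched:

    from cryptography.fernet import Fernet

    secret = Fernet.generate_key()  # stands in for ENCRYPTION_SECRET
    f = Fernet(secret)
    token = "enc$fernet${}".format(f.encrypt(b"hunter2").decode("utf8"))
    prefix, scheme, payload = token.split("$", 2)
    assert f.decrypt(payload.encode("utf8")) == b"hunter2"
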
@@ -1,4 +1,5 b'' | |||||
1 | import collections |
|
1 | import collections | |
|
2 | ||||
2 | # -*- coding: utf-8 -*- |
|
3 | # -*- coding: utf-8 -*- | |
3 |
|
4 | |||
4 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
5 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
@@ -20,13 +21,14 b' class StupidEnum(object):' | |||||
20 | @classmethod |
|
21 | @classmethod | |
21 | def set_inverse(cls): |
|
22 | def set_inverse(cls): | |
22 | cls._inverse_values = dict( |
|
23 | cls._inverse_values = dict( | |
23 | (y, x) for x, y in vars(cls).items() if |
|
24 | (y, x) | |
24 | not x.startswith('_') and not callable(y) |
|
25 | for x, y in vars(cls).items() | |
|
26 | if not x.startswith("_") and not callable(y) | |||
25 | ) |
|
27 | ) | |
26 |
|
28 | |||
27 | @classmethod |
|
29 | @classmethod | |
28 | def key_from_value(cls, value): |
|
30 | def key_from_value(cls, value): | |
29 |
if not hasattr(cls, |
|
31 | if not hasattr(cls, "_inverse_values"): | |
30 | cls.set_inverse() |
|
32 | cls.set_inverse() | |
31 | return cls._inverse_values.get(value) |
|
33 | return cls._inverse_values.get(value) | |
32 |
|
34 |
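
StupidEnum builds its value-to-name map lazily on first lookup. Behaviour in isolation (class body repeated here so the sketch runs on its own):

    class StupidEnum(object):
        @classmethod
        def set_inverse(cls):
            cls._inverse_values = dict(
                (y, x) for x, y in vars(cls).items()
                if not x.startswith("_") and not callable(y)
            )

        @classmethod
        def key_from_value(cls, value):
            if not hasattr(cls, "_inverse_values"):
                cls.set_inverse()
            return cls._inverse_values.get(value)

    class ReportType(StupidEnum):
        ERROR = 1
        SLOW = 2

    assert ReportType.key_from_value(2) == "SLOW"
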
@@ -25,7 +25,7 b' import functools' | |||||
25 | import decimal |
|
25 | import decimal | |
26 | import imp |
|
26 | import imp | |
27 |
|
27 | |||
28 |
__all__ = [ |
|
28 | __all__ = ["json", "simplejson", "stdlibjson"] | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | def _is_aware(value): |
|
31 | def _is_aware(value): | |
@@ -35,8 +35,7 b' def _is_aware(value):' | |||||
35 | The logic is described in Python's docs: |
|
35 | The logic is described in Python's docs: | |
36 | http://docs.python.org/library/datetime.html#datetime.tzinfo |
|
36 | http://docs.python.org/library/datetime.html#datetime.tzinfo | |
37 | """ |
|
37 | """ | |
38 | return (value.tzinfo is not None |
|
38 | return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None | |
39 | and value.tzinfo.utcoffset(value) is not None) |
|
|||
40 |
|
39 | |||
41 |
|
40 | |||
42 | def _obj_dump(obj): |
|
41 | def _obj_dump(obj): | |
@@ -55,8 +54,8 b' def _obj_dump(obj):' | |||||
55 | r = obj.isoformat() |
|
54 | r = obj.isoformat() | |
56 | # if obj.microsecond: |
|
55 | # if obj.microsecond: | |
57 | # r = r[:23] + r[26:] |
|
56 | # r = r[:23] + r[26:] | |
58 |
if r.endswith( |
|
57 | if r.endswith("+00:00"): | |
59 |
r = r[:-6] + |
|
58 | r = r[:-6] + "Z" | |
60 | return r |
|
59 | return r | |
61 | elif isinstance(obj, datetime.date): |
|
60 | elif isinstance(obj, datetime.date): | |
62 | return obj.isoformat() |
|
61 | return obj.isoformat() | |
@@ -71,7 +70,7 b' def _obj_dump(obj):' | |||||
71 | return r |
|
70 | return r | |
72 | elif isinstance(obj, set): |
|
71 | elif isinstance(obj, set): | |
73 | return list(obj) |
|
72 | return list(obj) | |
74 |
elif hasattr(obj, |
|
73 | elif hasattr(obj, "__json__"): | |
75 | if callable(obj.__json__): |
|
74 | if callable(obj.__json__): | |
76 | return obj.__json__() |
|
75 | return obj.__json__() | |
77 | else: |
|
76 | else: | |
@@ -83,8 +82,7 b' def _obj_dump(obj):' | |||||
83 | # Import simplejson |
|
82 | # Import simplejson | |
84 | try: |
|
83 | try: | |
85 | # import simplejson initially |
|
84 | # import simplejson initially | |
86 |
_sj = imp.load_module( |
|
85 | _sj = imp.load_module("_sj", *imp.find_module("simplejson")) | |
87 |
|
||||
88 |
|
86 | |||
89 | def extended_encode(obj): |
|
87 | def extended_encode(obj): | |
90 | try: |
|
88 | try: | |
@@ -93,22 +91,21 b' try:' | |||||
93 | pass |
|
91 | pass | |
94 | raise TypeError("%r is not JSON serializable" % (obj,)) |
|
92 | raise TypeError("%r is not JSON serializable" % (obj,)) | |
95 |
|
93 | |||
96 |
|
||||
97 | # we handle decimals our own it makes unified behavior of json vs |
|
94 | # we handle decimals our own it makes unified behavior of json vs | |
98 | # simplejson |
|
95 | # simplejson | |
99 |
sj_version = [int(x) for x in _sj.__version__.split( |
|
96 | sj_version = [int(x) for x in _sj.__version__.split(".")] | |
100 | major, minor = sj_version[0], sj_version[1] |
|
97 | major, minor = sj_version[0], sj_version[1] | |
101 | if major < 2 or (major == 2 and minor < 1): |
|
98 | if major < 2 or (major == 2 and minor < 1): | |
102 | # simplejson < 2.1 doesnt support use_decimal |
|
99 | # simplejson < 2.1 doesnt support use_decimal | |
103 | _sj.dumps = functools.partial( |
|
100 | _sj.dumps = functools.partial(_sj.dumps, default=extended_encode) | |
104 |
|
|
101 | _sj.dump = functools.partial(_sj.dump, default=extended_encode) | |
105 | _sj.dump = functools.partial( |
|
|||
106 | _sj.dump, default=extended_encode) |
|
|||
107 | else: |
|
102 | else: | |
108 | _sj.dumps = functools.partial( |
|
103 | _sj.dumps = functools.partial( | |
109 |
_sj.dumps, default=extended_encode, use_decimal=False |
|
104 | _sj.dumps, default=extended_encode, use_decimal=False | |
|
105 | ) | |||
110 | _sj.dump = functools.partial( |
|
106 | _sj.dump = functools.partial( | |
111 |
_sj.dump, default=extended_encode, use_decimal=False |
|
107 | _sj.dump, default=extended_encode, use_decimal=False | |
|
108 | ) | |||
112 | simplejson = _sj |
|
109 | simplejson = _sj | |
113 |
|
110 | |||
114 | except ImportError: |
|
111 | except ImportError: | |
@@ -117,8 +114,7 b' except ImportError:' | |||||
117 |
|
114 | |||
118 | try: |
|
115 | try: | |
119 | # simplejson not found try out regular json module |
|
116 | # simplejson not found try out regular json module | |
120 |
_json = imp.load_module( |
|
117 | _json = imp.load_module("_json", *imp.find_module("json")) | |
121 |
|
||||
122 |
|
118 | |||
123 | # extended JSON encoder for json |
|
119 | # extended JSON encoder for json | |
124 | class ExtendedEncoder(_json.JSONEncoder): |
|
120 | class ExtendedEncoder(_json.JSONEncoder): | |
@@ -129,7 +125,6 b' try:' | |||||
129 | pass |
|
125 | pass | |
130 | raise TypeError("%r is not JSON serializable" % (obj,)) |
|
126 | raise TypeError("%r is not JSON serializable" % (obj,)) | |
131 |
|
127 | |||
132 |
|
||||
133 | # monkey-patch JSON encoder to use extended version |
|
128 | # monkey-patch JSON encoder to use extended version | |
134 | _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder) |
|
129 | _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder) | |
135 | _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder) |
|
130 | _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder) | |
@@ -145,4 +140,4 b' if simplejson:' | |||||
145 | elif _json: |
|
140 | elif _json: | |
146 | json = _json |
|
141 | json = _json | |
147 | else: |
|
142 | else: | |
148 |
raise ImportError( |
|
143 | raise ImportError("Could not find any json modules") |
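
Both branches above wrap the serializer the same way: pass a fallback that knows the project's extra types and let the library raise for everything else. The pattern with the stdlib module:

    import datetime
    import json

    def extended_encode(obj):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        raise TypeError("%r is not JSON serializable" % (obj,))

    json.dumps({"at": datetime.datetime(2017, 1, 1)}, default=extended_encode)
    # -> '{"at": "2017-01-01T00:00:00"}'
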
@@ -26,94 +26,135 b' _ = lambda x: x' | |||||
26 |
|
26 | |||
27 | time_deltas = OrderedDict() |
|
27 | time_deltas = OrderedDict() | |
28 |
|
28 | |||
29 | time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1), |
|
29 | time_deltas["1m"] = { | |
30 | 'label': '1 minute', 'minutes': 1} |
|
30 | "delta": datetime.timedelta(minutes=1), | |
31 |
|
31 | "label": "1 minute", | ||
32 | time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5), |
|
32 | "minutes": 1, | |
33 | 'label': '5 minutes', 'minutes': 5} |
|
33 | } | |
34 | time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30), |
|
34 | ||
35 | 'label': '30 minutes', 'minutes': 30} |
|
35 | time_deltas["5m"] = { | |
36 |
|
|
36 | "delta": datetime.timedelta(minutes=5), | |
37 | 'label': '60 minutes', 'minutes': 60} |
|
37 | "label": "5 minutes", | |
38 | time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours', |
|
38 | "minutes": 5, | |
39 | 'minutes': 60 * 4} |
|
39 | } | |
40 | time_deltas['12h'] = {'delta': datetime.timedelta(hours=12), |
|
40 | time_deltas["30m"] = { | |
41 | 'label': '12 hours', 'minutes': 60 * 12} |
|
41 | "delta": datetime.timedelta(minutes=30), | |
42 | time_deltas['24h'] = {'delta': datetime.timedelta(hours=24), |
|
42 | "label": "30 minutes", | |
43 | 'label': '24 hours', 'minutes': 60 * 24} |
|
43 | "minutes": 30, | |
44 | time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days', |
|
44 | } | |
45 | 'minutes': 60 * 24 * 3} |
|
45 | time_deltas["1h"] = { | |
46 | time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days', |
|
46 | "delta": datetime.timedelta(hours=1), | |
47 | 'minutes': 60 * 24 * 7} |
|
47 | "label": "60 minutes", | |
48 | time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days', |
|
48 | "minutes": 60, | |
49 | 'minutes': 60 * 24 * 14} |
|
49 | } | |
50 | time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days', |
|
50 | time_deltas["4h"] = { | |
51 | 'minutes': 60 * 24 * 31} |
|
51 | "delta": datetime.timedelta(hours=4), | |
52 | time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3), |
|
52 | "label": "4 hours", | |
53 | 'label': '3 months', |
|
53 | "minutes": 60 * 4, | |
54 | 'minutes': 60 * 24 * 31 * 3} |
|
54 | } | |
55 | time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6), |
|
55 | time_deltas["12h"] = { | |
56 | 'label': '6 months', |
|
56 | "delta": datetime.timedelta(hours=12), | |
57 | 'minutes': 60 * 24 * 31 * 6} |
|
57 | "label": "12 hours", | |
58 | time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12), |
|
58 | "minutes": 60 * 12, | |
59 | 'label': '12 months', |
|
59 | } | |
60 | 'minutes': 60 * 24 * 31 * 12} |
|
60 | time_deltas["24h"] = { | |
|
61 | "delta": datetime.timedelta(hours=24), | |||
|
62 | "label": "24 hours", | |||
|
63 | "minutes": 60 * 24, | |||
|
64 | } | |||
|
65 | time_deltas["3d"] = { | |||
|
66 | "delta": datetime.timedelta(days=3), | |||
|
67 | "label": "3 days", | |||
|
68 | "minutes": 60 * 24 * 3, | |||
|
69 | } | |||
|
70 | time_deltas["1w"] = { | |||
|
71 | "delta": datetime.timedelta(days=7), | |||
|
72 | "label": "7 days", | |||
|
73 | "minutes": 60 * 24 * 7, | |||
|
74 | } | |||
|
75 | time_deltas["2w"] = { | |||
|
76 | "delta": datetime.timedelta(days=14), | |||
|
77 | "label": "14 days", | |||
|
78 | "minutes": 60 * 24 * 14, | |||
|
79 | } | |||
|
80 | time_deltas["1M"] = { | |||
|
81 | "delta": datetime.timedelta(days=31), | |||
|
82 | "label": "31 days", | |||
|
83 | "minutes": 60 * 24 * 31, | |||
|
84 | } | |||
|
85 | time_deltas["3M"] = { | |||
|
86 | "delta": datetime.timedelta(days=31 * 3), | |||
|
87 | "label": "3 months", | |||
|
88 | "minutes": 60 * 24 * 31 * 3, | |||
|
89 | } | |||
|
90 | time_deltas["6M"] = { | |||
|
91 | "delta": datetime.timedelta(days=31 * 6), | |||
|
92 | "label": "6 months", | |||
|
93 | "minutes": 60 * 24 * 31 * 6, | |||
|
94 | } | |||
|
95 | time_deltas["12M"] = { | |||
|
96 | "delta": datetime.timedelta(days=31 * 12), | |||
|
97 | "label": "12 months", | |||
|
98 | "minutes": 60 * 24 * 31 * 12, | |||
|
99 | } | |||
61 |
|
100 | |||
62 | # used in json representation |
|
101 | # used in json representation | |
63 | time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']}) |
|
102 | time_options = dict( | |
64 | for k, v in time_deltas.items()]) |
|
103 | [ | |
65 | FlashMsg = namedtuple('FlashMsg', ['msg', 'level']) |
|
104 | (k, {"label": v["label"], "minutes": v["minutes"]}) | |
|
105 | for k, v in time_deltas.items() | |||
|
106 | ] | |||
|
107 | ) | |||
|
108 | FlashMsg = namedtuple("FlashMsg", ["msg", "level"]) | |||
66 |
|
109 | |||
67 |
|
110 | |||
68 | def get_flash(request): |
|
111 | def get_flash(request): | |
69 | messages = [] |
|
112 | messages = [] | |
70 | messages.extend( |
|
113 | messages.extend( | |
71 | [FlashMsg(msg, 'error') |
|
114 | [FlashMsg(msg, "error") for msg in request.session.peek_flash("error")] | |
72 | for msg in request.session.peek_flash('error')]) |
|
115 | ) | |
73 | messages.extend([FlashMsg(msg, 'warning') |
|
|||
74 | for msg in request.session.peek_flash('warning')]) |
|
|||
75 | messages.extend( |
|
116 | messages.extend( | |
76 |
[FlashMsg(msg, |
|
117 | [FlashMsg(msg, "warning") for msg in request.session.peek_flash("warning")] | |
|
118 | ) | |||
|
119 | messages.extend([FlashMsg(msg, "notice") for msg in request.session.peek_flash()]) | |||
77 | return messages |
|
120 | return messages | |
78 |
|
121 | |||
79 |
|
122 | |||
80 | def clear_flash(request): |
|
123 | def clear_flash(request): | |
81 |
request.session.pop_flash( |
|
124 | request.session.pop_flash("error") | |
82 |
request.session.pop_flash( |
|
125 | request.session.pop_flash("warning") | |
83 | request.session.pop_flash() |
|
126 | request.session.pop_flash() | |
84 |
|
127 | |||
85 |
|
128 | |||
86 | def get_type_formatted_flash(request): |
|
129 | def get_type_formatted_flash(request): | |
87 | return [{'msg': message.msg, 'type': message.level} |
|
130 | return [ | |
88 |
for message in get_flash(request) |
|
131 | {"msg": message.msg, "type": message.level} for message in get_flash(request) | |
|
132 | ] | |||
89 |
|
133 | |||
90 |
|
134 | |||
91 | def gen_pagination_headers(request, paginator): |
|
135 | def gen_pagination_headers(request, paginator): | |
92 | headers = { |
|
136 | headers = { | |
93 |
|
|
137 | "x-total-count": str(paginator.item_count), | |
94 |
|
|
138 | "x-current-page": str(paginator.page), | |
95 |
|
|
139 | "x-items-per-page": str(paginator.items_per_page), | |
96 | } |
|
140 | } | |
97 | params_dict = request.GET.dict_of_lists() |
|
141 | params_dict = request.GET.dict_of_lists() | |
98 | last_page_params = copy.deepcopy(params_dict) |
|
142 | last_page_params = copy.deepcopy(params_dict) | |
99 |
last_page_params[ |
|
143 | last_page_params["page"] = paginator.last_page or 1 | |
100 | first_page_params = copy.deepcopy(params_dict) |
|
144 | first_page_params = copy.deepcopy(params_dict) | |
101 |
first_page_params.pop( |
|
145 | first_page_params.pop("page", None) | |
102 | next_page_params = copy.deepcopy(params_dict) |
|
146 | next_page_params = copy.deepcopy(params_dict) | |
103 |
next_page_params[ |
|
147 | next_page_params["page"] = paginator.next_page or paginator.last_page or 1 | |
104 | prev_page_params = copy.deepcopy(params_dict) |
|
148 | prev_page_params = copy.deepcopy(params_dict) | |
105 |
prev_page_params[ |
|
149 | prev_page_params["page"] = paginator.previous_page or 1 | |
106 | lp_url = request.current_route_url(_query=last_page_params) |
|
150 | lp_url = request.current_route_url(_query=last_page_params) | |
107 | fp_url = request.current_route_url(_query=first_page_params) |
|
151 | fp_url = request.current_route_url(_query=first_page_params) | |
108 | links = [ |
|
152 | links = ['rel="last", <{}>'.format(lp_url), 'rel="first", <{}>'.format(fp_url)] | |
109 | 'rel="last", <{}>'.format(lp_url), |
|
|||
110 | 'rel="first", <{}>'.format(fp_url), |
|
|||
111 | ] |
|
|||
112 | if first_page_params != prev_page_params: |
|
153 | if first_page_params != prev_page_params: | |
113 | prev_url = request.current_route_url(_query=prev_page_params) |
|
154 | prev_url = request.current_route_url(_query=prev_page_params) | |
114 | links.append('rel="prev", <{}>'.format(prev_url)) |
|
155 | links.append('rel="prev", <{}>'.format(prev_url)) | |
115 | if last_page_params != next_page_params: |
|
156 | if last_page_params != next_page_params: | |
116 | next_url = request.current_route_url(_query=next_page_params) |
|
157 | next_url = request.current_route_url(_query=next_page_params) | |
117 | links.append('rel="next", <{}>'.format(next_url)) |
|
158 | links.append('rel="next", <{}>'.format(next_url)) | |
118 |
headers[ |
|
159 | headers["link"] = "; ".join(links) | |
119 | return headers |
|
160 | return headers |
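
gen_pagination_headers emits the counters as x-* headers plus a single link header whose entries are joined with "; ". Illustrative output for page 2 of 10 (URL hypothetical):

    {
        "x-total-count": "500",
        "x-current-page": "2",
        "x-items-per-page": "50",
        "link": 'rel="last", <https://example.com/reports?page=10>; '
                'rel="first", <https://example.com/reports>; '
                'rel="prev", <https://example.com/reports?page=1>; '
                'rel="next", <https://example.com/reports?page=3>',
    }
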
@@ -18,17 +18,21 @@ import re
from appenlight.lib.ext_json import json
from jinja2 import Markup, escape, evalcontextfilter

_paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")


@evalcontextfilter
def nl2br(eval_ctx, value):
    if eval_ctx.autoescape:
        result = "\n\n".join(
            "<p>%s</p>" % p.replace("\n", Markup("<br>\n"))
            for p in _paragraph_re.split(escape(value))
        )
    else:
        result = "\n\n".join(
            "<p>%s</p>" % p.replace("\n", "<br>\n")
            for p in _paragraph_re.split(escape(value))
        )
    if eval_ctx.autoescape:
        result = Markup(result)
    return result
@@ -36,11 +40,14 @@ def nl2br(eval_ctx, value):

@evalcontextfilter
def toJSONUnsafe(eval_ctx, value):
    encoded = (
        json.dumps(value)
        .replace("&", "\\u0026")
        .replace("<", "\\u003c")
        .replace(">", "\\u003e")
        .replace(">", "\\u003e")
        .replace('"', "\\u0022")
        .replace("'", "\\u0027")
        .replace(r"\n", "/\\\n")
    )
    return Markup("'%s'" % encoded)
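The _paragraph_re split drives both branches of nl2br; outside of Jinja the transformation reduces to a few lines, shown here standalone:

import re

_paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")

text = "first line\nsecond line\n\nnew paragraph"
html = "\n\n".join(
    "<p>%s</p>" % p.replace("\n", "<br>\n") for p in _paragraph_re.split(text)
)
print(html)
# <p>first line<br>
# second line</p>
#
# <p>new paragraph</p>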
@@ -17,11 +17,30 @@
import json
import logging

ignored_keys = [
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "message",
    "module",
    "msecs",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "stack_info",
    "thread",
    "threadName",
]


class JSONFormatter(logging.Formatter):
@@ -41,7 +60,7 @@ class JSONFormatter(logging.Formatter):
        record.message = record.getMessage()
        log_dict = vars(record)
        keys = [k for k in log_dict.keys() if k not in ignored_keys]
        payload = {"message": record.message}
        payload.update({k: log_dict[k] for k in keys})
        record.message = json.dumps(payload, default=lambda x: str(x))
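A usage sketch, assuming the JSONFormatter above is importable; keys passed via "extra" survive precisely because they are not in ignored_keys (the exact output depends on the rest of format(), which this hunk does not show):

import logging

handler = logging.StreamHandler()
handler.setFormatter(JSONFormatter())
logger = logging.getLogger("appenlight.demo")
logger.addHandler(handler)
logger.warning("queue is filling up", extra={"queue_name": "reports", "size": 512})
# emits something like:
# {"message": "queue is filling up", "queue_name": "reports", "size": 512}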
@@ -14,52 +14,56 @@
# See the License for the specific language governing permissions and
# limitations under the License.

BASE = "appenlight:data:{}"

REDIS_KEYS = {
    "tasks": {
        "add_reports_lock": BASE.format("add_reports_lock:{}"),
        "add_logs_lock": BASE.format("add_logs_lock:{}"),
    },
    "counters": {
        "events_per_minute_per_user": BASE.format("events_per_minute_per_user:{}:{}"),
        "reports_per_minute": BASE.format("reports_per_minute:{}"),
        "reports_per_hour_per_app": BASE.format("reports_per_hour_per_app:{}:{}"),
        "reports_per_type": BASE.format("reports_per_type:{}"),
        "logs_per_minute": BASE.format("logs_per_minute:{}"),
        "logs_per_hour_per_app": BASE.format("logs_per_hour_per_app:{}:{}"),
        "metrics_per_minute": BASE.format("metrics_per_minute:{}"),
        "metrics_per_hour_per_app": BASE.format("metrics_per_hour_per_app:{}:{}"),
        "report_group_occurences": BASE.format("report_group_occurences:{}"),
        "report_group_occurences_alerting": BASE.format(
            "report_group_occurences_alerting:{}"
        ),
        "report_group_occurences_10th": BASE.format("report_group_occurences_10th:{}"),
        "report_group_occurences_100th": BASE.format(
            "report_group_occurences_100th:{}"
        ),
    },
    "rate_limits": {
        "per_application_reports_rate_limit": BASE.format(
            "per_application_reports_limit:{}:{}"
        ),
        "per_application_logs_rate_limit": BASE.format(
            "per_application_logs_rate_limit:{}:{}"
        ),
        "per_application_metrics_rate_limit": BASE.format(
            "per_application_metrics_rate_limit:{}:{}"
        ),
    },
    "apps_that_got_new_data_per_hour": BASE.format(
        "apps_that_got_new_data_per_hour:{}"
    ),
    "apps_that_had_reports": BASE.format("apps_that_had_reports"),
    "apps_that_had_error_reports": BASE.format("apps_that_had_error_reports"),
    "apps_that_had_reports_alerting": BASE.format("apps_that_had_reports_alerting"),
    "apps_that_had_error_reports_alerting": BASE.format(
        "apps_that_had_error_reports_alerting"
    ),
    "reports_to_notify_per_type_per_app": BASE.format(
        "reports_to_notify_per_type_per_app:{}:{}"
    ),
    "reports_to_notify_per_type_per_app_alerting": BASE.format(
        "reports_to_notify_per_type_per_app_alerting:{}:{}"
    ),
    "seen_tag_list": BASE.format("seen_tag_list"),
}
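The nested entries above are key prototypes: the outer BASE prefix is baked in at definition time and call sites fill the remaining placeholders. Note that the reports rate-limit key renders with "per_application_reports_limit" (no "rate_"), exactly as defined above:

key = REDIS_KEYS["counters"]["reports_per_minute"].format("2019-01-01T10:00")
# 'appenlight:data:reports_per_minute:2019-01-01T10:00'

key = REDIS_KEYS["rate_limits"]["per_application_reports_rate_limit"].format(7, 10)
# 'appenlight:data:per_application_reports_limit:7:10'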
@@ -54,11 +54,11 @@ def unsafe_json_body(request):
    try:
        return request.json_body
    except ValueError:
        raise JSONException("Incorrect JSON")


def get_user(request):
    if not request.path_info.startswith("/static"):
        user_id = unauthenticated_userid(request)
        try:
            user_id = int(user_id)
@@ -68,8 +68,10 @@ def get_user(request):
        if user_id:
            user = UserService.by_id(user_id)
            if user:
                request.environ["appenlight.username"] = "%d:%s" % (
                    user_id,
                    user.user_name,
                )
                return user
            else:
                return None
@@ -85,7 +87,7 @@ def add_flash_to_headers(request, clear=True):
    flash queue
    """
    flash_msgs = helpers.get_type_formatted_flash(request)
    request.response.headers["x-flash-messages"] = json.dumps(flash_msgs)
    helpers.clear_flash(request)
@@ -94,42 +96,36 @@ def get_authomatic(request):
    # authomatic social auth
    authomatic_conf = {
        # callback http://yourapp.com/social_auth/twitter
        "twitter": {
            "class_": oauth1.Twitter,
            "consumer_key": settings.get("authomatic.pr.twitter.key", ""),
            "consumer_secret": settings.get("authomatic.pr.twitter.secret", ""),
        },
        # callback http://yourapp.com/social_auth/facebook
        "facebook": {
            "class_": oauth2.Facebook,
            "consumer_key": settings.get("authomatic.pr.facebook.app_id", ""),
            "consumer_secret": settings.get("authomatic.pr.facebook.secret", ""),
            "scope": ["email"],
        },
        # callback http://yourapp.com/social_auth/google
        "google": {
            "class_": oauth2.Google,
            "consumer_key": settings.get("authomatic.pr.google.key", ""),
            "consumer_secret": settings.get("authomatic.pr.google.secret", ""),
            "scope": ["profile", "email"],
        },
        "github": {
            "class_": oauth2.GitHub,
            "consumer_key": settings.get("authomatic.pr.github.key", ""),
            "consumer_secret": settings.get("authomatic.pr.github.secret", ""),
            "scope": ["repo", "public_repo", "user:email"],
            "access_headers": {"User-Agent": "AppEnlight"},
        },
        "bitbucket": {
            "class_": oauth1.Bitbucket,
            "consumer_key": settings.get("authomatic.pr.bitbucket.key", ""),
            "consumer_secret": settings.get("authomatic.pr.bitbucket.secret", ""),
        },
    }
    return Authomatic(config=authomatic_conf, secret=settings["authomatic.secret"])
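The lookups above expect flat ini-style keys in the registry settings. A hypothetical minimal settings dict satisfying them (every value is a placeholder, not a real credential):

settings = {
    "authomatic.secret": "change-me",
    "authomatic.pr.github.key": "OAUTH_APP_KEY",
    "authomatic.pr.github.secret": "OAUTH_APP_SECRET",
    # providers left unconfigured simply fall back to "" via settings.get(...)
}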
@@ -52,13 +52,13 @@ class RuleBase(object):
        :param field_name:
        :return:
        """
        parts = field_name.split(":") if field_name else []
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = found.get(current_key)
            if not found and parts:
                raise KeyNotFoundException("Key not found in structure")
        return found

    @classmethod
@@ -72,13 +72,13 @@ class RuleBase(object):
        :param field_name:
        :return:
        """
        parts = field_name.split(":")
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = getattr(found, current_key, None)
            if not found and parts:
                raise KeyNotFoundException("Key not found in structure")
        return found

    def normalized_type(self, field, value):
@@ -89,28 +89,32 @@ class RuleBase(object):
        """
        f_type = self.type_matrix.get(field)
        if f_type:
            cast_to = f_type["type"]
        else:
            raise UnknownTypeException("Unknown type")

        if value is None:
            return None

        try:
            if cast_to == "int":
                return int(value)
            elif cast_to == "float":
                return float(value)
            elif cast_to == "unicode":
                return str(value)
        except ValueError as exc:
            raise InvalidValueException(exc)


class Rule(RuleBase):
    def __init__(
        self,
        config,
        type_matrix,
        struct_getter=RuleBase.default_dict_struct_getter,
        config_manipulator=None,
    ):
        """

        :param config: dict - contains rule configuration
@@ -159,8 +163,9 @@ class Rule(RuleBase):
            config_manipulator(self)

    def subrule_check(self, rule_config, struct):
        rule = Rule(
            rule_config, self.type_matrix, config_manipulator=self.config_manipulator
        )
        return rule.match(struct)

    def match(self, struct):
@@ -169,32 +174,41 @@ class Rule(RuleBase):
        First tries report value, then tests tags in not found, then finally
        report group
        """
        field_name = self.config.get("field")
        test_value = self.config.get("value")

        if not field_name:
            return False

        if field_name == "__AND__":
            rule = AND(
                self.config["rules"],
                self.type_matrix,
                config_manipulator=self.config_manipulator,
            )
            return rule.match(struct)
        elif field_name == "__OR__":
            rule = OR(
                self.config["rules"],
                self.type_matrix,
                config_manipulator=self.config_manipulator,
            )
            return rule.match(struct)
        elif field_name == "__NOT__":
            rule = NOT(
                self.config["rules"],
                self.type_matrix,
                config_manipulator=self.config_manipulator,
            )
            return rule.match(struct)

        if test_value is None:
            return False

        try:
            struct_value = self.normalized_type(
                field_name, self.struct_getter(struct, field_name)
            )
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False
@@ -205,24 +219,23 @@ class Rule(RuleBase):
            log.error(str(exc))
            return False

        if self.config["op"] not in ("startswith", "endswith", "contains"):
            try:
                return getattr(operator, self.config["op"])(struct_value, test_value)
            except TypeError:
                return False
        elif self.config["op"] == "startswith":
            return struct_value.startswith(test_value)
        elif self.config["op"] == "endswith":
            return struct_value.endswith(test_value)
        elif self.config["op"] == "contains":
            return test_value in struct_value
        raise BadConfigException(
            "Invalid configuration, " "unknown operator: {}".format(self.config)
        )

    def __repr__(self):
        return "<Rule {} {}>".format(self.config.get("field"), self.config.get("value"))


class AND(Rule):
@@ -231,8 +244,7 @@ class AND(Rule):
        self.rules = rules

    def match(self, struct):
        return all([self.subrule_check(r_conf, struct) for r_conf in self.rules])


class NOT(Rule):
@@ -241,8 +253,7 @@ class NOT(Rule):
        self.rules = rules

    def match(self, struct):
        return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules])


class OR(Rule):
@@ -251,14 +262,12 @@ class OR(Rule):
        self.rules = rules

    def match(self, struct):
        return any([self.subrule_check(r_conf, struct) for r_conf in self.rules])


class RuleService(object):
    @staticmethod
    def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None):
        """
        Returns modified rule with manipulator function
        By default manipulator function replaces field id from labels_dict
@@ -270,28 +279,33 @@ class RuleService(object):
        """
        rev_map = {}
        for k, v in labels_dict.items():
            rev_map[(v["agg"], v["key"])] = k

        if manipulator_func is None:

            def label_rewriter_func(rule):
                field = rule.config.get("field")
                if not field or rule.config["field"] in [
                    "__OR__",
                    "__AND__",
                    "__NOT__",
                ]:
                    return

                to_map = field_mappings.get(rule.config["field"])

                # we need to replace series field with _AE_NOT_FOUND_ to not match
                # accidentally some other field which happens to have the series that
                # was used when the alert was created
                if to_map:
                    to_replace = rev_map.get(
                        (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_"
                    )
                else:
                    to_replace = "_AE_NOT_FOUND_"

                rule.config["field"] = to_replace
                rule.type_matrix[to_replace] = {"type": "float"}

            manipulator_func = label_rewriter_func
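Putting the pieces together, a hedged end-to-end example of the rule classes above; the config layout is inferred from match() and its __AND__ branch, and the nested "group:priority" path exercises the colon-splitting struct getter:

type_matrix = {
    "http_status": {"type": "int"},
    "group:priority": {"type": "int"},
}
rule_config = {
    "field": "__AND__",
    "rules": [
        {"op": "ge", "field": "http_status", "value": "500"},
        {"op": "eq", "field": "group:priority", "value": "5"},
    ],
}

rule = Rule(rule_config, type_matrix)
print(rule.match({"http_status": 502, "group": {"priority": 5}}))  # True
print(rule.match({"http_status": 404, "group": {"priority": 5}}))  # False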
@@ -14,8 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from ziggurat_foundations.models.services.external_identity import (
    ExternalIdentityService,
)
from appenlight.models.external_identity import ExternalIdentity


@@ -24,37 +25,38 @@ def handle_social_data(request, user, social_data):
    update_identity = False

    extng_id = ExternalIdentityService.by_external_id_and_provider(
        social_data["user"]["id"], social_data["credentials"].provider_name
    )

    # fix legacy accounts with wrong google ID
    if not extng_id and social_data["credentials"].provider_name == "google":
        extng_id = ExternalIdentityService.by_external_id_and_provider(
            social_data["user"]["email"], social_data["credentials"].provider_name
        )

    if extng_id:
        extng_id.delete()
        update_identity = True

    if not social_data["user"]["id"]:
        request.session.flash(
            "No external user id found? Perhaps permissions for "
            "authentication are set incorrectly",
            "error",
        )
        return False

    if not extng_id or update_identity:
        if not update_identity:
            request.session.flash(
                "Your external identity is now " "connected with your account"
            )
        ex_identity = ExternalIdentity()
        ex_identity.external_id = social_data["user"]["id"]
        ex_identity.external_user_name = social_data["user"]["user_name"]
        ex_identity.provider_name = social_data["credentials"].provider_name
        ex_identity.access_token = social_data["credentials"].token
        ex_identity.token_secret = social_data["credentials"].token_secret
        ex_identity.alt_token = social_data["credentials"].refresh_token
        user.external_identities.append(ex_identity)
        request.session.pop("zigg.social_auth", None)
@@ -28,9 +28,7 @@ from collections import namedtuple
from datetime import timedelta, datetime, date
from dogpile.cache.api import NO_VALUE
from appenlight.models import Datastores
from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params
from itsdangerous import TimestampSigner
from ziggurat_foundations.permissions import ALL_PERMISSIONS
from ziggurat_foundations.models.services.user import UserService
@@ -40,21 +38,20 @@ from dateutil.rrule import rrule, MONTHLY, DAILY
log = logging.getLogger(__name__)


Stat = namedtuple("Stat", "start_interval value")


def default_extractor(item):
    """
    :param item - item to extract date from
    """
    if hasattr(item, "start_interval"):
        return item.start_interval
    return item["start_interval"]


# fast gap generator
def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None):
    """ generates a list of time/value items based on step and itemiterator
    if there are entries missing from iterator time/None will be returned
    instead
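The generator's body lies outside this hunk, but the docstring's contract can be illustrated; the Stat shape for both input and gap-filled entries is an assumption here, not something the diff shows:

from datetime import datetime, timedelta

items = [
    Stat(datetime(2019, 1, 1, 10, 0), 10),
    Stat(datetime(2019, 1, 1, 10, 2), 7),
]
# the 10:01 slot is missing from the iterator, so per the docstring the
# filled series should carry time/None for it, roughly:
# [Stat(10:00, 10), Stat(10:01, None), Stat(10:02, 7)]
filled = gap_gen_default(
    datetime(2019, 1, 1, 10, 0),
    timedelta(minutes=1),
    iter(items),
    end_time=datetime(2019, 1, 1, 10, 2),
)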
@@ -100,27 +97,31 @@ class DateTimeEncoder(json.JSONEncoder):
        return json.JSONEncoder.default(self, obj)


def channelstream_request(
    secret, endpoint, payload, throw_exceptions=False, servers=None
):
    responses = []
    if not servers:
        servers = []

    signer = TimestampSigner(secret)
    sig_for_server = signer.sign(endpoint)
    for secret, server in [(s["secret"], s["server"]) for s in servers]:
        response = {}
        secret_headers = {
            "x-channelstream-secret": sig_for_server,
            "x-channelstream-endpoint": endpoint,
            "Content-Type": "application/json",
        }
        url = "%s%s" % (server, endpoint)
        try:
            response = requests.post(
                url,
                data=json.dumps(payload, cls=DateTimeEncoder),
                headers=secret_headers,
                verify=False,
                timeout=2,
            ).json()
        except requests.exceptions.RequestException as e:
            if throw_exceptions:
                raise
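A hypothetical invocation matching the server-list shape consumed by the loop above; the "/message" endpoint and the payload layout are assumptions for illustration, not taken from this diff:

servers = [{"secret": "server-secret", "server": "http://127.0.0.1:8000"}]
responses = channelstream_request(
    "server-secret",
    "/message",
    payload=[{"type": "message", "channel": "alerts", "message": {"text": "hi"}}],
    servers=servers,
)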
@@ -130,13 +131,15 @@ def channelstream_request(secret, endpoint, payload, throw_exceptions=False,

def add_cors_headers(response):
    # allow CORS
    response.headers.add("Access-Control-Allow-Origin", "*")
    response.headers.add("XDomainRequestAllowed", "1")
    response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
    # response.headers.add('Access-Control-Allow-Credentials', 'true')
    response.headers.add(
        "Access-Control-Allow-Headers",
        "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie",
    )
    response.headers.add("Access-Control-Max-Age", "86400")


from sqlalchemy.sql import compiler
@@ -145,6 +148,7 @@ from psycopg2.extensions import adapt as sqlescape

# or use the appropriate escape function from your db driver


def compile_query(query):
    dialect = query.session.bind.dialect
    statement = query.statement
@@ -166,22 +170,23 @@ def convert_es_type(input_data):
    return str(input_data)


ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])


def parse_proto(input_data):
    try:
        parts = [int(x) for x in input_data.split(".")]
        while len(parts) < 3:
            parts.append(0)
        return ProtoVersion(*parts)
    except Exception as e:
        log.info("Unknown protocol version: %s" % e)
    return ProtoVersion(99, 99, 99)


def es_index_name_limiter(
    start_date=None, end_date=None, months_in_past=6, ixtypes=None
):
    """
    This function limits the search to 6 months by default so we don't have to
    query 300 elasticsearch indices for 20 years of historical data for example
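parse_proto pads short version strings with zeros and falls back to a sentinel on anything unparsable:

>>> parse_proto("0.5")
ProtoVersion(major=0, minor=5, patch=0)
>>> parse_proto("1.2.3")
ProtoVersion(major=1, minor=2, patch=3)
>>> parse_proto("banana")
ProtoVersion(major=99, minor=99, patch=99)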
@@ -189,23 +194,23 @@ def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,

    # should be cached later
    def get_possible_names():
        return list(Datastores.es.indices.get_alias("*"))

    possible_names = get_possible_names()
    es_index_types = []
    if not ixtypes:
        ixtypes = ["reports", "metrics", "logs"]
    for t in ixtypes:
        if t == "reports":
            es_index_types.append("rcae_r_%s")
        elif t == "logs":
            es_index_types.append("rcae_l_%s")
        elif t == "metrics":
            es_index_types.append("rcae_m_%s")
        elif t == "uptime":
            es_index_types.append("rcae_u_%s")
        elif t == "slow_calls":
            es_index_types.append("rcae_sc_%s")

    if start_date:
        start_date = copy.copy(start_date)
@@ -217,26 +222,34 @@ def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
    if not end_date:
        end_date = start_date + relativedelta(months=months_in_past)

    index_dates = list(
        rrule(
            MONTHLY,
            dtstart=start_date.date().replace(day=1),
            until=end_date.date(),
            count=36,
        )
    )
    index_names = []
    for ix_type in es_index_types:
        to_extend = [
            ix_type % d.strftime("%Y_%m")
            for d in index_dates
            if ix_type % d.strftime("%Y_%m") in possible_names
        ]
        index_names.extend(to_extend)
        for day in list(
            rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366)
        ):
            ix_name = ix_type % day.strftime("%Y_%m_%d")
            if ix_name in possible_names:
                index_names.append(ix_name)
    return index_names


def build_filter_settings_from_query_dict(
    request, params=None, override_app_ids=None, resource_permissions=None
):
    """
    Builds list of normalized search terms for ES from query params
    ensuring application list is restricted to only applications user
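The %s placeholders in the index patterns above expand to month or day stamps, so the candidate index names look like this:

from datetime import datetime

print("rcae_r_%s" % datetime(2019, 1, 1).strftime("%Y_%m"))     # rcae_r_2019_01
print("rcae_l_%s" % datetime(2019, 1, 7).strftime("%Y_%m_%d"))  # rcae_l_2019_01_07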
@@ -249,11 +262,12 @@ def build_filter_settings_from_query_dict(
    params = copy.deepcopy(params)
    applications = []
    if not resource_permissions:
        resource_permissions = ["view"]

    if request.user:
        applications = UserService.resources_with_perms(
            request.user, resource_permissions, resource_types=["application"]
        )

    # CRITICAL - this ensures our resultset is limited to only the ones
    # user has view permissions
@@ -273,11 +287,11 @@ def build_filter_settings_from_query_dict(
    for k, v in list(filter_settings.items()):
        if k in accepted_search_params:
            continue
        tag_list.append({"name": k, "value": v, "op": "eq"})
        # remove the key from filter_settings
        filter_settings.pop(k, None)
    tags = tag_schema.deserialize(tag_list)
    filter_settings["tags"] = tags
    return filter_settings

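In isolation, the loop above turns every unrecognized query key into an "eq" tag and removes it from the filter settings:

filter_settings = {"start_date": "2019-01-01", "server_name": "web01"}
accepted_search_params = {"start_date"}  # stand-in for the imported set
tag_list = []
for k, v in list(filter_settings.items()):
    if k in accepted_search_params:
        continue
    tag_list.append({"name": k, "value": v, "op": "eq"})
    filter_settings.pop(k, None)
print(tag_list)         # [{'name': 'server_name', 'value': 'web01', 'op': 'eq'}]
print(filter_settings)  # {'start_date': '2019-01-01'}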
@@ -299,26 +313,36 @@ def permission_tuple_to_dict(data):
        "resource_type": None,
        "resource_id": None,
        "group_name": None,
        "group_id": None,
    }
    if data.user:
        out["user_name"] = data.user.user_name
    if data.perm_name == ALL_PERMISSIONS:
        out["perm_name"] = "__all_permissions__"
    if data.resource:
        out["resource_name"] = data.resource.resource_name
        out["resource_type"] = data.resource.resource_type
        out["resource_id"] = data.resource.resource_id
    if data.group:
        out["group_name"] = data.group.group_name
        out["group_id"] = data.group.id
    return out


def get_cached_buckets(
    request,
    stats_since,
    end_time,
    fn,
    cache_key,
    gap_gen=None,
    db_session=None,
    step_interval=None,
    iv_extractor=None,
    rerange=False,
    *args,
    **kwargs
):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into daily buckets - if the stats_since and end time give a
    delta bigger than 24hours, then only "todays" data is computed on the fly
@@ -360,25 +384,28 @@ def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
    # do not use custom interval if total time range with new iv would exceed
    # end time
    if not step_interval or stats_since + step_interval >= end_time:
        if delta < h.time_deltas.get("12h")["delta"]:
            step_interval = timedelta(seconds=60)
        elif delta < h.time_deltas.get("3d")["delta"]:
            step_interval = timedelta(seconds=60 * 5)
        elif delta > h.time_deltas.get("2w")["delta"]:
            step_interval = timedelta(days=1)
        else:
            step_interval = timedelta(minutes=60)

    if step_interval >= timedelta(minutes=60):
        log.info(
            "cached_buckets:{}: adjusting start time "
            "for hourly or daily intervals".format(cache_key)
        )
        stats_since = stats_since.replace(hour=0, minute=0)

    ranges = [
        i.start_interval
        for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time))
    ]
    buckets = {}
    storage_key = "buckets:" + cache_key + "{}|{}"
    # this means we basically cache per hour in 3-14 day intervals but i think
    # it's fine at this point - will be faster than db access anyways
@@ -391,45 +418,67 @@ def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
            k = storage_key.format(step_interval.total_seconds(), r)
            value = request.registry.cache_regions.redis_day_30.get(k)
            # last buckets are never loaded from cache
            is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges
            if value is not NO_VALUE and not is_last_result:
                log.info(
                    "cached_buckets:{}: "
                    "loading range {} from cache".format(cache_key, r)
                )
                buckets[r] = value
            else:
                log.info(
                    "cached_buckets:{}: "
                    "loading range {} from storage".format(cache_key, r)
                )
                range_size = step_interval
                if (
                    step_interval == timedelta(minutes=60)
                    and not is_last_result
                    and rerange
                ):
                    range_size = timedelta(days=1)
                    r = r.replace(hour=0, minute=0)
                    log.info(
                        "cached_buckets:{}: "
                        "loading collapsed "
                        "range {} {}".format(cache_key, r, r + range_size)
                    )
                bucket_data = fn(
                    request,
                    r,
                    r + range_size,
                    step_interval,
                    gap_gen,
                    bucket_count=len(ranges),
                    *args,
                    **kwargs
                )
                for b in bucket_data:
                    b_iv = iv_extractor(b)
                    buckets[b_iv] = b
                    k2 = storage_key.format(step_interval.total_seconds(), b_iv)
                    request.registry.cache_regions.redis_day_30.set(k2, b)
        log.info("cached_buckets:{}: saving cache".format(cache_key))
    else:
        # bucket count is 1 for short time ranges <= 24h from now
        bucket_data = fn(
            request,
            stats_since,
            end_time,
            step_interval,
            gap_gen,
            bucket_count=1,
            *args,
            **kwargs
        )
        for b in bucket_data:
            buckets[iv_extractor(b)] = b
    return buckets


def get_cached_split_data(
    request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs
):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into 2 buckets - cached "since_from" bucket and "today"
    bucket - then the data can be reduced into single value
@@ -442,43 +491,51 @@ def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
    delta = end_time - stats_since

    if delta >= timedelta(minutes=60):
        log.info(
            "cached_split_data:{}: adjusting start time "
            "for hourly or daily intervals".format(cache_key)
        )
        stats_since = stats_since.replace(hour=0, minute=0)

    storage_key = "buckets_split_data:" + cache_key + ":{}|{}"
    old_end_time = end_time.replace(hour=0, minute=0)

    final_storage_key = storage_key.format(delta.total_seconds(), old_end_time)
    older_data = None

    cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key)

    if cdata:
        log.info("cached_split_data:{}: found old " "bucket data".format(cache_key))
        older_data = cdata

    if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata:
        log.info(
            "cached_split_data:{}: didn't find the "
            "start bucket in cache so load older data".format(cache_key)
        )
        recent_stats_since = old_end_time
        older_data = fn(
            request,
            stats_since,
            recent_stats_since,
            db_session=db_session,
            *args,
            **kwargs
        )
        request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data)
    elif stats_since < end_time - h.time_deltas.get("24h")["delta"]:
        recent_stats_since = old_end_time
    else:
        recent_stats_since = stats_since

    log.info(
        "cached_split_data:{}: loading fresh "
        "data buckets from last 24h ".format(cache_key)
    )
    todays_data = fn(
        request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs
    )
    return older_data, todays_data

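Both helpers key their cache entries on interval length plus timestamp; with a hypothetical cache_key the rendered Redis keys look like this (note that the per-bucket template above has no separator between the cache key and the interval seconds):

from datetime import datetime, timedelta

cache_key = "apps_report_count:7"  # hypothetical

buckets_key = ("buckets:" + cache_key + "{}|{}").format(
    timedelta(minutes=1).total_seconds(), datetime(2019, 1, 1, 10, 0)
)
# 'buckets:apps_report_count:760.0|2019-01-01 10:00:00'

split_key = ("buckets_split_data:" + cache_key + ":{}|{}").format(
    timedelta(days=3).total_seconds(), datetime(2019, 1, 4)
)
# 'buckets_split_data:apps_report_count:7:259200.0|2019-01-04 00:00:00'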
@@ -24,119 +24,138 @@ log = logging.getLogger(__name__)

def parse_airbrake_xml(request):
    root = request.context.airbrake_xml_etree
    error = root.find("error")
    notifier = root.find("notifier")
    server_env = root.find("server-environment")
    request_data = root.find("request")
    user = root.find("current-user")
    if request_data is not None:
        cgi_data = request_data.find("cgi-data")
        if cgi_data is None:
            cgi_data = []

    error_dict = {
        "class_name": error.findtext("class") or "",
        "error": error.findtext("message") or "",
        "occurences": 1,
        "http_status": 500,
        "priority": 5,
        "server": "unknown",
        "url": "unknown",
        "request": {},
    }
    if user is not None:
        error_dict["username"] = user.findtext("username") or user.findtext("id")
    if notifier is not None:
        error_dict["client"] = notifier.findtext("name")

    if server_env is not None:
        error_dict["server"] = server_env.findtext("hostname", "unknown")

    whitelist_environ = [
        "REMOTE_USER",
        "REMOTE_ADDR",
        "SERVER_NAME",
        "CONTENT_TYPE",
        "HTTP_REFERER",
    ]

    if request_data is not None:
        error_dict["url"] = request_data.findtext("url", "unknown")
        component = request_data.findtext("component")
        action = request_data.findtext("action")
        if component and action:
            error_dict["view_name"] = "%s:%s" % (component, action)
        for node in cgi_data:
            key = node.get("key")
            if key.startswith("HTTP") or key in whitelist_environ:
                error_dict["request"][key] = node.text
            elif "query_parameters" in key:
                error_dict["request"]["GET"] = {}
                for x in node:
                    error_dict["request"]["GET"][x.get("key")] = x.text
            elif "request_parameters" in key:
                error_dict["request"]["POST"] = {}
                for x in node:
                    error_dict["request"]["POST"][x.get("key")] = x.text
            elif key.endswith("cookie"):
                error_dict["request"]["COOKIE"] = {}
                for x in node:
                    error_dict["request"]["COOKIE"][x.get("key")] = x.text
            elif key.endswith("request_id"):
                error_dict["request_id"] = node.text
            elif key.endswith("session"):
                error_dict["request"]["SESSION"] = {}
                for x in node:
                    error_dict["request"]["SESSION"][x.get("key")] = x.text
            else:
                if key in ["rack.session.options"]:
                    # skip secret configs
                    continue
                try:
                    if len(node):
                        error_dict["request"][key] = dict(
                            [(x.get("key"), x.text) for x in node]
                        )
                    else:
                        error_dict["request"][key] = node.text
                except Exception as e:
                    log.warning("Airbrake integration exception: %s" % e)

        error_dict["request"].pop("HTTP_COOKIE", "")

    error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "")
    error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "")
    if "request_id" not in error_dict:
        error_dict["request_id"] = str(uuid.uuid4())
    if request.context.possibly_public:
        # set ip for reports that come from airbrake js client
        error_dict["timestamp"] = datetime.utcnow()
        if request.environ.get("HTTP_X_FORWARDED_FOR"):
            ip = request.environ.get("HTTP_X_FORWARDED_FOR", "")
            first_ip = ip.split(",")[0]
            remote_addr = first_ip.strip()
        else:
            remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get(
                "REMOTE_ADDR"
            )
        error_dict["ip"] = remote_addr

    blacklist = [
        "password",
        "passwd",
        "pwd",
        "auth_tkt",
        "secret",
        "csrf",
        "session",
        "test",
    ]

    lines = []
    for l in error.find("backtrace"):
        lines.append(
            {
                "file": l.get("file", ""),
                "line": l.get("number", ""),
                "fn": l.get("method", ""),
                "module": l.get("module", ""),
                "cline": l.get("method", ""),
                "vars": {},
            }
        )
    error_dict["traceback"] = list(reversed(lines))
    # filtering is not provided by airbrake
    keys_to_check = (
        error_dict[
|
150 | error_dict["request"].get("COOKIE"), | |
132 |
error_dict[ |
|
151 | error_dict["request"].get("COOKIES"), | |
133 |
error_dict[ |
|
152 | error_dict["request"].get("POST"), | |
134 |
error_dict[ |
|
153 | error_dict["request"].get("SESSION"), | |
135 | ) |
|
154 | ) | |
136 | for source in [_f for _f in keys_to_check if _f]: |
|
155 | for source in [_f for _f in keys_to_check if _f]: | |
137 | for k in source.keys(): |
|
156 | for k in source.keys(): | |
138 | for bad_key in blacklist: |
|
157 | for bad_key in blacklist: | |
139 | if bad_key in k.lower(): |
|
158 | if bad_key in k.lower(): | |
140 |
source[k] = |
|
159 | source[k] = "***" | |
141 |
|
160 | |||
142 | return error_dict |
|
161 | return error_dict |
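
For readers unfamiliar with the Airbrake v2 notice format this parser consumes, here is a minimal, hypothetical notice run through the same ElementTree lookups used above (error/class, request/cgi-data, server-environment/hostname); the element names match the code, but the notice contents are invented for illustration.

import xml.etree.ElementTree as ElementTree

NOTICE = """<notice version="2.3">
  <error>
    <class>RuntimeError</class>
    <message>boom</message>
    <backtrace>
      <line method="index" file="app.rb" number="42"/>
    </backtrace>
  </error>
  <request>
    <url>http://example.com/</url>
    <component>home</component>
    <action>index</action>
    <cgi-data>
      <var key="REMOTE_ADDR">127.0.0.1</var>
      <var key="HTTP_USER_AGENT">curl/7.35</var>
    </cgi-data>
  </request>
  <server-environment>
    <hostname>web1</hostname>
  </server-environment>
</notice>"""

root = ElementTree.fromstring(NOTICE)
error = root.find("error")
# the same lookups the parser performs:
assert error.findtext("class") == "RuntimeError"
assert root.find("request").find("cgi-data") is not None
assert root.find("server-environment").findtext("hostname") == "web1"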
@@ -22,12 +22,12 @@ log = logging.getLogger(__name__)


def to_relativedelta(time_delta):
    return relativedelta(
        seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds
    )


def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):
    utcnow = datetime.utcnow()
    if isinstance(date_str, datetime):
        # get rid of tzinfo
@@ -36,21 +36,21 @@ def convert_date(date_str, return_utcnow_if_wrong=True,
        return utcnow
    try:
        try:
            if "Z" in date_str:
                date_str = date_str[: date_str.index("Z")]
            if "." in date_str:
                date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f")
            else:
                date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
        except Exception:
            # bw compat with old client
            date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
    except Exception:
        if return_utcnow_if_wrong:
            date = utcnow
        else:
            date = None
    if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
        log.warning("time %s in future + 3 min, normalizing" % date)
        return utcnow
    return date
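
A few illustrative calls showing the three timestamp shapes convert_date() accepts and its fallback behaviour; the inputs are invented for the example.

from datetime import datetime, timedelta

convert_date("2014-10-13T23:47:38.295159")  # ISO 8601 with microseconds
convert_date("2014-10-13T23:47:38Z")        # trailing "Z" is stripped first
convert_date("2014-10-13 23:47:38,295159")  # bw-compat old-client format
convert_date("garbage")                     # -> datetime.utcnow() by default
convert_date("garbage", return_utcnow_if_wrong=False)  # -> None
# timestamps more than 3 minutes in the future are clamped to utcnow():
future = (datetime.utcnow() + timedelta(hours=1)).strftime("%Y-%m-%dT%H:%M:%S")
convert_date(future, normalize_future=True)  # -> utcnow(), with a warning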
@@ -19,45 +19,68 @@ from datetime import timedelta
from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType

EXCLUDED_LOG_VARS = [
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "message",
    "module",
    "msecs",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "thread",
    "threadName",
]

EXCLUDE_SENTRY_KEYS = [
    "csp",
    "culprit",
    "event_id",
    "exception",
    "extra",
    "level",
    "logentry",
    "logger",
    "message",
    "modules",
    "platform",
    "query",
    "release",
    "request",
    "sentry.interfaces.Csp",
    "sentry.interfaces.Exception",
    "sentry.interfaces.Http",
    "sentry.interfaces.Message",
    "sentry.interfaces.Query",
    "sentry.interfaces.Stacktrace",
    "sentry.interfaces.Template",
    "sentry.interfaces.User",
    "sentry.interfaces.csp.Csp",
    "sentry.interfaces.exception.Exception",
    "sentry.interfaces.http.Http",
    "sentry.interfaces.message.Message",
    "sentry.interfaces.query.Query",
    "sentry.interfaces.stacktrace.Stacktrace",
    "sentry.interfaces.template.Template",
    "sentry.interfaces.user.User",
    "server_name",
    "stacktrace",
    "tags",
    "template",
    "time_spent",
    "timestamp",
    "user",
]


def get_keys(list_of_keys, json_body):
@@ -67,9 +90,10 @@ def get_keys(list_of_keys, json_body):


def get_logentry(json_body):
    key_names = [
        "logentry",
        "sentry.interfaces.message.Message",
        "sentry.interfaces.Message",
    ]
    logentry = get_keys(key_names, json_body)
    return logentry
@@ -77,20 +101,21 @@ def get_logentry(json_body):


def get_exception(json_body):
    parsed_exception = {}
    key_names = [
        "exception",
        "sentry.interfaces.exception.Exception",
        "sentry.interfaces.Exception",
    ]
    exception = get_keys(key_names, json_body) or {}
    if exception:
        if isinstance(exception, dict):
            exception = exception["values"][0]
        else:
            exception = exception[0]

    parsed_exception["type"] = exception.get("type")
    parsed_exception["value"] = exception.get("value")
    parsed_exception["module"] = exception.get("module")
    parsed_stacktrace = get_stacktrace(exception) or {}
    parsed_exception = exception or {}
    return parsed_exception, parsed_stacktrace
@@ -98,20 +123,22 @@ def get_exception(json_body):


def get_stacktrace(json_body):
    parsed_stacktrace = []
    key_names = [
        "stacktrace",
        "sentry.interfaces.stacktrace.Stacktrace",
        "sentry.interfaces.Stacktrace",
    ]
    stacktrace = get_keys(key_names, json_body)
    if stacktrace:
        for frame in stacktrace["frames"]:
            parsed_stacktrace.append(
                {
                    "cline": frame.get("context_line", ""),
                    "file": frame.get("filename", ""),
                    "module": frame.get("module", ""),
                    "fn": frame.get("function", ""),
                    "line": frame.get("lineno", ""),
                    "vars": list(frame.get("vars", {}).items()),
                }
            )
    return parsed_stacktrace
@@ -119,19 +146,21 @@ def get_stacktrace(json_body):


def get_template(json_body):
    parsed_template = []
    key_names = [
        "template",
        "sentry.interfaces.template.Template",
        "sentry.interfaces.Template",
    ]
    template = get_keys(key_names, json_body)
    if template:
        for frame in template["frames"]:
            parsed_template.append(
                {
                    "cline": frame.get("context_line", ""),
                    "file": frame.get("filename", ""),
                    "fn": "",
                    "line": frame.get("lineno", ""),
                    "vars": [],
                }
            )

@@ -140,16 +169,13 @@ def get_template(json_body):


def get_request(json_body):
    parsed_http = {}
    key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"]
    http = get_keys(key_names, json_body) or {}
    for k, v in http.items():
        if k == "headers":
            parsed_http["headers"] = {}
            for sk, sv in http["headers"].items():
                parsed_http["headers"][sk.title()] = sv
        else:
            parsed_http[k.lower()] = v
    return parsed_http
@@ -157,53 +183,47 @@ def get_request(json_body):


def get_user(json_body):
    parsed_user = {}
    key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"]
    user = get_keys(key_names, json_body)
    if user:
        parsed_user["id"] = user.get("id")
        parsed_user["username"] = user.get("username")
        parsed_user["email"] = user.get("email")
        parsed_user["ip_address"] = user.get("ip_address")

    return parsed_user


def get_query(json_body):
    query = None
    key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"]
    query = get_keys(key_name, json_body)
    return query


def parse_sentry_event(json_body):
    request_id = json_body.get("event_id")

    # required
    message = json_body.get("message")
    log_timestamp = json_body.get("timestamp")
    level = json_body.get("level")
    if isinstance(level, int):
        level = LogLevelPython.key_from_value(level)

    namespace = json_body.get("logger")
    language = json_body.get("platform")

    # optional
    server_name = json_body.get("server_name")
    culprit = json_body.get("culprit")
    release = json_body.get("release")

    tags = json_body.get("tags", {})
    if hasattr(tags, "items"):
        tags = list(tags.items())
    extra = json_body.get("extra", {})
    if hasattr(extra, "items"):
        extra = list(extra.items())

    parsed_req = get_request(json_body)
@@ -212,12 +232,13 @@ def parse_sentry_event(json_body):
    query = get_query(json_body)

    # other unidentified keys found
    other_keys = [
        (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS
    ]

    logentry = get_logentry(json_body)
    if logentry:
        message = logentry["message"]

    exception, stacktrace = get_exception(json_body)

@@ -227,70 +248,70 @@ def parse_sentry_event(json_body):
        event_type = ParsedSentryEventType.LOG

        event_dict = {
            "log_level": level,
            "message": message,
            "namespace": namespace,
            "request_id": request_id,
            "server": server_name,
            "date": log_timestamp,
            "tags": tags,
        }
        event_dict["tags"].extend(
            [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS]
        )

        # other keys can be various object types
        event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)])
        if culprit:
            event_dict["tags"].append(("sentry_culprit", culprit))
        if language:
            event_dict["tags"].append(("sentry_language", language))
        if release:
            event_dict["tags"].append(("sentry_release", release))

    if exception or stacktrace or alt_stacktrace or template:
        event_type = ParsedSentryEventType.ERROR_REPORT
        event_dict = {
            "client": "sentry",
            "error": message,
            "namespace": namespace,
            "request_id": request_id,
            "server": server_name,
            "start_time": log_timestamp,
            "end_time": None,
            "tags": tags,
            "extra": extra,
            "language": language,
            "view_name": json_body.get("culprit"),
            "http_status": None,
            "username": None,
            "url": parsed_req.get("url"),
            "ip": None,
            "user_agent": None,
            "request": None,
            "slow_calls": None,
            "request_stats": None,
            "traceback": None,
        }

        event_dict["extra"].extend(other_keys)
        if release:
            event_dict["tags"].append(("sentry_release", release))
        event_dict["request"] = parsed_req
        if "headers" in parsed_req:
            event_dict["user_agent"] = parsed_req["headers"].get("User-Agent")
        if "env" in parsed_req:
            event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR")
        ts_ms = int(json_body.get("time_spent") or 0)
        if ts_ms > 0:
            event_dict["end_time"] = event_dict["start_time"] + timedelta(
                milliseconds=ts_ms
            )
        if stacktrace or alt_stacktrace or template:
            event_dict["traceback"] = stacktrace or alt_stacktrace or template
        for k in list(event_dict.keys()):
            if event_dict[k] is None:
                del event_dict[k]
    if user:
        event_dict["username"] = user["username"] or user["id"] or user["email"]
    return event_dict, event_type
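
To make the control flow concrete, here is a hypothetical minimal Sentry payload driven through parse_sentry_event(); the keys mirror the lookups above, and the exact output still depends on get_keys(), whose body sits outside this hunk.

payload = {
    "event_id": "abc123",
    "message": "OperationalError: connection refused",
    "timestamp": "2014-10-13T23:47:38",
    "level": 40,                    # integers are mapped via LogLevelPython
    "logger": "sqlalchemy.engine",
    "platform": "python",
}
event_dict, event_type = parse_sentry_event(payload)
# with no exception/stacktrace/template interfaces present, this takes the
# ParsedSentryEventType.LOG branch and builds a log-style event_dict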
@@ -13,5 +13,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -41,7 +41,7 @@ target_metadata = MetaData(naming_convention=NAMING_CONVENTION)
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

VERSION_TABLE_NAME = "alembic_appenlight_version"


def run_migrations_offline():
@@ -57,9 +57,12 @@ def run_migrations_offline():

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        transaction_per_migration=True,
        version_table=VERSION_TABLE_NAME,
    )

    with context.begin_transaction():
        context.run_migrations()
@@ -74,15 +77,16 @@ def run_migrations_online():
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        transaction_per_migration=True,
        version_table=VERSION_TABLE_NAME,
    )

    try:
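
The hunk ends inside run_migrations_online(); the rest of env.py is unchanged by this commit. For context, the standard Alembic entry point that chooses between the two functions looks like the following generic boilerplate, shown here as a sketch rather than as part of the diff.

from alembic import context

if context.is_offline_mode():
    run_migrations_offline()  # emits SQL to stdout, no DB connection
else:
    run_migrations_online()   # runs against a live engine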
@@ -23,7 +23,7 @@ Create Date: 2014-10-13 23:47:38.295159
"""

# revision identifiers, used by Alembic.
revision = "55b6e612672f"
down_revision = None

from alembic import op
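
As a usage sketch, a revision like this one can be applied programmatically through Alembic's command API; the config path and database URL below are placeholders, not values from this repository.

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # hypothetical config location
cfg.set_main_option("sqlalchemy.url", "postgresql:///appenlight")
command.upgrade(cfg, "55b6e612672f")  # or "head" for the latest revision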
@@ -31,348 +31,514 @@ import sqlalchemy as sa


def upgrade():
    op.add_column("users", sa.Column("first_name", sa.Unicode(25)))
    op.add_column("users", sa.Column("last_name", sa.Unicode(50)))
    op.add_column("users", sa.Column("company_name", sa.Unicode(255)))
    op.add_column("users", sa.Column("company_address", sa.Unicode(255)))
    op.add_column("users", sa.Column("phone1", sa.Unicode(25)))
    op.add_column("users", sa.Column("phone2", sa.Unicode(25)))
    op.add_column("users", sa.Column("zip_code", sa.Unicode(25)))
    op.add_column(
        "users",
        sa.Column(
            "default_report_sort",
            sa.Unicode(20),
            nullable=False,
            server_default="newest",
        ),
    )
    op.add_column("users", sa.Column("city", sa.Unicode(128)))
    op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default=""))
    op.add_column(
        "users",
        sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"),
    )
    op.add_column(
        "users",
        sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""),
    )

    op.create_table(
        "integrations",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column(
            "resource_id",
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
        ),
        sa.Column("integration_name", sa.Unicode(64)),
        sa.Column("config", sa.dialects.postgresql.JSON, nullable=False),
        sa.Column(
            "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now()
        ),
        sa.Column("external_id", sa.Unicode(255)),
        sa.Column("external_id2", sa.Unicode(255)),
    )

    op.create_table(
        "alert_channels",
        sa.Column(
            "owner_id",
            sa.Integer(),
            sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
            nullable=False,
        ),
        sa.Column("channel_name", sa.Unicode(25), nullable=False),
        sa.Column("channel_value", sa.Unicode(80), nullable=False),
        sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False),
        sa.Column(
            "channel_validated", sa.Boolean, nullable=False, server_default="False"
        ),
        sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"),
        sa.Column(
            "notify_only_first", sa.Boolean, nullable=False, server_default="False"
        ),
        sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"),
        sa.Column("pkey", sa.Integer(), primary_key=True),
        sa.Column(
            "integration_id",
            sa.Integer,
            sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"),
        ),
    )
    op.create_unique_constraint(
        "uq_alert_channels",
        "alert_channels",
        ["owner_id", "channel_name", "channel_value"],
    )

    op.create_table(
        "alert_channels_actions",
        sa.Column("owner_id", sa.Integer(), nullable=False),
        sa.Column(
            "resource_id",
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
        ),
        sa.Column("pkey", sa.Integer(), primary_key=True),
        sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"),
        sa.Column("rule", sa.dialects.postgresql.JSON),
        sa.Column("type", sa.Unicode(10), index=True),
        sa.Column("other_id", sa.Unicode(40), index=True),
        sa.Column("config", sa.dialects.postgresql.JSON),
        sa.Column("name", sa.Unicode(255), server_default=""),
    )

    op.create_table(
        "application_postprocess_conf",
        sa.Column("pkey", sa.Integer(), primary_key=True),
        sa.Column("do", sa.Unicode(25), nullable=False),
        sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""),
        sa.Column(
            "resource_id",
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
            nullable=False,
        ),
        sa.Column("rule", sa.dialects.postgresql.JSON),
    )

    op.create_table(
        "applications",
        sa.Column(
            "resource_id",
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
            nullable=False,
            primary_key=True,
            autoincrement=False,
        ),
        sa.Column("domains", sa.UnicodeText, nullable=False),
        sa.Column("api_key", sa.Unicode(32), nullable=False, index=True),
        sa.Column(
            "default_grouping",
            sa.Unicode(20),
            nullable=False,
            server_default="url_type",
        ),
        sa.Column("public_key", sa.Unicode(32), nullable=False, index=True),
        sa.Column(
            "error_report_threshold", sa.Integer(), server_default="10", nullable=False
        ),
        sa.Column(
            "slow_report_threshold", sa.Integer(), server_default="10", nullable=False
        ),
        sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False),
        sa.Column(
            "allow_permanent_storage",
            sa.Boolean(),
            server_default="false",
            nullable=False,
        ),
    )
    op.create_unique_constraint(None, "applications", ["public_key"])
    op.create_unique_constraint(None, "applications", ["api_key"])

    op.create_table(
        "metrics",
        sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True),
        sa.Column(
            "resource_id",
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
        ),
        sa.Column("timestamp", sa.DateTime),
        sa.Column("namespace", sa.Unicode(255)),
        sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
    )

    op.create_table(
        "events",
        sa.Column("id", sa.Integer, nullable=False, primary_key=True),
        sa.Column("start_date", sa.DateTime, nullable=False, index=True),
        sa.Column("end_date", sa.DateTime),
        sa.Column("status", sa.Integer(), nullable=False, index=True),
        sa.Column("event_type", sa.Integer(), nullable=False, index=True),
        sa.Column("origin_user_id", sa.Integer()),
        sa.Column("target_user_id", sa.Integer()),
        sa.Column("resource_id", sa.Integer(), index=True),
        sa.Column("text", sa.UnicodeText, server_default=""),
        sa.Column("values", sa.dialects.postgresql.JSON),
        sa.Column("target_id", sa.Integer()),
        sa.Column("target_uuid", sa.Unicode(40), index=True),
    )

    op.create_table(
        "logs",
        sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True),
        sa.Column(
            "resource_id",
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
        ),
        sa.Column("log_level", sa.SmallInteger(), nullable=False),
        sa.Column("primary_key", sa.Unicode(128), nullable=True),
        sa.Column("message", sa.UnicodeText, nullable=False, server_default=""),
        sa.Column("timestamp", sa.DateTime),
        sa.Column("namespace", sa.Unicode(255)),
        sa.Column("request_id", sa.Unicode(40)),
        sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
        sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False),
    )

    op.create_table(
        "reports_groups",
        sa.Column("id", sa.types.BigInteger, primary_key=True),
        sa.Column(
            "resource_id",
            sa.Integer,
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
            nullable=False,
        ),
        sa.Column("priority", sa.Integer, nullable=False, server_default="5"),
        sa.Column(
            "first_timestamp",
            sa.DateTime(),
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column("last_timestamp", sa.DateTime()),
        sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
        sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""),
        sa.Column(
            "triggered_postprocesses_ids",
            sa.dialects.postgresql.JSON,
            nullable=False,
            server_default="[]",
        ),
        sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
        sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"),
        sa.Column("last_report", sa.Integer, nullable=False, server_default="0"),
        sa.Column("occurences", sa.Integer, nullable=False, server_default="1"),
        sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"),
        sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"),
        sa.Column("notified", sa.Boolean, nullable=False, server_default="False"),
        sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"),
        sa.Column("public", sa.Boolean, nullable=False, server_default="False"),
        sa.Column("read", sa.Boolean, nullable=False, server_default="False"),
    )

    op.create_table(
        "reports",
        sa.Column("id", sa.types.BigInteger, primary_key=True),
        sa.Column(
            "group_id",
            sa.types.BigInteger,
            sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"),
            nullable=False,
            index=True,
        ),
        sa.Column("resource_id", sa.Integer, nullable=False, index=True),
        sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
        sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
        sa.Column(
            "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
        ),
        sa.Column(
            "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
        ),
        sa.Column(
            "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
        ),
        sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""),
        sa.Column("username", sa.Unicode(255), nullable=False, server_default=""),
        sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""),
        sa.Column("url", sa.UnicodeText, nullable=False, server_default=""),
        sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""),
        sa.Column(
            "request_stats",
            sa.dialects.postgresql.JSON,
            nullable=False,
            server_default="{}",
        ),
        sa.Column(
            "traceback",
            sa.dialects.postgresql.JSON,
            nullable=False,
            server_default="{}",
        ),
        sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""),
        sa.Column(
            "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now()
        ),
        sa.Column("end_time", sa.DateTime()),
        sa.Column(
            "report_group_time",
            sa.DateTime,
            index=True,
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column("duration", sa.Float(), nullable=False, server_default="0"),
        sa.Column("http_status", sa.Integer, index=True),
        sa.Column("url_domain", sa.Unicode(128)),
        sa.Column("url_path", sa.UnicodeText),
        sa.Column("language", sa.Integer, server_default="0"),
    )
    op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")])
    op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")])

    op.create_table(
        "reports_assignments",
        sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True),
        sa.Column(
            "owner_id",
            sa.Integer,
            sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
            nullable=False,
            primary_key=True,
        ),
        sa.Column("report_time", sa.DateTime, nullable=False),
    )

    op.create_table(
        "reports_comments",
        sa.Column("comment_id", sa.Integer, primary_key=True),
        sa.Column("body", sa.UnicodeText, nullable=False, server_default=""),
        sa.Column(
            "owner_id",
            sa.Integer,
            sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"),
            nullable=True,
        ),
        sa.Column(
            "created_timestamp",
            sa.DateTime,
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column("report_time", sa.DateTime, nullable=False),
        sa.Column("group_id", sa.types.BigInteger, nullable=False),
    )

    op.create_table(
        "reports_stats",
        sa.Column("resource_id", sa.Integer, nullable=False, index=True),
        sa.Column("start_interval", sa.DateTime, nullable=False, index=True),
        sa.Column("group_id", sa.types.BigInteger, index=True),
        sa.Column(
            "occurences", sa.Integer, nullable=False, server_default="0", index=True
        ),
        sa.Column("owner_user_id", sa.Integer),
        sa.Column("type", sa.Integer, index=True, nullable=False),
        sa.Column("duration", sa.Float(), server_default="0"),
        sa.Column("server_name", sa.Unicode(128), server_default=""),
        sa.Column("view_name", sa.Unicode(128), server_default=""),
        sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True),
    )
    op.create_index(
        "ix_reports_stats_start_interval_group_id",
        "reports_stats",
        ["start_interval", "group_id"],
    )

    op.create_table(
        "slow_calls",
        sa.Column("id", sa.types.BigInteger, primary_key=True),
        sa.Column(
            "report_id",
            sa.types.BigInteger,
            sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"),
            nullable=False,
            index=True,
        ),
        sa.Column(
            "duration", sa.Float(), nullable=False, server_default="0", index=True
        ),
        sa.Column(
            "timestamp",
            sa.DateTime,
            nullable=False,
            server_default=sa.func.now(),
            index=True,
        ),
        sa.Column(
            "report_group_time",
            sa.DateTime,
            index=True,
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column("type", sa.Unicode(16), nullable=False, index=True),
        sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""),
        sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False),
        sa.Column("location", sa.UnicodeText, server_default=""),
        sa.Column("subtype", sa.Unicode(16), nullable=False, index=True),
        sa.Column("resource_id", sa.Integer, nullable=False, index=True),
        sa.Column("statement_hash", sa.Unicode(60), index=True),
    )

    op.create_table(
        "tags",
        sa.Column("id", sa.types.BigInteger, primary_key=True),
        sa.Column(
            "resource_id",
            sa.Integer,
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
        ),
        sa.Column(
            "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
        ),
        sa.Column(
            "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
        ),
        sa.Column("name", sa.Unicode(32), nullable=False),
        sa.Column("value", sa.dialects.postgresql.JSON, nullable=False),
        sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"),
    )

    op.create_table(
        "auth_tokens",
        sa.Column("id", sa.Integer, nullable=False, primary_key=True),
        sa.Column("token", sa.Unicode),
        sa.Column(
            "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now()
        ),
        sa.Column("expires", sa.DateTime),
        sa.Column(
            "owner_id",
            sa.Integer,
            sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
        ),
        sa.Column("description", sa.Unicode),
    )

    op.create_table(
        "channels_actions",
        sa.Column(
            "channel_pkey",
            sa.Integer,
            sa.ForeignKey(
                "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
            ),
        ),
        sa.Column(
            "action_pkey",
            sa.Integer,
            sa.ForeignKey(
                "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE"
            ),
        ),
    )

    op.create_table(
        "config",
        sa.Column("key", sa.Unicode(128), primary_key=True),
        sa.Column("section", sa.Unicode(128), primary_key=True),
        sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"),
    )

    op.create_table(
        "plugin_configs",
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("plugin_name", sa.Unicode(128)),
        sa.Column("section", sa.Unicode(128)),
        sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"),
        sa.Column(
            "resource_id",
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="cascade", ondelete="cascade"
            ),
        ),
        sa.Column(
            "owner_id",
            sa.Integer(),
            sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
        ),
    )

    op.create_table(
        "rc_versions",
        sa.Column("name", sa.Unicode(40), primary_key=True),
        sa.Column("value", sa.Unicode(40)),
    )
    version_table = sa.table(
        "rc_versions",
        sa.Column("name", sa.Unicode(40)),
        sa.Column("value", sa.Unicode(40)),
    )

    insert = version_table.insert().values(name="es_reports")
    op.execute(insert)
    insert = version_table.insert().values(name="es_reports_groups")
    op.execute(insert)
    insert = version_table.insert().values(name="es_reports_stats")
    op.execute(insert)
    insert = version_table.insert().values(name="es_logs")
    op.execute(insert)
    insert = version_table.insert().values(name=
|
526 | insert = version_table.insert().values(name="es_metrics") | |
363 | op.execute(insert) |
|
527 | op.execute(insert) | |
364 |
insert = version_table.insert().values(name= |
|
528 | insert = version_table.insert().values(name="es_slow_calls") | |
365 | op.execute(insert) |
|
529 | op.execute(insert) | |
366 |
|
530 | |||
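A note on the block above: sa.table() builds a lightweight, import-free description of rc_versions, just enough to emit INSERTs from inside the migration; each marker row flags an Elasticsearch backfill that still has to run. A minimal sketch of the same data-migration pattern (names illustrative):

    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        # Minimal table stub: no MetaData, no model imports, just the
        # columns needed to build an INSERT inside a migration.
        versions = sa.table(
            "rc_versions",
            sa.column("name", sa.Unicode(40)),
            sa.column("value", sa.Unicode(40)),
        )
        op.execute(versions.insert().values(name="es_reports"))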
-
-    op.execute('''
+    op.execute(
+        """
     CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
     RETURNS timestamp without time zone AS
     $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
     LANGUAGE sql VOLATILE;
-    ''')
+    """
+    )
 
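floor_time_5min rounds a timestamp down to its 5-minute bucket: truncate to the hour, then add back as many whole 5-minute intervals as fit in the minute part. The same arithmetic in Python, for illustration:

    from datetime import datetime, timedelta

    def floor_time_5min(ts):
        # Mirrors date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)
        hour = ts.replace(minute=0, second=0, microsecond=0)
        return hour + timedelta(minutes=5 * (ts.minute // 5))

    assert floor_time_5min(datetime(2018, 2, 28, 13, 52)) == datetime(2018, 2, 28, 13, 50)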
375 |
op.execute( |
|
540 | op.execute( | |
|
541 | """ | |||
376 |
|
|
542 | CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger | |
377 |
|
|
543 | LANGUAGE plpgsql SECURITY DEFINER | |
378 |
|
|
544 | AS $$ | |
@@ -426,13 +592,17 @@ def upgrade():
     RETURN NULL;
     END
     $$;
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
     LANGUAGE plpgsql SECURITY DEFINER
     AS $$
@@ -463,13 +633,17 @@ def upgrade():
     RETURN NULL;
     END
     $$;
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE FUNCTION partition_reports_stats() RETURNS trigger
     LANGUAGE plpgsql SECURITY DEFINER
     AS $$
@@ -499,13 +673,17 @@ def upgrade():
     RETURN NULL;
     END
     $$;
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
     LANGUAGE plpgsql SECURITY DEFINER
     AS $$
@@ -533,13 +711,17 @@ def upgrade():
     RETURN NULL;
     END
     $$;
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
     LANGUAGE plpgsql SECURITY DEFINER
     AS $$
@@ -573,14 +755,17 @@ def upgrade():
     RETURN NULL;
     END
     $$;
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
-    ''')
+    """
+    )
 
-
-    op.execute('''
+    op.execute(
+        """
     CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
     LANGUAGE plpgsql SECURITY DEFINER
     AS $$
@@ -614,11 +799,15 @@ def upgrade():
     RETURN NULL;
     END
     $$;
-    ''')
+    """
+    )
 
-    op.execute('''
+    op.execute(
+        """
     CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
-    ''')
+    """
+    )
+
 
 def downgrade():
     pass
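All of the partition_* functions above (bodies elided in this diff) follow the same pre-declarative PostgreSQL partitioning pattern: a BEFORE INSERT trigger derives a child-table name from the row's timestamp, creates the child on first use, reroutes the INSERT into it, and returns NULL so the row never reaches the parent table. A stripped-down sketch of that pattern, not the actual function bodies (table and column names are illustrative):

    op.execute(
        """
    CREATE OR REPLACE FUNCTION partition_example() RETURNS trigger
        LANGUAGE plpgsql SECURITY DEFINER
        AS $$
    DECLARE
        partition text;
    BEGIN
        -- one child table per month, named after the row's timestamp
        partition := 'example_p' || to_char(NEW.timestamp, 'YYYY_MM');
        IF NOT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = partition) THEN
            EXECUTE format('CREATE TABLE %I () INHERITS (example)', partition);
        END IF;
        EXECUTE format('INSERT INTO %I VALUES ($1.*)', partition) USING NEW;
        RETURN NULL;  -- suppress the insert into the parent table
    END
    $$;
    CREATE TRIGGER partition_example BEFORE INSERT ON example
        FOR EACH ROW EXECUTE PROCEDURE partition_example();
    """
    )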
@@ -7,8 +7,8 @@ Create Date: 2018-02-28 13:52:50.717217
 """
 
 # revision identifiers, used by Alembic.
-revision = 'e9fcfbdd9498'
-down_revision = '55b6e612672f'
+revision = "e9fcfbdd9498"
+down_revision = "55b6e612672f"
 
 from alembic import op
 import sqlalchemy as sa
@@ -16,17 +16,25 @@ import sqlalchemy as sa
 
 def upgrade():
     op.create_table(
-        'channels_resources',
-        sa.Column('channel_pkey', sa.Integer,
-                  sa.ForeignKey('alert_channels.pkey',
-                                ondelete='CASCADE', onupdate='CASCADE'),
-                  primary_key=True),
-        sa.Column('resource_id', sa.Integer,
-                  sa.ForeignKey('resources.resource_id',
-                                ondelete='CASCADE', onupdate='CASCADE'),
-                  primary_key=True)
+        "channels_resources",
+        sa.Column(
+            "channel_pkey",
+            sa.Integer,
+            sa.ForeignKey(
+                "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
+            ),
+            primary_key=True,
+        ),
+        sa.Column(
+            "resource_id",
+            sa.Integer,
+            sa.ForeignKey(
+                "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE"
+            ),
+            primary_key=True,
+        ),
     )
 
 
 def downgrade():
-    op.drop_table('channels_resources')
+    op.drop_table("channels_resources")
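In the hunk above, both foreign keys of channels_resources are primary-key columns, which makes it a classic association table: the composite key deduplicates channel/resource pairs for free, and the CASCADE rules clean up links when either side disappears. A hypothetical session-level illustration (channels_resources stands for the reflected table, db for a session; both are invented names):

    from sqlalchemy.exc import IntegrityError

    db.execute(channels_resources.insert().values(channel_pkey=1, resource_id=5))
    try:
        db.execute(channels_resources.insert().values(channel_pkey=1, resource_id=5))
    except IntegrityError:
        pass  # the composite primary key links each pair at most once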
@@ -29,11 +29,11 @@ log = logging.getLogger(__name__)
 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
 
 NAMING_CONVENTION = {
-    "ix": 'ix_%(column_0_label)s',
+    "ix": "ix_%(column_0_label)s",
     "uq": "uq_%(table_name)s_%(column_0_name)s",
     "ck": "ck_%(table_name)s_%(constraint_name)s",
     "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
-    "pk": "pk_%(table_name)s"
+    "pk": "pk_%(table_name)s",
 }
 
 metadata = MetaData(naming_convention=NAMING_CONVENTION)
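The NAMING_CONVENTION above is what makes constraint names deterministic: MetaData substitutes the templates whenever an index or constraint is created without an explicit name, and Alembic can then refer to those names in later migrations. A small sketch of the effect (table name illustrative):

    import sqlalchemy as sa

    metadata = sa.MetaData(
        naming_convention={"uq": "uq_%(table_name)s_%(column_0_name)s"}
    )
    users = sa.Table(
        "users",
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("email", sa.Unicode(255), unique=True),
    )
    # The unique constraint is named uq_users_email rather than a
    # backend-generated name, so a migration can drop it by name.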
@@ -59,23 +59,24 @@ class SliceableESQuery(object):
         self.query = query
         self.sort_query = sort_query
         self.aggregations = aggregations
-        self.items_per_page = kwconfig.pop('items_per_page', 10)
-        self.page = kwconfig.pop('page', 1)
+        self.items_per_page = kwconfig.pop("items_per_page", 10)
+        self.page = kwconfig.pop("page", 1)
         self.kwconfig = kwconfig
         self.result = None
 
     def __getitem__(self, index):
         config = self.kwconfig.copy()
-        config['from_'] = index.start
+        config["from_"] = index.start
         query = self.query.copy()
         if self.sort_query:
             query.update(self.sort_query)
-        self.result = Datastores.es.search(body=query, size=self.items_per_page,
-                                           **config)
+        self.result = Datastores.es.search(
+            body=query, size=self.items_per_page, **config
+        )
         if self.aggregations:
-            self.items = self.result.get('aggregations')
+            self.items = self.result.get("aggregations")
         else:
-            self.items = self.result['hits']['hits']
+            self.items = self.result["hits"]["hits"]
 
         return self.items
 
@@ -85,14 +86,15 @@ class SliceableESQuery(object):
     def __len__(self):
         config = self.kwconfig.copy()
         query = self.query.copy()
-        self.result = Datastores.es.search(body=query, size=self.items_per_page,
-                                           **config)
+        self.result = Datastores.es.search(
+            body=query, size=self.items_per_page, **config
+        )
         if self.aggregations:
-            self.items = self.result.get('aggregations')
+            self.items = self.result.get("aggregations")
         else:
-            self.items = self.result['hits']['hits']
+            self.items = self.result["hits"]["hits"]
 
-        count = int(self.result['hits']['total'])
+        count = int(self.result["hits"]["total"])
         return count if count < 5000 else 5000
 
 
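__getitem__ plus __len__ is the whole pagination contract here: slicing maps the slice start onto Elasticsearch's from_ offset, and len() reruns the query to read hits.total, capped at 5000 to stay inside the default result window. A hypothetical usage sketch (the query body and index kwarg are invented):

    query = {"query": {"term": {"resource_id": 42}}}
    sliceable = SliceableESQuery(query, index="rcae_reports", items_per_page=10)

    total = len(sliceable)        # issues the search, reads hits.total
    first_page = sliceable[0:10]  # issues it again with from_=0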
@@ -102,8 +104,7 @@ from appenlight.models.user import User
 from appenlight.models.alert_channel import AlertChannel
 from appenlight.models.alert_channel_action import AlertChannelAction
 from appenlight.models.metric import Metric
-from appenlight.models.application_postprocess_conf import \
-    ApplicationPostprocessConf
+from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
 from appenlight.models.auth_token import AuthToken
 from appenlight.models.event import Event
 from appenlight.models.external_identity import ExternalIdentity
@@ -124,7 +125,15 @@ from appenlight.models.user_permission import UserPermission
 from appenlight.models.user_resource_permission import UserResourcePermission
 from ziggurat_foundations import ziggurat_model_init
 
-ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission,
-                    UserResourcePermission, GroupResourcePermission,
-                    Resource,
-                    ExternalIdentity, passwordmanager=None)
+ziggurat_model_init(
+    User,
+    Group,
+    UserGroup,
+    GroupPermission,
+    UserPermission,
+    UserResourcePermission,
+    GroupResourcePermission,
+    Resource,
+    ExternalIdentity,
+    passwordmanager=None,
+)
@@ -27,126 +27,125 @@ log = logging.getLogger(__name__)
 
 #
 channel_rules_m2m_table = sa.Table(
-    'channels_actions', Base.metadata,
-    sa.Column('channel_pkey', sa.Integer,
-              sa.ForeignKey('alert_channels.pkey')),
-    sa.Column('action_pkey', sa.Integer,
-              sa.ForeignKey('alert_channels_actions.pkey'))
+    "channels_actions",
+    Base.metadata,
+    sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
+    sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")),
 )
 
 channel_resources_m2m_table = sa.Table(
-    'channels_resources', Base.metadata,
-    sa.Column('channel_pkey', sa.Integer,
-              sa.ForeignKey('alert_channels.pkey')),
-    sa.Column('resource_id', sa.Integer,
-              sa.ForeignKey('resources.resource_id'))
+    "channels_resources",
+    Base.metadata,
+    sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
+    sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")),
 )
 
-DATE_FRMT = '%Y-%m-%dT%H:%M'
+DATE_FRMT = "%Y-%m-%dT%H:%M"
 
 
 class AlertChannel(Base, BaseModel):
     """
     Stores information about possible alerting options
     """
-    __tablename__ = 'alert_channels'
-    __possible_channel_names__ = ['email']
+
+    __tablename__ = "alert_channels"
+    __possible_channel_names__ = ["email"]
     __mapper_args__ = {
-        'polymorphic_on': 'channel_name',
-        'polymorphic_identity': 'integration'
+        "polymorphic_on": "channel_name",
+        "polymorphic_identity": "integration",
     }
 
-    owner_id = sa.Column(sa.Unicode(30),
-                         sa.ForeignKey('users.id', onupdate='CASCADE',
-                                       ondelete='CASCADE'))
+    owner_id = sa.Column(
+        sa.Unicode(30),
+        sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
+    )
     channel_name = sa.Column(sa.Unicode(25), nullable=False)
-    channel_value = sa.Column(sa.Unicode(80), nullable=False, default='')
-    channel_json_conf = sa.Column(JSON(), nullable=False, default='')
-    channel_validated = sa.Column(sa.Boolean, nullable=False,
-                                  default=False)
-    send_alerts = sa.Column(sa.Boolean, nullable=False,
-                            default=True)
-    daily_digest = sa.Column(sa.Boolean, nullable=False,
-                             default=True)
-    integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'),
-                               nullable=True)
+    channel_value = sa.Column(sa.Unicode(80), nullable=False, default="")
+    channel_json_conf = sa.Column(JSON(), nullable=False, default="")
+    channel_validated = sa.Column(sa.Boolean, nullable=False, default=False)
+    send_alerts = sa.Column(sa.Boolean, nullable=False, default=True)
+    daily_digest = sa.Column(sa.Boolean, nullable=False, default=True)
+    integration_id = sa.Column(
+        sa.Integer, sa.ForeignKey("integrations.id"), nullable=True
+    )
     pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
 
-    channel_actions = sa.orm.relationship('AlertChannelAction',
-                                          cascade="all",
-                                          passive_deletes=True,
-                                          passive_updates=True,
-                                          secondary=channel_rules_m2m_table,
-                                          backref='channels')
-    resources = sa.orm.relationship('Resource',
-                                    cascade="all",
-                                    passive_deletes=True,
-                                    passive_updates=True,
-                                    secondary=channel_resources_m2m_table,
-                                    backref='resources')
+    channel_actions = sa.orm.relationship(
+        "AlertChannelAction",
+        cascade="all",
+        passive_deletes=True,
+        passive_updates=True,
+        secondary=channel_rules_m2m_table,
+        backref="channels",
+    )
+    resources = sa.orm.relationship(
+        "Resource",
+        cascade="all",
+        passive_deletes=True,
+        passive_updates=True,
+        secondary=channel_resources_m2m_table,
+        backref="resources",
+    )
 
     @property
     def channel_visible_value(self):
         if self.integration:
-            return '{}: {}'.format(
-                self.channel_name,
-                self.integration.resource.resource_name
-            )
+            return "{}: {}".format(
+                self.channel_name, self.integration.resource.resource_name
+            )
 
-        return '{}: {}'.format(
-            self.channel_name,
-            self.channel_value
-        )
+        return "{}: {}".format(self.channel_name, self.channel_value)
 
-    def get_dict(self, exclude_keys=None, include_keys=None,
-                 extended_info=True):
+    def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True):
         """
         Returns dictionary with required information that will be consumed by
         angular
         """
-        instance_dict = super(AlertChannel, self).get_dict(exclude_keys,
-                                                           include_keys)
+        instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys)
         exclude_keys_list = exclude_keys or []
         include_keys_list = include_keys or []
 
-        instance_dict['supports_report_alerting'] = True
-        instance_dict['channel_visible_value'] = self.channel_visible_value
+        instance_dict["supports_report_alerting"] = True
+        instance_dict["channel_visible_value"] = self.channel_visible_value
 
         if extended_info:
-            instance_dict['actions'] = [
-                rule.get_dict(extended_info=True) for
-                rule in self.channel_actions]
+            instance_dict["actions"] = [
+                rule.get_dict(extended_info=True) for rule in self.channel_actions
+            ]
 
-        del instance_dict['channel_json_conf']
+        del instance_dict["channel_json_conf"]
 
         if self.integration:
             instance_dict[
-                'supports_report_alerting'] = \
-                self.integration.supports_report_alerting
+                "supports_report_alerting"
+            ] = self.integration.supports_report_alerting
         d = {}
         for k in instance_dict.keys():
-            if (k not in exclude_keys_list and
-                    (k in include_keys_list or not include_keys)):
+            if k not in exclude_keys_list and (
+                k in include_keys_list or not include_keys
+            ):
                 d[k] = instance_dict[k]
         return d
 
     def __repr__(self):
-        return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name,
-                                                     self.channel_value,
-                                                     self.user_name,)
+        return "<AlertChannel: (%s,%s), user:%s>" % (
+            self.channel_name,
+            self.channel_value,
+            self.user_name,
+        )
 
     def send_digest(self, **kwargs):
         """
         This should implement daily top error report notifications
         """
-        log.warning('send_digest NOT IMPLEMENTED')
+        log.warning("send_digest NOT IMPLEMENTED")
 
     def notify_reports(self, **kwargs):
         """
         This should implement notification of reports that occured in 1 min
         interval
         """
-        log.warning('notify_reports NOT IMPLEMENTED')
+        log.warning("notify_reports NOT IMPLEMENTED")
 
     def notify_alert(self, **kwargs):
         """
@@ -160,87 +159,85 @@ class AlertChannel(Base, BaseModel):
         request: request object
 
         """
-        alert_name = kwargs['event'].unified_alert_name()
-        if alert_name in ['slow_report_alert', 'error_report_alert']:
+        alert_name = kwargs["event"].unified_alert_name()
+        if alert_name in ["slow_report_alert", "error_report_alert"]:
             self.notify_report_alert(**kwargs)
-        elif alert_name == 'uptime_alert':
+        elif alert_name == "uptime_alert":
             self.notify_uptime_alert(**kwargs)
-        elif alert_name == 'chart_alert':
+        elif alert_name == "chart_alert":
             self.notify_chart_alert(**kwargs)
 
     def notify_chart_alert(self, **kwargs):
         """
         This should implement report open/close alerts notifications
         """
-        log.warning('notify_chart_alert NOT IMPLEMENTED')
+        log.warning("notify_chart_alert NOT IMPLEMENTED")
 
     def notify_report_alert(self, **kwargs):
         """
         This should implement report open/close alerts notifications
         """
-        log.warning('notify_report_alert NOT IMPLEMENTED')
+        log.warning("notify_report_alert NOT IMPLEMENTED")
 
     def notify_uptime_alert(self, **kwargs):
         """
         This should implement uptime open/close alerts notifications
         """
-        log.warning('notify_uptime_alert NOT IMPLEMENTED')
+        log.warning("notify_uptime_alert NOT IMPLEMENTED")
 
     def get_notification_basic_vars(self, kwargs):
         """
         Sets most common variables used later for rendering notifications for
         channel
         """
-        if 'event' in kwargs:
-            kwargs['since_when'] = kwargs['event'].start_date
+        if "event" in kwargs:
+            kwargs["since_when"] = kwargs["event"].start_date
 
-        url_start_date = kwargs.get('since_when') - timedelta(minutes=1)
-        url_end_date = kwargs.get('since_when') + timedelta(minutes=4)
+        url_start_date = kwargs.get("since_when") - timedelta(minutes=1)
+        url_end_date = kwargs.get("since_when") + timedelta(minutes=4)
         tmpl_vars = {
-            "timestamp": kwargs['since_when'],
-            "user": kwargs['user'],
-            "since_when": kwargs.get('since_when'),
+            "timestamp": kwargs["since_when"],
+            "user": kwargs["user"],
+            "since_when": kwargs.get("since_when"),
             "url_start_date": url_start_date,
-            "url_end_date": url_end_date
+            "url_end_date": url_end_date,
         }
-        tmpl_vars["resource_name"] = kwargs['resource'].resource_name
-        tmpl_vars["resource"] = kwargs['resource']
+        tmpl_vars["resource_name"] = kwargs["resource"].resource_name
+        tmpl_vars["resource"] = kwargs["resource"]
 
-        if 'event' in kwargs:
-            tmpl_vars['event_values'] = kwargs['event'].values
-            tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name()
-            tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action()
+        if "event" in kwargs:
+            tmpl_vars["event_values"] = kwargs["event"].values
+            tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name()
+            tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action()
         return tmpl_vars
 
     def report_alert_notification_vars(self, kwargs):
         tmpl_vars = self.get_notification_basic_vars(kwargs)
-        reports = kwargs.get('reports', [])
+        reports = kwargs.get("reports", [])
         tmpl_vars["reports"] = reports
         tmpl_vars["confirmed_total"] = len(reports)
 
         tmpl_vars["report_type"] = "error reports"
-        tmpl_vars["url_report_type"] = 'report/list'
+        tmpl_vars["url_report_type"] = "report/list"
 
-        alert_type = tmpl_vars.get('alert_type', '')
-        if 'slow_report' in alert_type:
+        alert_type = tmpl_vars.get("alert_type", "")
+        if "slow_report" in alert_type:
             tmpl_vars["report_type"] = "slow reports"
-            tmpl_vars["url_report_type"] = 'report/list_slow'
+            tmpl_vars["url_report_type"] = "report/list_slow"
 
-        app_url = kwargs['request'].registry.settings['_mail_url']
+        app_url = kwargs["request"].registry.settings["_mail_url"]
 
-        destination_url = kwargs['request'].route_url('/',
-                                                      _app_url=app_url)
+        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
         if alert_type:
-            destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
+            destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(
                 tmpl_vars["url_report_type"],
-                tmpl_vars['resource'].resource_id,
-                tmpl_vars['url_start_date'].strftime(DATE_FRMT),
-                tmpl_vars['url_end_date'].strftime(DATE_FRMT),
+                tmpl_vars["resource"].resource_id,
+                tmpl_vars["url_start_date"].strftime(DATE_FRMT),
+                tmpl_vars["url_end_date"].strftime(DATE_FRMT),
             )
         else:
-            destination_url += 'ui/{}?resource={}'.format(
-                tmpl_vars["url_report_type"],
-                tmpl_vars['resource'].resource_id
-            )
+            destination_url += "ui/{}?resource={}".format(
+                tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id
+            )
         tmpl_vars["destination_url"] = destination_url
 
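report_alert_notification_vars builds the link by hand: the route root comes from Pyramid's route_url, and the query string is formatted directly, with dates rendered through DATE_FRMT. A worked example of that formatting with invented values:

    from datetime import datetime

    DATE_FRMT = "%Y-%m-%dT%H:%M"
    url = "https://appenlight.example.com/"
    url += "ui/{}?resource={}&start_date={}&end_date={}".format(
        "report/list",
        7,
        datetime(2018, 2, 28, 13, 51).strftime(DATE_FRMT),
        datetime(2018, 2, 28, 13, 56).strftime(DATE_FRMT),
    )
    # https://appenlight.example.com/ui/report/list?resource=7&start_date=2018-02-28T13:51&end_date=2018-02-28T13:56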
@@ -248,58 +245,54 @@ class AlertChannel(Base, BaseModel):
 
     def uptime_alert_notification_vars(self, kwargs):
         tmpl_vars = self.get_notification_basic_vars(kwargs)
-        app_url = kwargs['request'].registry.settings['_mail_url']
-        destination_url = kwargs['request'].route_url('/', _app_url=app_url)
-        destination_url += 'ui/{}?resource={}'.format(
-            'uptime',
-            tmpl_vars['resource'].resource_id)
-        tmpl_vars['destination_url'] = destination_url
+        app_url = kwargs["request"].registry.settings["_mail_url"]
+        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
+        destination_url += "ui/{}?resource={}".format(
+            "uptime", tmpl_vars["resource"].resource_id
+        )
+        tmpl_vars["destination_url"] = destination_url
 
-        reason = ''
-        e_values = tmpl_vars.get('event_values')
+        reason = ""
+        e_values = tmpl_vars.get("event_values")
 
-        if e_values and e_values.get('response_time') == 0:
-            reason += ' Response time was slower than 20 seconds.'
+        if e_values and e_values.get("response_time") == 0:
+            reason += " Response time was slower than 20 seconds."
         elif e_values:
-            code = e_values.get('status_code')
-            reason += ' Response status code: %s.' % code
+            code = e_values.get("status_code")
+            reason += " Response status code: %s." % code
 
-        tmpl_vars['reason'] = reason
+        tmpl_vars["reason"] = reason
         return tmpl_vars
 
     def chart_alert_notification_vars(self, kwargs):
         tmpl_vars = self.get_notification_basic_vars(kwargs)
-        tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name']
-        tmpl_vars['action_name'] = tmpl_vars['event_values'].get(
-            'action_name') or ''
-        matched_values = tmpl_vars['event_values']['matched_step_values']
-        tmpl_vars['readable_values'] = []
-        for key, value in list(matched_values['values'].items()):
-            matched_label = matched_values['labels'].get(key)
+        tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"]
+        tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or ""
+        matched_values = tmpl_vars["event_values"]["matched_step_values"]
+        tmpl_vars["readable_values"] = []
+        for key, value in list(matched_values["values"].items()):
+            matched_label = matched_values["labels"].get(key)
             if matched_label:
-                tmpl_vars['readable_values'].append({
-                    'label': matched_label['human_label'],
-                    'value': value
-                })
-        tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'],
-                                              key=lambda x: x['label'])
-        start_date = convert_date(tmpl_vars['event_values']['start_interval'])
+                tmpl_vars["readable_values"].append(
+                    {"label": matched_label["human_label"], "value": value}
+                )
+        tmpl_vars["readable_values"] = sorted(
+            tmpl_vars["readable_values"], key=lambda x: x["label"]
+        )
+        start_date = convert_date(tmpl_vars["event_values"]["start_interval"])
         end_date = None
-        if tmpl_vars['event_values'].get('end_interval'):
-            end_date = convert_date(tmpl_vars['event_values']['end_interval'])
+        if tmpl_vars["event_values"].get("end_interval"):
+            end_date = convert_date(tmpl_vars["event_values"]["end_interval"])
 
-        app_url = kwargs['request'].registry.settings['_mail_url']
-        destination_url = kwargs['request'].route_url('/', _app_url=app_url)
+        app_url = kwargs["request"].registry.settings["_mail_url"]
+        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
         to_encode = {
-            'resource': tmpl_vars['event_values']['resource'],
-            'start_date': start_date.strftime(DATE_FRMT),
+            "resource": tmpl_vars["event_values"]["resource"],
+            "start_date": start_date.strftime(DATE_FRMT),
         }
         if end_date:
-            to_encode['end_date'] = end_date.strftime(DATE_FRMT)
+            to_encode["end_date"] = end_date.strftime(DATE_FRMT)
 
-        destination_url += 'ui/{}?{}'.format(
-            'logs',
-            urllib.parse.urlencode(to_encode)
-        )
-        tmpl_vars['destination_url'] = destination_url
+        destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode))
+        tmpl_vars["destination_url"] = destination_url
         return tmpl_vars
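Unlike the hand-formatted report links, the chart-alert link runs its parameters through urllib.parse.urlencode, so reserved characters such as the colon in the timestamps are percent-escaped. For illustration:

    import urllib.parse

    to_encode = {"resource": 7, "start_date": "2018-02-28T13:50"}
    print("ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode)))
    # ui/logs?resource=7&start_date=2018-02-28T13%3A50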
@@ -27,39 +27,42 @@ class AlertChannelAction(Base, BaseModel):
     Stores notifications conditions for user's alert channels
     This is later used for rule parsing like "alert if http_status == 500"
     """
-    __tablename__ = 'alert_channels_actions'
 
-    types = ['report', 'chart']
+    __tablename__ = "alert_channels_actions"
 
-    owner_id = sa.Column(sa.Integer,
-                         sa.ForeignKey('users.id', onupdate='CASCADE',
-                                       ondelete='CASCADE'))
+    types = ["report", "chart"]
+
+    owner_id = sa.Column(
+        sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
+    )
     resource_id = sa.Column(sa.Integer())
-    action = sa.Column(sa.Unicode(10), nullable=False, default='always')
+    action = sa.Column(sa.Unicode(10), nullable=False, default="always")
     type = sa.Column(sa.Unicode(10), nullable=False)
     other_id = sa.Column(sa.Unicode(40))
     pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
-    rule = sa.Column(sa.dialects.postgresql.JSON,
-                     nullable=False, default={'field': 'http_status',
-                                              "op": "ge", "value": "500"})
+    rule = sa.Column(
+        sa.dialects.postgresql.JSON,
+        nullable=False,
+        default={"field": "http_status", "op": "ge", "value": "500"},
+    )
     config = sa.Column(sa.dialects.postgresql.JSON)
     name = sa.Column(sa.Unicode(255))
 
-    @validates('notify_type')
+    @validates("notify_type")
     def validate_email(self, key, notify_type):
-        assert notify_type in ['always', 'only_first']
+        assert notify_type in ["always", "only_first"]
         return notify_type
 
     def resource_name(self, db_session=None):
         db_session = get_db_session(db_session)
         if self.resource_id:
             return ResourceService.by_resource_id(
-                self.resource_id, db_session=db_session).resource_name
+                self.resource_id, db_session=db_session
+            ).resource_name
         else:
-            return 'any resource'
+            return "any resource"
 
-    def get_dict(self, exclude_keys=None, include_keys=None,
-                 extended_info=False):
+    def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
         """
         Returns dictionary with required information that will be consumed by
         angular
@@ -68,12 +71,14 @@ class AlertChannelAction(Base, BaseModel):
         exclude_keys_list = exclude_keys or []
         include_keys_list = include_keys or []
         if extended_info:
-            instance_dict['channels'] = [
-                c.get_dict(extended_info=False) for c in self.channels]
+            instance_dict["channels"] = [
+                c.get_dict(extended_info=False) for c in self.channels
+            ]
 
         d = {}
         for k in instance_dict.keys():
-            if (k not in exclude_keys_list and
-                    (k in include_keys_list or not include_keys)):
+            if k not in exclude_keys_list and (
+                k in include_keys_list or not include_keys
+            ):
                 d[k] = instance_dict[k]
         return d
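The @validates('notify_type') decorator above hooks SQLAlchemy's attribute validation: the method runs on every assignment to that attribute and its return value is what gets stored, so a bad value is rejected before it ever reaches the database. A minimal sketch:

    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base, validates

    Base = declarative_base()

    class Action(Base):
        __tablename__ = "actions"
        pkey = sa.Column(sa.Integer, primary_key=True)
        notify_type = sa.Column(sa.Unicode(10))

        @validates("notify_type")
        def validate_notify_type(self, key, value):
            assert value in ["always", "only_first"]  # raising rejects the value
            return value

    action = Action()
    action.notify_type = "always"   # accepted and stored
    # action.notify_type = "never"  # would raise AssertionError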
@@ -13,4 +13,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
@@ -23,15 +23,13 @@ log = logging.getLogger(__name__)
 
 
 class CampfireAlertChannel(AlertChannel):
-    __mapper_args__ = {
-        'polymorphic_identity': 'campfire'
-    }
+    __mapper_args__ = {"polymorphic_identity": "campfire"}
 
     @property
     def client(self):
         client = CampfireIntegration.create_client(
-            self.integration.config['api_token'],
-            self.integration.config['account'])
+            self.integration.config["api_token"], self.integration.config["account"]
+        )
         return client
 
     def notify_reports(self, **kwargs):
@@ -48,37 +46,40 @@ class CampfireAlertChannel(AlertChannel):
         """
         template_vars = self.report_alert_notification_vars(kwargs)
 
-        app_url = kwargs['request'].registry.settings['_mail_url']
-        destination_url = kwargs['request'].route_url('/',
-                                                      app_url=app_url)
-        f_args = ('report',
-                  template_vars['resource'].resource_id,
-                  template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
-                  template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
-        destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
-            *f_args)
+        app_url = kwargs["request"].registry.settings["_mail_url"]
+        destination_url = kwargs["request"].route_url("/", app_url=app_url)
+        f_args = (
+            "report",
+            template_vars["resource"].resource_id,
+            template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
+            template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
+        )
+        destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
 
-        if template_vars['confirmed_total'] > 1:
+        if template_vars["confirmed_total"] > 1:
             template_vars["title"] = "%s - %s reports" % (
-                template_vars['resource_name'],
-                template_vars['confirmed_total'],
+                template_vars["resource_name"],
+                template_vars["confirmed_total"],
             )
         else:
-            error_title = truncate(template_vars['reports'][0][1].error or
-                                   'slow report', 90)
+            error_title = truncate(
+                template_vars["reports"][0][1].error or "slow report", 90
+            )
             template_vars["title"] = "%s - '%s' report" % (
-                template_vars['resource_name'],
-                error_title)
+                template_vars["resource_name"],
+                error_title,
+            )
 
-        template_vars["title"] += ' ' + destination_url
+        template_vars["title"] += " " + destination_url
 
-        log_msg = 'NOTIFY : %s via %s :: %s reports' % (
-            kwargs['user'].user_name,
+        log_msg = "NOTIFY : %s via %s :: %s reports" % (
+            kwargs["user"].user_name,
             self.channel_visible_value,
-            template_vars['confirmed_total'])
+            template_vars["confirmed_total"],
+        )
         log.warning(log_msg)
 
-        for room in self.integration.config['rooms'].split(','):
+        for room in self.integration.config["rooms"].split(","):
             self.client.speak_to_room(room.strip(), template_vars["title"])
 
     def notify_report_alert(self, **kwargs):
@@ -94,23 +95,23 @@ class CampfireAlertChannel(AlertChannel):
         """
         template_vars = self.report_alert_notification_vars(kwargs)
 
-        if kwargs['event'].unified_alert_action() == 'OPEN':
-            title = 'ALERT %s: %s - %s %s %s' % (
-                template_vars['alert_action'],
-                template_vars['resource_name'],
-                kwargs['event'].values['reports'],
-                template_vars['report_type'],
-                template_vars['destination_url'],
+        if kwargs["event"].unified_alert_action() == "OPEN":
+            title = "ALERT %s: %s - %s %s %s" % (
+                template_vars["alert_action"],
+                template_vars["resource_name"],
+                kwargs["event"].values["reports"],
+                template_vars["report_type"],
+                template_vars["destination_url"],
             )
 
         else:
-            title = 'ALERT %s: %s type: %s' % (
-                template_vars['alert_action'],
-                template_vars['resource_name'],
-                template_vars['alert_type'].replace('_', ' '),
+            title = "ALERT %s: %s type: %s" % (
+                template_vars["alert_action"],
+                template_vars["resource_name"],
+                template_vars["alert_type"].replace("_", " "),
             )
-        for room in self.integration.config['rooms'].split(','):
-            self.client.speak_to_room(room.strip(), title, sound='VUVUZELA')
+        for room in self.integration.config["rooms"].split(","):
+            self.client.speak_to_room(room.strip(), title, sound="VUVUZELA")
 
     def notify_uptime_alert(self, **kwargs):
         """
@@ -125,15 +126,15 @@ class CampfireAlertChannel(AlertChannel):
         """
         template_vars = self.uptime_alert_notification_vars(kwargs)
 
-        message = 'ALERT %s: %s has uptime issues %s\n\n' % (
-            template_vars['alert_action'],
-            template_vars['resource_name'],
-            template_vars['destination_url'],
+        message = "ALERT %s: %s has uptime issues %s\n\n" % (
+            template_vars["alert_action"],
+            template_vars["resource_name"],
+            template_vars["destination_url"],
         )
-        message += template_vars['reason']
+        message += template_vars["reason"]
 
-        for room in self.integration.config['rooms'].split(','):
-            self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
+        for room in self.integration.config["rooms"].split(","):
+            self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
 
     def send_digest(self, **kwargs):
         """
@@ -148,17 +149,17 @@ class CampfireAlertChannel(AlertChannel):
 
         """
         template_vars = self.report_alert_notification_vars(kwargs)
-        f_args = (template_vars['resource_name'],
-                  template_vars['confirmed_total'],)
+        f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
         message = "Daily report digest: %s - %s reports" % f_args
-        message += '{}\n'.format(template_vars['destination_url'])
-        for room in self.integration.config['rooms'].split(','):
+        message += "{}\n".format(template_vars["destination_url"])
+        for room in self.integration.config["rooms"].split(","):
             self.client.speak_to_room(room.strip(), message)
 
-        log_msg = 'DIGEST : %s via %s :: %s reports' % (
-            kwargs['user'].user_name,
+        log_msg = "DIGEST : %s via %s :: %s reports" % (
+            kwargs["user"].user_name,
             self.channel_visible_value,
-            template_vars['confirmed_total'])
+            template_vars["confirmed_total"],
+        )
         log.warning(log_msg)
 
     def notify_chart_alert(self, **kwargs):
@@ -173,16 +174,18 @@ class CampfireAlertChannel(AlertChannel):
 
         """
         template_vars = self.chart_alert_notification_vars(kwargs)
-        message = 'ALERT {}: value in "{}" chart: ' \
-                  'met alert "{}" criteria {} \n'.format(
-            template_vars['alert_action'],
-            template_vars['chart_name'],
-            template_vars['action_name'],
-            template_vars['destination_url'],
-        )
+        message = (
+            'ALERT {}: value in "{}" chart: '
+            'met alert "{}" criteria {} \n'.format(
+                template_vars["alert_action"],
+                template_vars["chart_name"],
+                template_vars["action_name"],
+                template_vars["destination_url"],
+            )
+        )
 
-        for item in template_vars['readable_values']:
-            message += '{}: {}\n'.format(item['label'], item['value'])
+        for item in template_vars["readable_values"]:
+            message += "{}: {}\n".format(item["label"], item["value"])
 
-        for room in self.integration.config['rooms'].split(','):
-            self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
+        for room in self.integration.config["rooms"].split(","):
+            self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
@@ -27,9 +27,7 @@ class EmailAlertChannel(AlertChannel):
     Default email alerting channel
     """
 
-    __mapper_args__ = {
-        'polymorphic_identity': 'email'
-    }
+    __mapper_args__ = {"polymorphic_identity": "email"}
 
     def notify_reports(self, **kwargs):
         """
@@ -45,25 +43,30 @@ class EmailAlertChannel(AlertChannel):
         """
         template_vars = self.report_alert_notification_vars(kwargs)
 
-        if template_vars['confirmed_total'] > 1:
+        if template_vars["confirmed_total"] > 1:
             template_vars["title"] = "AppEnlight :: %s - %s reports" % (
-                template_vars['resource_name'],
-                template_vars['confirmed_total'],
+                template_vars["resource_name"],
+                template_vars["confirmed_total"],
             )
         else:
-            error_title = truncate(template_vars['reports'][0][1].error or
-                                   'slow report', 20)
+            error_title = truncate(
+                template_vars["reports"][0][1].error or "slow report", 20
+            )
             template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
-                template_vars['resource_name'],
-                error_title)
-        UserService.send_email(kwargs['request'],
-                               [self.channel_value],
-                               template_vars,
-                               '/email_templates/notify_reports.jinja2')
-        log_msg = 'NOTIFY : %s via %s :: %s reports' % (
-            kwargs['user'].user_name,
+                template_vars["resource_name"],
+                error_title,
+            )
+        UserService.send_email(
+            kwargs["request"],
+            [self.channel_value],
+            template_vars,
+            "/email_templates/notify_reports.jinja2",
+        )
+        log_msg = "NOTIFY : %s via %s :: %s reports" % (
+            kwargs["user"].user_name,
             self.channel_visible_value,
-            template_vars['confirmed_total'])
+            template_vars["confirmed_total"],
+        )
         log.warning(log_msg)
 
     def send_digest(self, **kwargs):
@@ -81,20 +84,23 @@ class EmailAlertChannel(AlertChannel):
         template_vars = self.report_alert_notification_vars(kwargs)
         title = "AppEnlight :: Daily report digest: %s - %s reports"
         template_vars["email_title"] = title % (
-            template_vars['resource_name'],
-            template_vars['confirmed_total'],
+            template_vars["resource_name"],
+            template_vars["confirmed_total"],
         )
 
-        UserService.send_email(kwargs['request'],
-                               [self.channel_value],
-                               template_vars,
-                               '/email_templates/notify_reports.jinja2',
-                               immediately=True,
-                               silent=True)
-        log_msg = 'DIGEST : %s via %s :: %s reports' % (
-            kwargs['user'].user_name,
+        UserService.send_email(
+            kwargs["request"],
+            [self.channel_value],
+            template_vars,
+            "/email_templates/notify_reports.jinja2",
+            immediately=True,
+            silent=True,
+        )
+        log_msg = "DIGEST : %s via %s :: %s reports" % (
+            kwargs["user"].user_name,
             self.channel_visible_value,
-            template_vars['confirmed_total'])
+            template_vars["confirmed_total"],
+        )
         log.warning(log_msg)
 
     def notify_report_alert(self, **kwargs):
@@ -110,23 +116,26 @@ class EmailAlertChannel(AlertChannel):
         """
         template_vars = self.report_alert_notification_vars(kwargs)
 
-        if kwargs['event'].unified_alert_action() == 'OPEN':
-            title = 'AppEnlight :: ALERT %s: %s - %s %s' % (
-                template_vars['alert_action'],
-                template_vars['resource_name'],
-                kwargs['event'].values['reports'],
-                template_vars['report_type'],
+        if kwargs["event"].unified_alert_action() == "OPEN":
+            title = "AppEnlight :: ALERT %s: %s - %s %s" % (
+                template_vars["alert_action"],
+                template_vars["resource_name"],
+                kwargs["event"].values["reports"],
+                template_vars["report_type"],
             )
         else:
-            title = 'AppEnlight :: ALERT %s: %s type: %s' % (
-                template_vars['alert_action'],
-                template_vars['resource_name'],
-                template_vars['alert_type'].replace('_', ' '),
+            title = "AppEnlight :: ALERT %s: %s type: %s" % (
+                template_vars["alert_action"],
+                template_vars["resource_name"],
+                template_vars["alert_type"].replace("_", " "),
             )
-        template_vars['email_title'] = title
-        UserService.send_email(kwargs['request'], [self.channel_value],
-                               template_vars,
-                               '/email_templates/alert_reports.jinja2')
+        template_vars["email_title"] = title
+        UserService.send_email(
+            kwargs["request"],
+            [self.channel_value],
+            template_vars,
+            "/email_templates/alert_reports.jinja2",
+        )
 
     def notify_uptime_alert(self, **kwargs):
         """
@@ -140,15 +149,18 @@ class EmailAlertChannel(AlertChannel):
 
         """
         template_vars = self.uptime_alert_notification_vars(kwargs)
-        title = 'AppEnlight :: ALERT %s: %s has uptime issues' % (
-            template_vars['alert_action'],
-            template_vars['resource_name'],
+        title = "AppEnlight :: ALERT %s: %s has uptime issues" % (
+            template_vars["alert_action"],
+            template_vars["resource_name"],
         )
-        template_vars['email_title'] = title
+        template_vars["email_title"] = title
 
-        UserService.send_email(kwargs['request'], [self.channel_value],
-                               template_vars,
-                               '/email_templates/alert_uptime.jinja2')
+        UserService.send_email(
+            kwargs["request"],
+            [self.channel_value],
+            template_vars,
+            "/email_templates/alert_uptime.jinja2",
+        )
 
     def notify_chart_alert(self, **kwargs):
         """
@@ -163,13 +175,18 @@ class EmailAlertChannel(AlertChannel):
         """
         template_vars = self.chart_alert_notification_vars(kwargs)
 
-        title = 'AppEnlight :: ALERT {} value in "{}" chart' \
-                ' met alert "{}" criteria'.format(
-            template_vars['alert_action'],
-            template_vars['chart_name'],
-            template_vars['action_name'],
-        )
-        template_vars['email_title'] = title
-        UserService.send_email(kwargs['request'], [self.channel_value],
-                               template_vars,
-                               '/email_templates/alert_chart.jinja2')
+        title = (
+            'AppEnlight :: ALERT {} value in "{}" chart'
+            ' met alert "{}" criteria'.format(
+                template_vars["alert_action"],
+                template_vars["chart_name"],
+                template_vars["action_name"],
+            )
+        )
+        template_vars["email_title"] = title
+        UserService.send_email(
+            kwargs["request"],
+            [self.channel_value],
+            template_vars,
+            "/email_templates/alert_chart.jinja2",
+        )
@@ -23,9 +23,7 @@ log = logging.getLogger(__name__)
class FlowdockAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "flowdock"}

    def notify_reports(self, **kwargs):
        """

@@ -41,44 +39,45 @@ class FlowdockAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)

        app_url = kwargs["request"].registry.settings["_mail_url"]
        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
        f_args = (
            "report",
            template_vars["resource"].resource_id,
            template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
            template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
        )
        destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)

        if template_vars["confirmed_total"] > 1:
            template_vars["title"] = "%s - %s reports" % (
                template_vars["resource_name"],
                template_vars["confirmed_total"],
            )
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 90
            )
            template_vars["title"] = "%s - '%s' report" % (
                template_vars["resource_name"],
                error_title,
            )

        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": template_vars["title"],
            "content": "New report present",
            "tags": ["appenlight"],
            "link": destination_url,
        }
        client.send_to_inbox(payload)

@@ -95,32 +94,30 @@ class FlowdockAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":

            title = "ALERT %s: %s - %s %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                kwargs["event"].values["reports"],
                template_vars["report_type"],
            )

        else:
            title = "ALERT %s: %s type: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                template_vars["alert_type"].replace("_", " "),
            )

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": title,
            "content": "Investigation required",
            "tags": ["appenlight", "alert", template_vars["alert_type"]],
            "link": template_vars["destination_url"],
        }
        client.send_to_inbox(payload)

@@ -137,23 +134,21 @@ class FlowdockAlertChannel(AlertChannel):
        """
        template_vars = self.uptime_alert_notification_vars(kwargs)

        message = "ALERT %s: %s has uptime issues" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
        )
        submessage = "Info: "
        submessage += template_vars["reason"]

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": message,
            "content": submessage,
            "tags": ["appenlight", "alert", "uptime"],
            "link": template_vars["destination_url"],
        }
        client.send_to_inbox(payload)

@@ -171,29 +166,29 @@ class FlowdockAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)
        message = "Daily report digest: %s - %s reports" % (
            template_vars["resource_name"],
            template_vars["confirmed_total"],
        )

        f_args = (template_vars["confirmed_total"], template_vars["timestamp"])

        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": message,
            "content": "%s reports in total since %s" % f_args,
            "tags": ["appenlight", "digest"],
            "link": template_vars["destination_url"],
        }

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        client.send_to_inbox(payload)

        log_msg = "DIGEST : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

    def notify_chart_alert(self, **kwargs):

@@ -209,25 +204,22 @@ class FlowdockAlertChannel(AlertChannel):
        """
        template_vars = self.chart_alert_notification_vars(kwargs)

        message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
            template_vars["alert_action"],
            template_vars["chart_name"],
            template_vars["action_name"],
        )
        submessage = "Info: "
        for item in template_vars["readable_values"]:
            submessage += "{}: {}\n".format(item["label"], item["value"])

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": message,
            "content": submessage,
            "tags": ["appenlight", "alert", "chart"],
            "link": template_vars["destination_url"],
        }
        client.send_to_inbox(payload)
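Delivery in this channel goes through `FlowdockIntegration.create_client(...)` and `client.send_to_inbox(payload)`, both defined outside this diff. A rough sketch of what such a client could look like, assuming Flowdock's team-inbox push endpoint and plain `requests`; the class name and URL are illustrative, not the project's actual client:

    import requests


    class FlowdockClient(object):
        # Illustrative stand-in for FlowdockIntegration.create_client();
        # the real client is defined elsewhere in the codebase.
        def __init__(self, api_token):
            self.api_token = api_token

        def send_to_inbox(self, payload):
            # Flowdock's (now retired) team-inbox push API took one JSON
            # document per message, addressed by the flow's API token.
            url = "https://api.flowdock.com/v1/messages/team_inbox/%s" % self.api_token
            response = requests.post(url, json=payload, timeout=10)
            response.raise_for_status()
            return response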
@@ -23,9 +23,7 @@ log = logging.getLogger(__name__)
class HipchatAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "hipchat"}

    def notify_reports(self, **kwargs):
        """

@@ -41,46 +39,50 @@ class HipchatAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)

        app_url = kwargs["request"].registry.settings["_mail_url"]
        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
        f_args = (
            "report",
            template_vars["resource"].resource_id,
            template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
            template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
        )
        destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)

        if template_vars["confirmed_total"] > 1:
            template_vars["title"] = "%s - %s reports" % (
                template_vars["resource_name"],
                template_vars["confirmed_total"],
            )
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 90
            )
            template_vars["title"] = "%s - '%s' report" % (
                template_vars["resource_name"],
                error_title,
            )

        template_vars["title"] += " " + destination_url

        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

        client = HipchatIntegration.create_client(self.integration.config["api_token"])
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": template_vars["title"],
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "yellow",
                }
            )

@@ -95,35 +97,37 @@ class HipchatAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":

            title = "ALERT %s: %s - %s %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                kwargs["event"].values["reports"],
                template_vars["report_type"],
            )

        else:
            title = "ALERT %s: %s type: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                template_vars["alert_type"].replace("_", " "),
            )

        title += "\n " + template_vars["destination_url"]

        api_token = self.integration.config["api_token"]
        client = HipchatIntegration.create_client(api_token)
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": title,
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "red",
                    "notify": "1",
                }
            )

@@ -138,24 +142,26 @@ class HipchatAlertChannel(AlertChannel):
        """
        template_vars = self.uptime_alert_notification_vars(kwargs)

        message = "ALERT %s: %s has uptime issues\n" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
        )
        message += template_vars["reason"]
        message += "\n{}".format(template_vars["destination_url"])

        api_token = self.integration.config["api_token"]
        client = HipchatIntegration.create_client(api_token)
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": message,
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "red",
                    "notify": "1",
                }
            )

@@ -169,29 +175,30 @@ class HipchatAlertChannel(AlertChannel):
        """
        template_vars = self.chart_alert_notification_vars(kwargs)
        message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format(
            template_vars["alert_action"],
            template_vars["chart_name"],
            template_vars["action_name"],
        )

        for item in template_vars["readable_values"]:
            message += "{}: {}\n".format(item["label"], item["value"])

        message += template_vars["destination_url"]

        api_token = self.integration.config["api_token"]
        client = HipchatIntegration.create_client(api_token)
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": message,
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "red",
                    "notify": "1",
                }
            )

@@ -206,24 +213,26 @@ class HipchatAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)
        f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
        message = "Daily report digest: %s - %s reports" % f_args
        message += "\n{}".format(template_vars["destination_url"])
        api_token = self.integration.config["api_token"]
        client = HipchatIntegration.create_client(api_token)
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": message,
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "green",
                    "notify": "1",
                }
            )

        log_msg = "DIGEST : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)
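One detail that repeats in every Hipchat method above: the integration stores `rooms` as a single comma-separated string, so each send loops over `split(",")` and strips whitespace per room id. A tiny standalone check of that parsing:

    # Room ids live in one comma-separated config string; each notify
    # method splits it and strips stray whitespace around each id.
    rooms = "100001, 100002,  100003"
    print([room.strip() for room in rooms.split(",")])
    # -> ['100001', '100002', '100003']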
@@ -23,9 +23,7 @@ log = logging.getLogger(__name__)
class SlackAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "slack"}

    def notify_reports(self, **kwargs):
        """

@@ -40,45 +38,40 @@ class SlackAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)
        template_vars["title"] = template_vars["resource_name"]

        if template_vars["confirmed_total"] > 1:
            template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"]
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 90
            )
            template_vars["subtext"] = error_title

        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":fire:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "*%s* - <%s| Browse>"
                    % (template_vars["title"], template_vars["destination_url"]),
                    "pretext": "*%s* - <%s| Browse>"
                    % (template_vars["title"], template_vars["destination_url"]),
                    "color": "warning",
                    "fields": [
                        {"value": "Info: %s" % template_vars["subtext"], "short": False}
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

@@ -95,53 +88,51 @@ class SlackAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":
            title = "*ALERT %s*: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
            )

            template_vars["subtext"] = "Got at least %s %s" % (
                kwargs["event"].values["reports"],
                template_vars["report_type"],
            )

        else:
            title = "*ALERT %s*: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
            )

            template_vars["subtext"] = ""

        alert_type = template_vars["alert_type"].replace("_", " ")
        alert_type = alert_type.replace("alert", "").capitalize()

        template_vars["type"] = "Type: %s" % alert_type

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":rage:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "%s - <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "pretext": "%s - <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "color": "danger",
                    "fields": [
                        {
                            "title": template_vars["type"],
                            "value": template_vars["subtext"],
                            "short": False,
                        }
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

@@ -158,13 +149,11 @@ class SlackAlertChannel(AlertChannel):
        """
        template_vars = self.uptime_alert_notification_vars(kwargs)

        title = "*ALERT %s*: %s" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
        )
        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":rage:",

@@ -172,19 +161,21 @@ class SlackAlertChannel(AlertChannel):
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "pretext": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "color": "danger",
                    "fields": [
                        {
                            "title": "Application has uptime issues",
                            "value": template_vars["reason"],
                            "short": False,
                        }
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

@@ -201,39 +192,39 @@ class SlackAlertChannel(AlertChannel):
        """
        template_vars = self.chart_alert_notification_vars(kwargs)

        title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format(
            template_vars["alert_action"],
            template_vars["chart_name"],
            template_vars["action_name"],
        )

        subtext = ""
        for item in template_vars["readable_values"]:
            subtext += "{} - {}\n".format(item["label"], item["value"])

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":rage:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "pretext": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "color": "danger",
                    "fields": [
                        {
                            "title": "Following criteria were met:",
                            "value": subtext,
                            "short": False,
                        }
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

@@ -250,36 +241,30 @@ class SlackAlertChannel(AlertChannel):
        """
        template_vars = self.report_alert_notification_vars(kwargs)
        title = "*Daily report digest*: %s" % template_vars["resource_name"]

        subtext = "%s reports" % template_vars["confirmed_total"]

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "%s : <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "pretext": "%s: <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "color": "good",
                    "fields": [{"title": "Got at least: %s" % subtext, "short": False}],
                }
            ],
        }
        client.make_request(data=report_data)

        log_msg = "DIGEST : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)
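As with the Flowdock channel, `SlackIntegration.create_client(...)` and `make_request(data=...)` are defined elsewhere in the tree; a Slack incoming webhook simply accepts one JSON document per message. A hedged sketch of a minimal stand-in client (the class name is illustrative):

    import requests


    class SlackWebhookClient(object):
        # Illustrative stand-in for SlackIntegration.create_client();
        # an incoming webhook accepts a single JSON body per message.
        def __init__(self, webhook_url):
            self.webhook_url = webhook_url

        def make_request(self, data=None):
            response = requests.post(self.webhook_url, json=data, timeout=10)
            response.raise_for_status()
            return response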
@@ -24,7 +24,7 @@ log = logging.getLogger(__name__)
def generate_api_key():
    uid = str(uuid.uuid4()).replace("-", "")
    return uid[0:32]

@@ -33,61 +33,69 @@ class Application(Resource):
    Resource of application type
    """

    __tablename__ = "applications"
    __mapper_args__ = {"polymorphic_identity": "application"}

    # lists configurable possible permissions for this resource type
    __possible_permissions__ = ("view", "update_reports")

    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
        primary_key=True,
    )
    domains = sa.Column(sa.UnicodeText(), nullable=False, default="")
    api_key = sa.Column(
        sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
    )
    public_key = sa.Column(
        sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
    )
    default_grouping = sa.Column(
        sa.Unicode(20), nullable=False, default="url_traceback"
    )
    error_report_threshold = sa.Column(sa.Integer(), default=10)
    slow_report_threshold = sa.Column(sa.Integer(), default=10)
    allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False)

    @sa.orm.validates("default_grouping")
    def validate_default_grouping(self, key, grouping):
        """ validate if resouce can have specific permission """
        assert grouping in ["url_type", "url_traceback", "traceback_server"]
        return grouping

    report_groups = sa.orm.relationship(
        "ReportGroup",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        lazy="dynamic",
        backref=sa.orm.backref("application", lazy="joined"),
    )

    postprocess_conf = sa.orm.relationship(
        "ApplicationPostprocessConf",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        backref="resource",
    )

    logs = sa.orm.relationship(
        "Log",
        lazy="dynamic",
        backref="application",
        passive_deletes=True,
        passive_updates=True,
    )

    integrations = sa.orm.relationship(
        "IntegrationBase",
        backref="resource",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
    )

    def generate_api_key(self):
        return generate_api_key()

@@ -95,10 +103,11 @@ class Application(Resource):
def after_update(mapper, connection, target):
    from appenlight.models.services.application import ApplicationService

    log.info("clearing out ApplicationService cache")
    ApplicationService.by_id_cached().invalidate(target.resource_id)
    ApplicationService.by_api_key_cached().invalidate(target.api_key)


sa.event.listen(Application, "after_update", after_update)
sa.event.listen(Application, "after_delete", after_update)
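The `generate_api_key()` helper in the first hunk above is self-contained enough to run as-is: a uuid4 rendered without dashes is exactly 32 hex characters, so the `[0:32]` slice acts as a guard rather than a truncation:

    import uuid


    def generate_api_key():
        # uuid4 renders as 36 chars including four dashes; stripping the
        # dashes leaves exactly 32 hex chars, so [0:32] is a safety slice.
        uid = str(uuid.uuid4()).replace("-", "")
        return uid[0:32]


    key = generate_api_key()
    assert len(key) == 32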
@@ -27,18 +27,20 @@ class ApplicationPostprocessConf(Base, BaseModel):
    This is later used for rule parsing like "if 10 occurences bump priority +1"
    """

    __tablename__ = "application_postprocess_conf"

    pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
    )
    do = sa.Column(sa.Unicode(25), nullable=False)
    new_value = sa.Column(sa.UnicodeText(), nullable=False, default="")
    rule = sa.Column(
        sa.dialects.postgresql.JSON,
        nullable=False,
        default={"field": "http_status", "op": "ge", "value": "500"},
    )

    def postprocess(self, item):
        new_value = int(self.new_value)
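The default `rule` value suggests threshold rules of the form field/op/value, as hinted by the class docstring. The project's actual rule parser is not part of this diff; purely as an illustration, such a rule could be evaluated with the `operator` module (the `report` dict here is a made-up stand-in):

    import operator

    # Illustrative only: the production rule parser is more general.
    OPS = {
        "ge": operator.ge,
        "gt": operator.gt,
        "le": operator.le,
        "lt": operator.lt,
        "eq": operator.eq,
    }


    def rule_matches(rule, report):
        # e.g. rule = {"field": "http_status", "op": "ge", "value": "500"}
        return OPS[rule["op"]](report.get(rule["field"]), int(rule["value"]))


    print(rule_matches({"field": "http_status", "op": "ge", "value": "500"},
                       {"http_status": 502}))  # True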
@@ -29,17 +29,22 @@ class AuthToken(Base, BaseModel):
    """
    Stores information about possible alerting options
    """

    __tablename__ = "auth_tokens"

    id = sa.Column(sa.Integer, primary_key=True, nullable=False)
    token = sa.Column(
        sa.Unicode(40),
        nullable=False,
        default=lambda x: UserService.generate_random_string(40),
    )
    owner_id = sa.Column(
        sa.Unicode(30),
        sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
    )
    creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
    expires = sa.Column(sa.DateTime)
    description = sa.Column(sa.Unicode, default="")

    @property
    def is_expired(self):

@@ -49,4 +54,4 @@ class AuthToken(Base, BaseModel):
        return False

    def __str__(self):
        return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10])
@@ -22,7 +22,7 @@ from . import Base
class Config(Base, BaseModel):
    __tablename__ = "config"

    key = sa.Column(sa.Unicode, primary_key=True)
    section = sa.Column(sa.Unicode, primary_key=True)
@@ -30,39 +30,40 @@ log = logging.getLogger(__name__)
class Event(Base, BaseModel):
    __tablename__ = "events"

    types = {
        "error_report_alert": 1,
        "slow_report_alert": 3,
        "comment": 5,
        "assignment": 6,
        "uptime_alert": 7,
        "chart_alert": 9,
    }

    statuses = {"active": 1, "closed": 0}

    id = sa.Column(sa.Integer, primary_key=True)
    start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
    end_date = sa.Column(sa.DateTime)
    status = sa.Column(sa.Integer, default=1)
    event_type = sa.Column(sa.Integer, default=1)
    origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
    target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
    resource_id = sa.Column(
        sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True
    )
    target_id = sa.Column(sa.Integer)
    target_uuid = sa.Column(sa.Unicode(40))
    text = sa.Column(sa.UnicodeText())
    values = sa.Column(JSON(), nullable=False, default=None)

    def __repr__(self):
        return "<Event %s, app:%s, %s>" % (
            self.unified_alert_name(),
            self.resource_id,
            self.unified_alert_action(),
        )

    @property
    def reverse_types(self):

@@ -73,9 +74,9 @@ class Event(Base, BaseModel):
    def unified_alert_action(self):
        event_name = self.reverse_types[self.event_type]
        if self.status == Event.statuses["closed"]:
            return "CLOSE"
        if self.status != Event.statuses["closed"]:
            return "OPEN"
        return event_name

@@ -89,30 +90,33 @@ class Event(Base, BaseModel):
        request = get_current_request()
        if not resource:
            return
        users = set([p.user for p in ResourceService.users_for_perm(resource, "view")])
        for user in users:
            for channel in user.alert_channels:
                matches_resource = not channel.resources or resource in [
                    r.resource_id for r in channel.resources
                ]
                if (
                    not channel.channel_validated
                    or not channel.send_alerts
                    or not matches_resource
                ):
                    continue
                else:
                    try:
                        channel.notify_alert(
                            resource=resource, event=self, user=user, request=request
                        )
                    except IntegrationException as e:
                        log.warning("%s" % e)

    def validate_or_close(self, since_when, db_session=None):
        """ Checks if alerts should stay open or it's time to close them.
        Generates close alert event if alerts get closed """
        event_types = [
            Event.types["error_report_alert"],
            Event.types["slow_report_alert"],
        ]
        app = ResourceService.by_resource_id(self.resource_id)
        # if app was deleted close instantly
        if not app:

@@ -121,10 +125,11 @@ class Event(Base, BaseModel):
        if self.event_type in event_types:
            total = ReportStatService.count_by_type(
                self.event_type, self.resource_id, since_when
            )
            if Event.types["error_report_alert"] == self.event_type:
                threshold = app.error_report_threshold
            if Event.types["slow_report_alert"] == self.event_type:
                threshold = app.slow_report_threshold

            if total < threshold:

@@ -135,31 +140,31 @@ class Event(Base, BaseModel):
        Closes an event and sends notification to affected users
        """
        self.end_date = datetime.utcnow()
        self.status = Event.statuses["closed"]
        log.warning("ALERT: CLOSE: %s" % self)
        self.send_alerts()

    def text_representation(self):
        alert_type = self.unified_alert_name()
        text = ""
        if "slow_report" in alert_type:
            text += "Slow report alert"
        if "error_report" in alert_type:
            text += "Exception report alert"
        if "uptime_alert" in alert_type:
            text += "Uptime alert"
        if "chart_alert" in alert_type:
            text += "Metrics value alert"

        alert_action = self.unified_alert_action()
        if alert_action == "OPEN":
            text += " got opened."
        if alert_action == "CLOSE":
            text += " got closed."
        return text

    def get_dict(self, request=None):
        dict_data = super(Event, self).get_dict()
        dict_data["text"] = self.text_representation()
        dict_data["resource_name"] = self.resource.resource_name
        return dict_data
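`reverse_types` is used by `__repr__` and `unified_alert_action()` but its body is not part of this diff. Presumably it inverts the `types` mapping; a standalone sketch of that assumption:

    # Assumed shape of the reverse_types lookup: invert name -> id so an
    # integer event_type maps back to its alert name.
    types = {
        "error_report_alert": 1,
        "slow_report_alert": 3,
        "comment": 5,
        "assignment": 6,
        "uptime_alert": 7,
        "chart_alert": 9,
    }
    reverse_types = {v: k for k, v in types.items()}
    print(reverse_types[7])  # uptime_alert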
@@ -25,12 +25,12 @@ from appenlight.lib.sqlalchemy_fields import EncryptedUnicode
class ExternalIdentity(ExternalIdentityMixin, Base):
    @declared_attr
    def access_token(self):
        return sa.Column(EncryptedUnicode(255), default="")

    @declared_attr
    def alt_token(self):
        return sa.Column(EncryptedUnicode(255), default="")

    @declared_attr
    def token_secret(self):
        return sa.Column(EncryptedUnicode(255), default="")
@@ -19,27 +19,28 @@ from appenlight.models import Base
class Group(GroupMixin, Base):
    __possible_permissions__ = (
        "root_administration",
        "test_features",
        "admin_panel",
        "admin_users",
        "manage_partitions",
    )

    def get_dict(self, exclude_keys=None, include_keys=None, include_perms=False):
        result = super(Group, self).get_dict(exclude_keys, include_keys)
        if include_perms:
            result["possible_permissions"] = self.__possible_permissions__
            result["current_permissions"] = [p.perm_name for p in self.permissions]
        else:
            result["possible_permissions"] = []
            result["current_permissions"] = []
        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []
        d = {}
        for k in result.keys():
            if k not in exclude_keys_list and (
                k in include_keys_list or not include_keys
            ):
                d[k] = result[k]
        return d
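The tail of `get_dict` implements a small include/exclude filter: excluded keys are always dropped, and when an include list is passed only the listed keys survive. The same rule as a standalone function, for clarity:

    def filter_keys(result, exclude_keys=None, include_keys=None):
        # Same rule as Group.get_dict: drop excluded keys; when an include
        # list is given keep only listed keys, otherwise keep everything.
        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []
        return {
            k: v
            for k, v in result.items()
            if k not in exclude_keys_list
            and (k in include_keys_list or not include_keys)
        }


    data = {"id": 1, "group_name": "admins", "member_count": 3}
    print(filter_keys(data, exclude_keys=["member_count"]))
    # -> {'id': 1, 'group_name': 'admins'}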
@@ -14,8 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from ziggurat_foundations.models.group_resource_permission import (
    GroupResourcePermissionMixin,
)
from appenlight.models import Base
@@ -32,34 +32,37 b' class IntegrationBase(Base, BaseModel):' | |||||
32 | """ |
|
32 | """ | |
33 | Model from which all integrations inherit using polymorphic approach |
|
33 | Model from which all integrations inherit using polymorphic approach | |
34 | """ |
|
34 | """ | |
35 | __tablename__ = 'integrations' |
|
35 | ||
|
36 | __tablename__ = "integrations" | |||
36 |
|
37 | |||
37 | front_visible = False |
|
38 | front_visible = False | |
38 | as_alert_channel = False |
|
39 | as_alert_channel = False | |
39 | supports_report_alerting = False |
|
40 | supports_report_alerting = False | |
40 |
|
41 | |||
41 | id = sa.Column(sa.Integer, primary_key=True) |
|
42 | id = sa.Column(sa.Integer, primary_key=True) | |
42 | resource_id = sa.Column(sa.Integer, |
|
43 | resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id")) | |
43 | sa.ForeignKey('applications.resource_id')) |
|
|||
44 | integration_name = sa.Column(sa.Unicode(64)) |
|
44 | integration_name = sa.Column(sa.Unicode(64)) | |
45 |
_config = sa.Column( |
|
45 | _config = sa.Column("config", JSON(), nullable=False, default="") | |
46 | modified_date = sa.Column(sa.DateTime) |
|
46 | modified_date = sa.Column(sa.DateTime) | |
47 |
|
47 | |||
48 |
channel = sa.orm.relationship( |
|
48 | channel = sa.orm.relationship( | |
|
49 | "AlertChannel", | |||
49 |
|
|
50 | cascade="all,delete-orphan", | |
50 |
|
|
51 | passive_deletes=True, | |
51 |
|
|
52 | passive_updates=True, | |
52 |
|
|
53 | uselist=False, | |
53 |
|
|
54 | backref="integration", | |
|
55 | ) | |||
54 |
|
56 | |||
55 | __mapper_args__ = { |
|
57 | __mapper_args__ = { | |
56 |
|
|
58 | "polymorphic_on": "integration_name", | |
57 |
|
|
59 | "polymorphic_identity": "integration", | |
58 | } |
|
60 | } | |
59 |
|
61 | |||
60 | @classmethod |
|
62 | @classmethod | |
61 |
def by_app_id_and_integration_name( |
|
63 | def by_app_id_and_integration_name( | |
62 | db_session=None): |
|
64 | cls, resource_id, integration_name, db_session=None | |
|
65 | ): | |||
63 | db_session = get_db_session(db_session) |
|
66 | db_session = get_db_session(db_session) | |
64 | query = db_session.query(cls) |
|
67 | query = db_session.query(cls) | |
65 | query = query.filter(cls.integration_name == integration_name) |
|
68 | query = query.filter(cls.integration_name == integration_name) | |
@@ -72,7 +75,6 b' class IntegrationBase(Base, BaseModel):' | |||||
72 |
|
75 | |||
73 | @config.setter |
|
76 | @config.setter | |
74 | def config(self, value): |
|
77 | def config(self, value): | |
75 |
if not hasattr(value, |
|
78 | if not hasattr(value, "items"): | |
76 |
raise Exception( |
|
79 | raise Exception("IntegrationBase.config only accepts " "flat dictionaries") | |
77 | 'flat dictionaries') |
|
|||
78 | self._config = encrypt_dictionary_keys(value) |
|
80 | self._config = encrypt_dictionary_keys(value) |
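
Note: the `__mapper_args__` change above only swaps quoting; the mechanics stay the same. `polymorphic_on` points SQLAlchemy at the `integration_name` column, and each subclass declares a `polymorphic_identity` so rows load as the right class from a single table. A self-contained sketch of that pattern (generic names, not AppEnlight code):

    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class IntegrationExample(Base):
        # one table holds every integration type; "kind" selects the class
        __tablename__ = "integrations_example"

        id = sa.Column(sa.Integer, primary_key=True)
        kind = sa.Column(sa.Unicode(64))

        __mapper_args__ = {"polymorphic_on": kind, "polymorphic_identity": "integration"}

    class SlackExample(IntegrationExample):
        # no extra table: rows with kind == "slack" come back as SlackExample
        __mapper_args__ = {"polymorphic_identity": "slack"}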
@@ -16,8 +16,7 @@
 
 import requests
 from requests_oauthlib import OAuth1
-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 
 _ = str
 
@@ -27,14 +26,12 @@ class NotFoundException(Exception):
 
 
 class BitbucketIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'bitbucket'
-    }
+    __mapper_args__ = {"polymorphic_identity": "bitbucket"}
     front_visible = True
     as_alert_channel = False
     supports_report_alerting = False
     action_notification = True
-    integration_action = 'Add issue to Bitbucket'
+    integration_action = "Add issue to Bitbucket"
 
     @classmethod
     def create_client(cls, request, user_name=None, repo_name=None):
@@ -46,27 +43,36 @@ class BitbucketIntegration(IntegrationBase):
         token = None
         secret = None
         for identity in request.user.external_identities:
-            if identity.provider_name == 'bitbucket':
+            if identity.provider_name == "bitbucket":
                 token = identity.access_token
                 secret = identity.token_secret
                 break
         if not token:
-            raise IntegrationException(
-                'No valid auth token present for this service')
-        client = BitbucketClient(token, secret,
-                                 user_name,
-                                 repo_name,
-                                 config['authomatic.pr.bitbucket.key'],
-                                 config['authomatic.pr.bitbucket.secret'])
+            raise IntegrationException("No valid auth token present for this service")
+        client = BitbucketClient(
+            token,
+            secret,
+            user_name,
+            repo_name,
+            config["authomatic.pr.bitbucket.key"],
+            config["authomatic.pr.bitbucket.secret"],
+        )
         return client
 
 
 class BitbucketClient(object):
-    api_url = 'https://bitbucket.org/api/1.0'
-    repo_type = 'bitbucket'
+    api_url = "https://bitbucket.org/api/1.0"
+    repo_type = "bitbucket"
 
-    def __init__(self, token, secret, owner, repo_name, bitbucket_consumer_key,
-                 bitbucket_consumer_secret):
+    def __init__(
+        self,
+        token,
+        secret,
+        owner,
+        repo_name,
+        bitbucket_consumer_key,
+        bitbucket_consumer_secret,
+    ):
         self.access_token = token
         self.token_secret = secret
         self.owner = owner
@@ -75,89 +81,108 @@ class BitbucketClient(object):
         self.bitbucket_consumer_secret = bitbucket_consumer_secret
 
     possible_keys = {
-        'status': ['new', 'open', 'resolved', 'on hold', 'invalid',
-                   'duplicate', 'wontfix'],
-        'priority': ['trivial', 'minor', 'major', 'critical', 'blocker'],
-        'kind': ['bug', 'enhancement', 'proposal', 'task']
+        "status": [
+            "new",
+            "open",
+            "resolved",
+            "on hold",
+            "invalid",
+            "duplicate",
+            "wontfix",
+        ],
+        "priority": ["trivial", "minor", "major", "critical", "blocker"],
+        "kind": ["bug", "enhancement", "proposal", "task"],
     }
 
     def get_statuses(self):
         """Gets list of possible item statuses"""
-        return self.possible_keys['status']
+        return self.possible_keys["status"]
 
     def get_priorities(self):
         """Gets list of possible item statuses"""
-        return self.possible_keys['priority']
+        return self.possible_keys["priority"]
 
-    def make_request(self, url, method='get', data=None, headers=None):
+    def make_request(self, url, method="get", data=None, headers=None):
         """
         Performs HTTP request to bitbucket
         """
-        auth = OAuth1(self.bitbucket_consumer_key,
-                      self.bitbucket_consumer_secret,
-                      self.access_token, self.token_secret)
+        auth = OAuth1(
+            self.bitbucket_consumer_key,
+            self.bitbucket_consumer_secret,
+            self.access_token,
+            self.token_secret,
+        )
         try:
-            resp = getattr(requests, method)(url, data=data, auth=auth,
-                                             timeout=10)
+            resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10)
         except Exception as e:
             raise IntegrationException(
-                _('Error communicating with Bitbucket: %s') % (e,))
+                _("Error communicating with Bitbucket: %s") % (e,)
+            )
         if resp.status_code == 401:
-            raise IntegrationException(
-                _('You are not authorized to access this repo'))
+            raise IntegrationException(_("You are not authorized to access this repo"))
         elif resp.status_code == 404:
-            raise IntegrationException(_('User or repo name are incorrect'))
+            raise IntegrationException(_("User or repo name are incorrect"))
         elif resp.status_code not in [200, 201]:
             raise IntegrationException(
-                _('Bitbucket response_code: %s') % resp.status_code)
+                _("Bitbucket response_code: %s") % resp.status_code
+            )
         try:
             return resp.json()
         except Exception as e:
             raise IntegrationException(
-                _('Error decoding response from Bitbucket: %s') % (e,))
+                _("Error decoding response from Bitbucket: %s") % (e,)
+            )
 
     def get_assignees(self):
         """Gets list of possible assignees"""
-        url = '%(api_url)s/privileges/%(owner)s/%(repo_name)s' % {
-            'api_url': self.api_url,
-            'owner': self.owner,
-            'repo_name': self.repo_name}
+        url = "%(api_url)s/privileges/%(owner)s/%(repo_name)s" % {
+            "api_url": self.api_url,
+            "owner": self.owner,
+            "repo_name": self.repo_name,
+        }
 
         data = self.make_request(url)
-        results = [{'user': self.owner, 'name': '(Repo owner)'}]
+        results = [{"user": self.owner, "name": "(Repo owner)"}]
         if data:
             for entry in data:
-                results.append({"user": entry['user']['username'],
-                                "name": entry['user'].get('display_name')})
+                results.append(
+                    {
+                        "user": entry["user"]["username"],
+                        "name": entry["user"].get("display_name"),
+                    }
+                )
         return results
 
     def create_issue(self, form_data):
         """
         Sends creates a new issue in tracker using REST call
         """
-        url = '%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/' % {
-            'api_url': self.api_url,
-            'owner': self.owner,
-            'repo_name': self.repo_name}
+        url = "%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/" % {
+            "api_url": self.api_url,
+            "owner": self.owner,
+            "repo_name": self.repo_name,
+        }
 
         payload = {
-            "title": form_data['title'],
-            "content": form_data['content'],
-            "kind": form_data['kind'],
-            "priority": form_data['priority'],
-            "responsible": form_data['responsible']
+            "title": form_data["title"],
+            "content": form_data["content"],
+            "kind": form_data["kind"],
+            "priority": form_data["priority"],
+            "responsible": form_data["responsible"],
         }
-        data = self.make_request(url, 'post', payload)
+        data = self.make_request(url, "post", payload)
         f_args = {
             "owner": self.owner,
             "repo_name": self.repo_name,
-            "issue_id": data['local_id']
+            "issue_id": data["local_id"],
         }
-        web_url = 'https://bitbucket.org/%(owner)s/%(repo_name)s' \
-                  '/issue/%(issue_id)s/issue-title' % f_args
+        web_url = (
+            "https://bitbucket.org/%(owner)s/%(repo_name)s"
+            "/issue/%(issue_id)s/issue-title" % f_args
+        )
         to_return = {
-            'id': data['local_id'],
-            'resource_url': data['resource_uri'],
-            'web_url': web_url
+            "id": data["local_id"],
+            "resource_url": data["resource_uri"],
+            "web_url": web_url,
         }
         return to_return
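
Note: the reflowed `OAuth1(...)` call above is the standard requests-oauthlib signer; its four positional arguments are consumer key, consumer secret, access token, and token secret. A minimal sketch with placeholder credentials (all values hypothetical):

    import requests
    from requests_oauthlib import OAuth1

    auth = OAuth1(
        "consumer-key",     # client key (placeholder)
        "consumer-secret",  # client secret (placeholder)
        "access-token",     # resource owner key (placeholder)
        "token-secret",     # resource owner secret (placeholder)
    )
    # every request made with auth=... is transparently OAuth1-signed
    resp = requests.get("https://bitbucket.org/api/1.0/user", auth=auth, timeout=10)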
@@ -20,8 +20,7 @@ from requests.exceptions import HTTPError, ConnectionError
 from camplight import Request, Campfire
 from camplight.exceptions import CamplightException
 
-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 
 _ = str
 
@@ -33,14 +32,12 @@ class NotFoundException(Exception):
 
 
 class CampfireIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'campfire'
-    }
+    __mapper_args__ = {"polymorphic_identity": "campfire"}
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = 'Message via Campfire'
+    integration_action = "Message via Campfire"
 
     @classmethod
     def create_client(cls, api_token, account):
@@ -50,7 +47,7 @@ class CampfireIntegration(IntegrationBase):
 
 class CampfireClient(object):
     def __init__(self, api_token, account):
-        request = Request('https://%s.campfirenow.com' % account, api_token)
+        request = Request("https://%s.campfirenow.com" % account, api_token)
         self.campfire = Campfire(request)
 
     def get_account(self):
@@ -65,10 +62,10 @@ class CampfireClient(object):
         except (HTTPError, CamplightException) as e:
             raise IntegrationException(str(e))
 
-    def speak_to_room(self, room, message, sound='RIMSHOT'):
+    def speak_to_room(self, room, message, sound="RIMSHOT"):
         try:
             room = self.campfire.room(room)
             room.join()
-            room.speak(message, type_='TextMessage')
+            room.speak(message, type_="TextMessage")
         except (HTTPError, CamplightException, ConnectionError) as e:
             raise IntegrationException(str(e))
@@ -20,8 +20,7 @@ import requests
 from requests.auth import HTTPBasicAuth
 import simplejson as json
 
-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 
 _ = str
 
@@ -33,14 +32,12 @@ class NotFoundException(Exception):
 
 
 class FlowdockIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'flowdock'
-    }
+    __mapper_args__ = {"polymorphic_identity": "flowdock"}
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = 'Message via Flowdock'
+    integration_action = "Message via Flowdock"
 
     @classmethod
     def create_client(cls, api_token):
@@ -50,33 +47,37 @@ class FlowdockIntegration(IntegrationBase):
 
 class FlowdockClient(object):
     def __init__(self, api_token):
-        self.auth = HTTPBasicAuth(api_token, '')
+        self.auth = HTTPBasicAuth(api_token, "")
         self.api_token = api_token
-        self.api_url = 'https://api.flowdock.com/v1/messages'
+        self.api_url = "https://api.flowdock.com/v1/messages"
 
-    def make_request(self, url, method='get', data=None):
+    def make_request(self, url, method="get", data=None):
         headers = {
-            'Content-Type': 'application/json',
-            'User-Agent': 'appenlight-flowdock',
+            "Content-Type": "application/json",
+            "User-Agent": "appenlight-flowdock",
         }
         try:
             if data:
                 data = json.dumps(data)
-            resp = getattr(requests, method)(url, data=data, headers=headers,
-                                             timeout=10)
+            resp = getattr(requests, method)(
+                url, data=data, headers=headers, timeout=10
+            )
         except Exception as e:
             raise IntegrationException(
-                _('Error communicating with Flowdock: %s') % (e,))
+                _("Error communicating with Flowdock: %s") % (e,)
+            )
         if resp.status_code > 299:
             raise IntegrationException(resp.text)
         return resp
 
     def send_to_chat(self, payload):
-        url = '%(api_url)s/chat/%(api_token)s' % {'api_url': self.api_url,
-                                                  'api_token': self.api_token}
-        return self.make_request(url, method='post', data=payload).json()
+        url = "%(api_url)s/chat/%(api_token)s" % {
+            "api_url": self.api_url,
+            "api_token": self.api_token,
+        }
+        return self.make_request(url, method="post", data=payload).json()
 
     def send_to_inbox(self, payload):
-        f_args = {'api_url': self.api_url, 'api_token': self.api_token}
-        url = '%(api_url)s/team_inbox/%(api_token)s' % f_args
-        return self.make_request(url, method='post', data=payload).json()
+        f_args = {"api_url": self.api_url, "api_token": self.api_token}
+        url = "%(api_url)s/team_inbox/%(api_token)s" % f_args
+        return self.make_request(url, method="post", data=payload).json()
@@ -27,14 +27,12 @@ class GithubAuthException(Exception):
 
 
 class GithubIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'github'
-    }
+    __mapper_args__ = {"polymorphic_identity": "github"}
    front_visible = True
     as_alert_channel = False
     supports_report_alerting = False
     action_notification = True
-    integration_action = 'Add issue to Github'
+    integration_action = "Add issue to Github"
 
     @classmethod
     def create_client(cls, request, user_name=None, repo_name=None):
@@ -45,112 +43,116 @@ class GithubIntegration(IntegrationBase):
         token = None
         secret = None
         for identity in request.user.external_identities:
-            if identity.provider_name == 'github':
+            if identity.provider_name == "github":
                 token = identity.access_token
                 secret = identity.token_secret
                 break
         if not token:
-            raise IntegrationException(
-                'No valid auth token present for this service')
+            raise IntegrationException("No valid auth token present for this service")
         client = GithubClient(token=token, owner=user_name, name=repo_name)
         return client
 
 
 class GithubClient(object):
-    api_url = 'https://api.github.com'
-    repo_type = 'github'
+    api_url = "https://api.github.com"
+    repo_type = "github"
 
     def __init__(self, token, owner, name):
         self.access_token = token
         self.owner = owner
         self.name = name
 
-    def make_request(self, url, method='get', data=None, headers=None):
+    def make_request(self, url, method="get", data=None, headers=None):
         req_headers = {
-            'Content-Type': 'application/json',
-            'Authorization': 'token %s' % self.access_token}
+            "User-Agent": "appenlight",
+            "Content-Type": "application/json",
+            "Authorization": "token %s" % self.access_token,
+        }
         try:
             if data:
                 data = json.dumps(data)
-            resp = getattr(requests, method)(url, data=data,
-                                             headers=req_headers,
-                                             timeout=10)
+            resp = getattr(requests, method)(
+                url, data=data, headers=req_headers, timeout=10
+            )
         except Exception as e:
-            msg = 'Error communicating with Github: %s'
+            msg = "Error communicating with Github: %s"
             raise IntegrationException(_(msg) % (e,))
 
         if resp.status_code == 404:
-            msg = 'User or repo name are incorrect'
+            msg = "User or repo name are incorrect"
             raise IntegrationException(_(msg))
         if resp.status_code == 401:
-            msg = 'You are not authorized to access this repo'
+            msg = "You are not authorized to access this repo"
             raise IntegrationException(_(msg))
         elif resp.status_code not in [200, 201]:
-            msg = 'Github response_code: %s'
+            msg = "Github response_code: %s"
             raise IntegrationException(_(msg) % resp.status_code)
         try:
             return resp.json()
         except Exception as e:
-            msg = 'Error decoding response from Github: %s'
+            msg = "Error decoding response from Github: %s"
             raise IntegrationException(_(msg) % (e,))
 
     def get_statuses(self):
         """Gets list of possible item statuses"""
-        url = '%(api_url)s/repos/%(owner)s/%(name)s/labels' % {
-            'api_url': self.api_url,
-            'owner': self.owner,
-            'name': self.name}
+        url = "%(api_url)s/repos/%(owner)s/%(name)s/labels" % {
+            "api_url": self.api_url,
+            "owner": self.owner,
+            "name": self.name,
+        }
 
         data = self.make_request(url)
 
         statuses = []
         for status in data:
-            statuses.append(status['name'])
+            statuses.append(status["name"])
         return statuses
 
     def get_repo(self):
         """Gets list of possible item statuses"""
-        url = '%(api_url)s/repos/%(owner)s/%(name)s' % {
-            'api_url': self.api_url,
-            'owner': self.owner,
-            'name': self.name}
+        url = "%(api_url)s/repos/%(owner)s/%(name)s" % {
+            "api_url": self.api_url,
+            "owner": self.owner,
+            "name": self.name,
+        }
 
         data = self.make_request(url)
         return data
 
     def get_assignees(self):
         """Gets list of possible assignees"""
-        url = '%(api_url)s/repos/%(owner)s/%(name)s/collaborators' % {
-            'api_url': self.api_url,
-            'owner': self.owner,
-            'name': self.name}
+        url = "%(api_url)s/repos/%(owner)s/%(name)s/collaborators" % {
+            "api_url": self.api_url,
+            "owner": self.owner,
+            "name": self.name,
+        }
         data = self.make_request(url)
         results = []
         for entry in data:
-            results.append({"user": entry['login'],
-                            "name": entry.get('name')})
+            results.append({"user": entry["login"], "name": entry.get("name")})
         return results
 
     def create_issue(self, form_data):
         """
         Make a REST call to create issue in Github's issue tracker
         """
-        url = '%(api_url)s/repos/%(owner)s/%(name)s/issues' % {
-            'api_url': self.api_url,
-            'owner': self.owner,
-            'name': self.name}
+        url = "%(api_url)s/repos/%(owner)s/%(name)s/issues" % {
+            "api_url": self.api_url,
+            "owner": self.owner,
+            "name": self.name,
+        }
 
         payload = {
-            "title": form_data['title'],
-            "body": form_data['content'],
+            "title": form_data["title"],
+            "body": form_data["content"],
             "labels": [],
-            "assignee": form_data['responsible']
+            "assignee": form_data["responsible"],
        }
-        payload['labels'].extend(form_data['kind'])
-        data = self.make_request(url, 'post', data=payload)
+        payload["labels"].extend(form_data["kind"])
+        data = self.make_request(url, "post", data=payload)
         to_return = {
-            'id': data['number'],
-            'resource_url': data['url'],
-            'web_url': data['html_url']
+            "id": data["number"],
+            "resource_url": data["url"],
+            "web_url": data["html_url"],
         }
         return to_return
@@ -30,14 +30,12 @@ class NotFoundException(Exception):
 
 
 class HipchatIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'hipchat'
-    }
+    __mapper_args__ = {"polymorphic_identity": "hipchat"}
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = 'Message via Hipchat'
+    integration_action = "Message via Hipchat"
 
     @classmethod
     def create_client(cls, api_token):
@@ -48,36 +46,30 @@ class HipchatIntegration(IntegrationBase):
 class HipchatClient(object):
     def __init__(self, api_token):
         self.api_token = api_token
-        self.api_url = 'https://api.hipchat.com/v1'
+        self.api_url = "https://api.hipchat.com/v1"
 
-    def make_request(self, endpoint, method='get', data=None):
-        headers = {
-            'User-Agent': 'appenlight-hipchat',
-        }
-        url = '%s%s' % (self.api_url, endpoint)
-        params = {
-            'format': 'json',
-            'auth_token': self.api_token
-        }
+    def make_request(self, endpoint, method="get", data=None):
+        headers = {"User-Agent": "appenlight-hipchat"}
+        url = "%s%s" % (self.api_url, endpoint)
+        params = {"format": "json", "auth_token": self.api_token}
         try:
-            resp = getattr(requests, method)(url, data=data, headers=headers,
-                                             params=params,
-                                             timeout=3)
+            resp = getattr(requests, method)(
+                url, data=data, headers=headers, params=params, timeout=3
+            )
         except Exception as e:
-            msg = 'Error communicating with Hipchat: %s'
+            msg = "Error communicating with Hipchat: %s"
             raise IntegrationException(_(msg) % (e,))
         if resp.status_code == 404:
-            msg = 'Error communicating with Hipchat - Room not found'
+            msg = "Error communicating with Hipchat - Room not found"
             raise IntegrationException(msg)
         elif resp.status_code != requests.codes.ok:
-            msg = 'Error communicating with Hipchat - status code: %s'
+            msg = "Error communicating with Hipchat - status code: %s"
             raise IntegrationException(msg % resp.status_code)
         return resp
 
     def get_rooms(self):
         # not used with notification api token
-        return self.make_request('/rooms/list')
+        return self.make_request("/rooms/list")
 
     def send(self, payload):
-        return self.make_request('/rooms/message', method='post',
-                                 data=payload).json()
+        return self.make_request("/rooms/message", method="post", data=payload).json()
@@ -15,8 +15,7 @@
 # limitations under the License.
 
 import jira
-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 
 _ = str
 
@@ -26,14 +25,12 @@ class NotFoundException(Exception):
 
 
 class JiraIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'jira'
-    }
+    __mapper_args__ = {"polymorphic_identity": "jira"}
     front_visible = True
     as_alert_channel = False
     supports_report_alerting = False
     action_notification = True
-    integration_action = 'Add issue to Jira'
+    integration_action = "Add issue to Jira"
 
 
 class JiraClient(object):
@@ -44,12 +41,14 @@ class JiraClient(object):
         self.project = project
         self.request = request
         try:
-            self.client = jira.client.JIRA(options={'server': host_name},
-                                           basic_auth=(user_name, password))
+            self.client = jira.client.JIRA(
+                options={"server": host_name}, basic_auth=(user_name, password)
+            )
         except jira.JIRAError as e:
             raise IntegrationException(
-                'Communication problem: HTTP_STATUS:%s, URL:%s ' % (
-                    e.status_code, e.url))
+                "Communication problem: HTTP_STATUS:%s, URL:%s "
+                % (e.status_code, e.url)
+            )
 
     def get_projects(self):
         projects = self.client.projects()
@@ -58,42 +57,42 @@ class JiraClient(object):
     def get_assignees(self, request):
         """Gets list of possible assignees"""
         cache_region = request.registry.cache_regions.redis_sec_30
-        @cache_region.cache_on_arguments('JiraClient.get_assignees')
+
+        @cache_region.cache_on_arguments("JiraClient.get_assignees")
         def cached(project_name):
             users = self.client.search_assignable_users_for_issues(
-                None, project=project_name)
+                None, project=project_name
+            )
             results = []
             for user in users:
                 results.append({"id": user.name, "name": user.displayName})
             return results
+
         return cached(self.project)
 
     def get_issue_types(self, request):
         metadata = self.get_metadata(request)
         assignees = self.get_assignees(request)
         parsed_metadata = []
-        for entry in metadata['projects'][0]['issuetypes']:
-            issue = {"name": entry['name'],
-                     "id": entry['id'],
-                     "fields": []}
-            for i_id, field_i in entry['fields'].items():
+        for entry in metadata["projects"][0]["issuetypes"]:
+            issue = {"name": entry["name"], "id": entry["id"], "fields": []}
+            for i_id, field_i in entry["fields"].items():
                 field = {
-                    "name": field_i['name'],
+                    "name": field_i["name"],
                     "id": i_id,
-                    "required": field_i['required'],
+                    "required": field_i["required"],
                     "values": [],
-                    "type": field_i['schema'].get('type')
+                    "type": field_i["schema"].get("type"),
                 }
-                if field_i.get('allowedValues'):
-                    field['values'] = []
-                    for i in field_i['allowedValues']:
-                        field['values'].append(
-                            {'id': i['id'],
-                             'name': i.get('name', i.get('value', ''))})
-                if field['id'] == 'assignee':
-                    field['values'] = assignees
-                issue['fields'].append(field)
+                if field_i.get("allowedValues"):
+                    field["values"] = []
+                    for i in field_i["allowedValues"]:
+                        field["values"].append(
+                            {"id": i["id"], "name": i.get("name", i.get("value", ""))}
+                        )
+                if field["id"] == "assignee":
+                    field["values"] = assignees
+                issue["fields"].append(field)
             parsed_metadata.append(issue)
         return parsed_metadata
 
@@ -102,35 +101,37 @@ class JiraClient(object):
         # @cache_region.cache_on_arguments('JiraClient.get_metadata')
         def cached(project_name):
             return self.client.createmeta(
-                projectKeys=project_name, expand='projects.issuetypes.fields')
+                projectKeys=project_name, expand="projects.issuetypes.fields"
+            )
+
         return cached(self.project)
 
     def create_issue(self, form_data, request):
         issue_types = self.get_issue_types(request)
         payload = {
-            'project': {'key': form_data['project']},
-            'summary': form_data['title'],
-            'description': form_data['content'],
-            'issuetype': {'id': form_data['issue_type']},
-            "priority": {'id': form_data['priority']},
-            "assignee": {'name': form_data['responsible']},
+            "project": {"key": form_data["project"]},
+            "summary": form_data["title"],
+            "description": form_data["content"],
+            "issuetype": {"id": form_data["issue_type"]},
+            "priority": {"id": form_data["priority"]},
+            "assignee": {"name": form_data["responsible"]},
         }
         for issue_type in issue_types:
-            if issue_type['id'] == form_data['issue_type']:
-                for field in issue_type['fields']:
+            if issue_type["id"] == form_data["issue_type"]:
+                for field in issue_type["fields"]:
                     # set some defaults for other required fields
-                    if field == 'reporter':
-                        payload["reporter"] = {'id': self.user_name}
-                    if field['required'] and field['id'] not in payload:
-                        if field['type'] == 'array':
-                            payload[field['id']] = [field['values'][0]]
-                        elif field['type'] == 'string':
-                            payload[field['id']] = ''
+                    if field == "reporter":
+                        payload["reporter"] = {"id": self.user_name}
+                    if field["required"] and field["id"] not in payload:
+                        if field["type"] == "array":
+                            payload[field["id"]] = [field["values"][0]]
+                        elif field["type"] == "string":
+                            payload[field["id"]] = ""
         new_issue = self.client.create_issue(fields=payload)
-        web_url = self.host_name + '/browse/' + new_issue.key
+        web_url = self.host_name + "/browse/" + new_issue.key
         to_return = {
-            'id': new_issue.id,
-            'resource_url': new_issue.self,
-            'web_url': web_url
+            "id": new_issue.id,
+            "resource_url": new_issue.self,
+            "web_url": web_url,
        }
         return to_return
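
Note: the blank line Black inserts before `@cache_region.cache_on_arguments(...)` is formatting only; the decorator itself is the dogpile.cache memoization API exposed by the `redis_sec_30` region. A standalone sketch of the same idea with an in-memory backend (the region configuration here is illustrative, not AppEnlight's):

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=30)

    @region.cache_on_arguments(namespace="JiraClient.get_assignees")
    def get_assignees_cached(project_name):
        # the expensive lookup runs at most once per project_name per 30s
        return ["user-a", "user-b"]

    get_assignees_cached("PROJ")  # computed
    get_assignees_cached("PROJ")  # served from the cache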
@@ -18,8 +18,7 @@ import logging
 
 import requests
 
-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 from appenlight.lib.ext_json import json
 
 _ = str
@@ -32,14 +31,12 @@ class NotFoundException(Exception):
 
 
 class SlackIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'slack'
-    }
+    __mapper_args__ = {"polymorphic_identity": "slack"}
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = 'Message via Slack'
+    integration_action = "Message via Slack"
 
     @classmethod
     def create_client(cls, api_token):
@@ -52,23 +49,17 @@ class SlackClient(object):
         self.api_url = api_url
 
     def make_request(self, data=None):
-        headers = {
-            'User-Agent': 'appenlight-slack',
-            'Content-Type': 'application/json'
-        }
+        headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"}
         try:
-            resp = getattr(requests, 'post')(self.api_url,
-                                             data=json.dumps(data),
-                                             headers=headers,
-                                             timeout=3)
+            resp = getattr(requests, "post")(
+                self.api_url, data=json.dumps(data), headers=headers, timeout=3
+            )
         except Exception as e:
-            raise IntegrationException(
-                _('Error communicating with Slack: %s') % (e,))
+            raise IntegrationException(_("Error communicating with Slack: %s") % (e,))
         if resp.status_code != requests.codes.ok:
-            msg = 'Error communicating with Slack - status code: %s'
+            msg = "Error communicating with Slack - status code: %s"
             raise IntegrationException(msg % resp.status_code)
         return resp
 
     def send(self, payload):
-        return self.make_request('/rooms/message', method='post',
-                                 data=payload).json()
+        return self.make_request("/rooms/message", method="post", data=payload).json()
@@ -18,8 +18,7 @@ import logging
 
 import requests
 
-from appenlight.models.integrations import (IntegrationBase,
-                                            IntegrationException)
+from appenlight.models.integrations import IntegrationBase, IntegrationException
 from appenlight.models.alert_channel import AlertChannel
 from appenlight.lib.ext_json import json
 
@@ -33,14 +32,12 @@ class NotFoundException(Exception):
 
 
 class WebhooksIntegration(IntegrationBase):
-    __mapper_args__ = {
-        'polymorphic_identity': 'webhooks'
-    }
+    __mapper_args__ = {"polymorphic_identity": "webhooks"}
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = 'Message via Webhooks'
+    integration_action = "Message via Webhooks"
 
     @classmethod
     def create_client(cls, url):
@@ -52,34 +49,33 @@ class WebhooksClient(object):
     def __init__(self, url):
         self.api_url = url
 
-    def make_request(self, url, method='get', data=None):
+    def make_request(self, url, method="get", data=None):
         headers = {
-            'Content-Type': 'application/json',
-            'User-Agent': 'appenlight-webhooks',
+            "Content-Type": "application/json",
+            "User-Agent": "appenlight-webhooks",
         }
         try:
             if data:
                 data = json.dumps(data)
-            resp = getattr(requests, method)(url, data=data, headers=headers,
-                                             timeout=3)
+            resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3)
         except Exception as e:
             raise IntegrationException(
-                _('Error communicating with Webhooks: {}').format(e))
+                _("Error communicating with Webhooks: {}").format(e)
+            )
         if resp.status_code > 299:
             raise IntegrationException(
-                'Error communicating with Webhooks - status code: {}'.format(
-                    resp.status_code))
+                "Error communicating with Webhooks - status code: {}".format(
+                    resp.status_code
+                )
+            )
         return resp
 
     def send_to_hook(self, payload):
-        return self.make_request(self.api_url, method='post',
-                                 data=payload).json()
+        return self.make_request(self.api_url, method="post", data=payload).json()
 
 
 class WebhooksAlertChannel(AlertChannel):
-    __mapper_args__ = {
-        'polymorphic_identity': 'webhooks'
-    }
+    __mapper_args__ = {"polymorphic_identity": "webhooks"}
 
     def notify_reports(self, **kwargs):
         """
@@ -95,17 +91,28 @@ class WebhooksAlertChannel(AlertChannel):
         """
         template_vars = self.get_notification_basic_vars(kwargs)
         payload = []
-        include_keys = ('id', 'http_status', 'report_type', 'resource_name',
-                        'front_url', 'resource_id', 'error', 'url_path',
-                        'tags', 'duration')
-
-        for occurences, report in kwargs['reports']:
-            r_dict = report.last_report_ref.get_dict(kwargs['request'],
-                                                     include_keys=include_keys)
-            r_dict['group']['occurences'] = occurences
+        include_keys = (
+            "id",
+            "http_status",
+            "report_type",
+            "resource_name",
+            "front_url",
+            "resource_id",
+            "error",
+            "url_path",
+            "tags",
+            "duration",
+        )
+
+        for occurences, report in kwargs["reports"]:
+            r_dict = report.last_report_ref.get_dict(
+                kwargs["request"], include_keys=include_keys
+            )
+            r_dict["group"]["occurences"] = occurences
             payload.append(r_dict)
         client = WebhooksIntegration.create_client(
-            self.integration.config['reports_webhook'])
+            self.integration.config["reports_webhook"]
+        )
         client.send_to_hook(payload)
 
     def notify_alert(self, **kwargs):
@@ -120,19 +127,19 @@ class WebhooksAlertChannel(AlertChannel):
 
         """
         payload = {
-            'alert_action': kwargs['event'].unified_alert_action(),
-            'alert_name': kwargs['event'].unified_alert_name(),
-            'event_time': kwargs['event'].end_date or kwargs[
-                'event'].start_date,
-            'resource_name': None,
-            'resource_id': None
+            "alert_action": kwargs["event"].unified_alert_action(),
+            "alert_name": kwargs["event"].unified_alert_name(),
+            "event_time": kwargs["event"].end_date or kwargs["event"].start_date,
+            "resource_name": None,
+            "resource_id": None,
         }
-        if kwargs['event'].values and kwargs['event'].values.get('reports'):
-            payload['reports'] = kwargs['event'].values.get('reports', [])
-        if 'application' in kwargs:
-            payload['resource_name'] = kwargs['application'].resource_name
-            payload['resource_id'] = kwargs['application'].resource_id
+        if kwargs["event"].values and kwargs["event"].values.get("reports"):
+            payload["reports"] = kwargs["event"].values.get("reports", [])
+        if "application" in kwargs:
+            payload["resource_name"] = kwargs["application"].resource_name
+            payload["resource_id"] = kwargs["application"].resource_id
 
         client = WebhooksIntegration.create_client(
-            self.integration.config['alerts_webhook'])
+            self.integration.config["alerts_webhook"]
        )
         client.send_to_hook(payload)
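
Note: every client touched above shares the `getattr(requests, method)(...)` idiom — the HTTP verb arrives as a string and is resolved to `requests.get`, `requests.post`, and so on at call time. A compact sketch of the idiom (the endpoint URL is a placeholder):

    import json

    import requests

    def make_request(url, method="get", data=None, timeout=3):
        func = getattr(requests, method)  # "post" -> requests.post, etc.
        body = json.dumps(data) if data is not None else None
        headers = {"Content-Type": "application/json"}
        return func(url, data=body, headers=headers, timeout=timeout)

    # hypothetical endpoint, for illustration only
    make_request("https://example.com/hook", method="post", data={"alert_name": "demo"})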
@@ -29,21 +29,23 @@ log = logging.getLogger(__name__)
 
 
 class Log(Base, BaseModel):
-    __tablename__ = 'logs'
-    __table_args__ = {'implicit_returning': False}
+    __tablename__ = "logs"
+    __table_args__ = {"implicit_returning": False}
 
     log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
-    resource_id = sa.Column(sa.Integer(),
-                            sa.ForeignKey('applications.resource_id',
-                                          onupdate='CASCADE',
-                                          ondelete='CASCADE'),
-                            nullable=False,
-                            index=True)
-    log_level = sa.Column(sa.Unicode, nullable=False, index=True,
-                          default='INFO')
-    message = sa.Column(sa.UnicodeText(), default='')
-    timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
-                          server_default=sa.func.now())
+    resource_id = sa.Column(
+        sa.Integer(),
+        sa.ForeignKey(
+            "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
+        ),
+        nullable=False,
+        index=True,
+    )
+    log_level = sa.Column(sa.Unicode, nullable=False, index=True, default="INFO")
+    message = sa.Column(sa.UnicodeText(), default="")
+    timestamp = sa.Column(
+        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
+    )
     request_id = sa.Column(sa.Unicode())
     namespace = sa.Column(sa.Unicode())
     primary_key = sa.Column(sa.Unicode())
@@ -52,39 +54,40 @@ class Log(Base, BaseModel):
     permanent = sa.Column(sa.Boolean(), nullable=False, default=False)
 
     def __str__(self):
-        return self.__unicode__().encode('utf8')
+        return self.__unicode__().encode("utf8")
 
     def __unicode__(self):
-        return '<Log id:%s, lv:%s, ns:%s >' % (
-            self.log_id, self.log_level, self.namespace)
+        return "<Log id:%s, lv:%s, ns:%s >" % (
+            self.log_id,
+            self.log_level,
+            self.namespace,
+        )
 
     def set_data(self, data, resource):
-        level = data.get('log_level').upper()
+        level = data.get("log_level").upper()
         self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN)
-        self.message = data.get('message', '')
-        server_name = data.get('server', '').lower() or 'unknown'
-        self.tags = {
-            'server_name': server_name
-        }
-        if data.get('tags'):
-            for tag_tuple in data['tags']:
+        self.message = data.get("message", "")
+        server_name = data.get("server", "").lower() or "unknown"
+        self.tags = {"server_name": server_name}
+        if data.get("tags"):
+            for tag_tuple in data["tags"]:
                 self.tags[tag_tuple[0]] = tag_tuple[1]
-        self.timestamp = data['date']
-        r_id = data.get('request_id', '')
+        self.timestamp = data["date"]
+        r_id = data.get("request_id", "")
         if not r_id:
-            r_id = ''
-        self.request_id = r_id.replace('-', '')
+            r_id = ""
+        self.request_id = r_id.replace("-", "")
         self.resource_id = resource.resource_id
-        self.namespace = data.get('namespace') or ''
-        self.permanent = data.get('permanent')
-        self.primary_key = data.get('primary_key')
+        self.namespace = data.get("namespace") or ""
+        self.permanent = data.get("permanent")
+        self.primary_key = data.get("primary_key")
         if self.primary_key is not None:
-            self.tags['appenlight_primary_key'] = self.primary_key
+            self.tags["appenlight_primary_key"] = self.primary_key
 
     def get_dict(self):
         instance_dict = super(Log, self).get_dict()
-        instance_dict['log_level'] = LogLevel.key_from_value(self.log_level)
-        instance_dict['resource_name'] = self.application.resource_name
+        instance_dict["log_level"] = LogLevel.key_from_value(self.log_level)
+        instance_dict["resource_name"] = self.application.resource_name
         return instance_dict
 
     @property
@@ -92,39 +95,38 @@ class Log(Base, BaseModel):
         if not self.primary_key:
             return None
 
-        to_hash = '{}_{}_{}'.format(self.resource_id, self.primary_key,
-                                    self.namespace)
-        return hashlib.sha1(to_hash.encode('utf8')).hexdigest()
+        to_hash = "{}_{}_{}".format(self.resource_id, self.primary_key, self.namespace)
+        return hashlib.sha1(to_hash.encode("utf8")).hexdigest()
 
     def es_doc(self):
         tags = {}
         tag_list = []
         for name, value in self.tags.items():
             # replace dot in indexed tag name
-            name = name.replace('.', '_')
+            name = name.replace(".", "_")
             tag_list.append(name)
             tags[name] = {
                 "values": convert_es_type(value),
-                "numeric_values": value if (
-                    isinstance(value, (int, float)) and
-                    not isinstance(value, bool)) else None
+                "numeric_values": value
+                if (isinstance(value, (int, float)) and not isinstance(value, bool))
+                else None,
             }
         return {
-            'pg_id': str(self.log_id),
-            'delete_hash': self.delete_hash,
-            'resource_id': self.resource_id,
-            'request_id': self.request_id,
-            'log_level': LogLevel.key_from_value(self.log_level),
-            'timestamp': self.timestamp,
-            'message': self.message if self.message else '',
-            'namespace': self.namespace if self.namespace else '',
-            'tags': tags,
-            'tag_list': tag_list
+            "pg_id": str(self.log_id),
+            "delete_hash": self.delete_hash,
+            "resource_id": self.resource_id,
+            "request_id": self.request_id,
+            "log_level": LogLevel.key_from_value(self.log_level),
+            "timestamp": self.timestamp,
+            "message": self.message if self.message else "",
+            "namespace": self.namespace if self.namespace else "",
+            "tags": tags,
+            "tag_list": tag_list,
        }
 
     @property
     def partition_id(self):
         if self.permanent:
-            return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m')
+            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m")
         else:
-            return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m_%d')
+            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m_%d")
25 |
|
25 | |||
26 |
|
26 | |||
27 | class Metric(Base, BaseModel): |
|
27 | class Metric(Base, BaseModel): | |
28 |
__tablename__ = |
|
28 | __tablename__ = "metrics" | |
29 |
__table_args__ = { |
|
29 | __table_args__ = {"implicit_returning": False} | |
30 |
|
30 | |||
31 | pkey = sa.Column(sa.BigInteger(), primary_key=True) |
|
31 | pkey = sa.Column(sa.BigInteger(), primary_key=True) | |
32 |
resource_id = sa.Column( |
|
32 | resource_id = sa.Column( | |
33 | sa.ForeignKey('applications.resource_id'), |
|
33 | sa.Integer(), | |
34 | nullable=False, primary_key=True) |
|
34 | sa.ForeignKey("applications.resource_id"), | |
35 | timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, |
|
35 | nullable=False, | |
36 | server_default=sa.func.now()) |
|
36 | primary_key=True, | |
|
37 | ) | |||
|
38 | timestamp = sa.Column( | |||
|
39 | sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now() | |||
|
40 | ) | |||
37 | tags = sa.Column(JSON(), default={}) |
|
41 | tags = sa.Column(JSON(), default={}) | |
38 | namespace = sa.Column(sa.Unicode(255)) |
|
42 | namespace = sa.Column(sa.Unicode(255)) | |
39 |
|
43 | |||
40 | @property |
|
44 | @property | |
41 | def partition_id(self): |
|
45 | def partition_id(self): | |
42 |
return |
|
46 | return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d") | |
43 |
|
47 | |||
44 | def es_doc(self): |
|
48 | def es_doc(self): | |
45 | tags = {} |
|
49 | tags = {} | |
46 | tag_list = [] |
|
50 | tag_list = [] | |
47 | for name, value in self.tags.items(): |
|
51 | for name, value in self.tags.items(): | |
48 | # replace dot in indexed tag name |
|
52 | # replace dot in indexed tag name | |
49 |
name = name.replace( |
|
53 | name = name.replace(".", "_") | |
50 | tag_list.append(name) |
|
54 | tag_list.append(name) | |
51 | tags[name] = { |
|
55 | tags[name] = { | |
52 | "values": convert_es_type(value), |
|
56 | "values": convert_es_type(value), | |
53 |
"numeric_values": value |
|
57 | "numeric_values": value | |
54 |
|
|
58 | if (isinstance(value, (int, float)) and not isinstance(value, bool)) | |
55 | not isinstance(value, bool)) else None |
|
59 | else None, | |
56 | } |
|
60 | } | |
57 |
|
61 | |||
58 | return { |
|
62 | return { | |
59 |
|
|
63 | "resource_id": self.resource_id, | |
60 |
|
|
64 | "timestamp": self.timestamp, | |
61 |
|
|
65 | "namespace": self.namespace, | |
62 |
|
|
66 | "tags": tags, | |
63 |
|
|
67 | "tag_list": tag_list, | |
64 | } |
|
68 | } |
@@ -22,19 +22,19 b' from . import Base' | |||||
22 |
|
22 | |||
23 |
|
23 | |||
24 | class PluginConfig(Base, BaseModel): |
|
24 | class PluginConfig(Base, BaseModel): | |
25 |
__tablename__ = |
|
25 | __tablename__ = "plugin_configs" | |
26 |
|
26 | |||
27 | id = sa.Column(sa.Integer, primary_key=True) |
|
27 | id = sa.Column(sa.Integer, primary_key=True) | |
28 | plugin_name = sa.Column(sa.Unicode) |
|
28 | plugin_name = sa.Column(sa.Unicode) | |
29 | section = sa.Column(sa.Unicode) |
|
29 | section = sa.Column(sa.Unicode) | |
30 | config = sa.Column(JSON, nullable=False) |
|
30 | config = sa.Column(JSON, nullable=False) | |
31 |
resource_id = sa.Column( |
|
31 | resource_id = sa.Column( | |
32 | sa.ForeignKey('resources.resource_id', |
|
32 | sa.Integer(), | |
33 | onupdate='cascade', |
|
33 | sa.ForeignKey("resources.resource_id", onupdate="cascade", ondelete="cascade"), | |
34 | ondelete='cascade')) |
|
34 | ) | |
35 |
owner_id = sa.Column( |
|
35 | owner_id = sa.Column( | |
36 |
|
|
36 | sa.Integer(), sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade") | |
37 | ondelete='cascade')) |
|
37 | ) | |
38 |
|
38 | |||
39 | def __json__(self, request): |
|
39 | def __json__(self, request): | |
40 | return self.get_dict() |
|
40 | return self.get_dict() |
(collapsed diff: 515 lines changed; only the visible hunk follows)
@@ -36,51 +36,55 @@ from ziggurat_foundations.models.base import BaseModel
log = logging.getLogger(__name__)

REPORT_TYPE_MATRIX = {
    "http_status": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
    "group:priority": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
    "duration": {"type": "float", "ops": ("ge", "le")},
    "url_domain": {
        "type": "unicode",
        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
    },
    "url_path": {
        "type": "unicode",
        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
    },
    "error": {
        "type": "unicode",
        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
    },
    "tags:server_name": {
        "type": "unicode",
        "ops": ("eq", "ne", "startswith", "endswith", "contains"),
    },
    "traceback": {"type": "unicode", "ops": ("contains",)},
    "group:occurences": {"type": "int", "ops": ("eq", "ne", "ge", "le")},
}


class Report(Base, BaseModel):
    __tablename__ = "reports"
    __table_args__ = {"implicit_returning": False}

    id = sa.Column(sa.Integer, nullable=False, primary_key=True)
    group_id = sa.Column(
        sa.BigInteger,
        sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
    )
    resource_id = sa.Column(sa.Integer(), nullable=False, index=True)
    report_type = sa.Column(sa.Integer(), nullable=False, index=True)
    error = sa.Column(sa.UnicodeText(), index=True)
    extra = sa.Column(JSON(), default={})
    request = sa.Column(JSON(), nullable=False, default={})
    ip = sa.Column(sa.String(39), index=True, default="")
    username = sa.Column(sa.Unicode(255), default="")
    user_agent = sa.Column(sa.Unicode(255), default="")
    url = sa.Column(sa.UnicodeText(), index=True)
    request_id = sa.Column(sa.Text())
    request_stats = sa.Column(JSON(), nullable=False, default={})
    traceback = sa.Column(JSON(), nullable=False, default=None)
    traceback_hash = sa.Column(sa.Text())
    start_time = sa.Column(
        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
    )
    end_time = sa.Column(sa.DateTime())
    duration = sa.Column(sa.Float, default=0)
    http_status = sa.Column(sa.Integer, index=True)
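REPORT_TYPE_MATRIX above is the whitelist that report filters are checked against: each searchable field maps to a value type and the comparison operators it accepts. A hypothetical validator sketch of that contract (the real check happens in appenlight's Rule class, used further down in this diff):

    # Hypothetical helper illustrating how a filter row can be checked
    # against REPORT_TYPE_MATRIX; not appenlight's actual validation code.
    def validate_filter(field, op, matrix):
        if field not in matrix:
            raise ValueError("unknown field: %s" % field)
        if op not in matrix[field]["ops"]:
            raise ValueError("operator %r not allowed for %s" % (op, field))
        return matrix[field]["type"]

    assert validate_filter("http_status", "ge", REPORT_TYPE_MATRIX) == "int"
    # validate_filter("duration", "eq", REPORT_TYPE_MATRIX) would raise:
    # "duration" only supports "ge" and "le".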
@@ -89,99 +93,104 @@ class Report(Base, BaseModel):
    tags = sa.Column(JSON(), nullable=False, default={})
    language = sa.Column(sa.Integer(), default=0)
    # this is used to determine partition for the report
    report_group_time = sa.Column(
        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
    )

    logs = sa.orm.relationship(
        "Log",
        lazy="dynamic",
        passive_deletes=True,
        passive_updates=True,
        primaryjoin="and_(Report.request_id==Log.request_id, "
        "Log.request_id != None, Log.request_id != '')",
        foreign_keys="[Log.request_id]",
    )

    slow_calls = sa.orm.relationship(
        "SlowCall",
        backref="detail",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        order_by="SlowCall.timestamp",
    )

    def set_data(self, data, resource, protocol_version=None):
        self.http_status = data["http_status"]
        self.priority = data["priority"]
        self.error = data["error"]
        report_language = data.get("language", "").lower()
        self.language = getattr(Language, report_language, Language.unknown)
        # we need temp holder here to decide later
        # if we want to to commit the tags if report is marked for creation
        self.tags = {"server_name": data["server"], "view_name": data["view_name"]}
        if data.get("tags"):
            for tag_tuple in data["tags"]:
                self.tags[tag_tuple[0]] = tag_tuple[1]
        self.traceback = data["traceback"]
        stripped_traceback = self.stripped_traceback()
        tb_repr = repr(stripped_traceback).encode("utf8")
        self.traceback_hash = hashlib.sha1(tb_repr).hexdigest()
        url_info = urllib.parse.urlsplit(data.get("url", ""), allow_fragments=False)
        self.url_domain = url_info.netloc[:128]
        self.url_path = url_info.path[:2048]
        self.occurences = data["occurences"]
        if self.error:
            self.report_type = ReportType.error
        else:
            self.report_type = ReportType.slow

        # but if its status 404 its 404 type
        if self.http_status in [404, "404"] or self.error == "404 Not Found":
            self.report_type = ReportType.not_found
            self.error = ""

        self.generate_grouping_hash(
            data.get("appenlight.group_string", data.get("group_string")),
            resource.default_grouping,
            protocol_version,
        )

        # details
        if data["http_status"] in [404, "404"]:
            data = {
                "username": data["username"],
                "ip": data["ip"],
                "url": data["url"],
                "user_agent": data["user_agent"],
            }
            if data.get("HTTP_REFERER") or data.get("http_referer"):
                data["HTTP_REFERER"] = data.get("HTTP_REFERER", "") or data.get(
                    "http_referer", ""
                )

        self.resource_id = resource.resource_id
        self.username = data["username"]
        self.user_agent = data["user_agent"]
        self.ip = data["ip"]
        self.extra = {}
        if data.get("extra"):
            for extra_tuple in data["extra"]:
                self.extra[extra_tuple[0]] = extra_tuple[1]

        self.url = data["url"]
        self.request_id = data.get("request_id", "").replace("-", "") or str(
            uuid.uuid4()
        )
        request_data = data.get("request", {})

        self.request = request_data
        self.request_stats = data.get("request_stats", {})
        traceback = data.get("traceback")
        if not traceback:
            traceback = data.get("frameinfo")
        self.traceback = traceback
        start_date = convert_date(data.get("start_time"))
        if not self.start_time or self.start_time < start_date:
            self.start_time = start_date

        self.end_time = convert_date(data.get("end_time"), False)
        self.duration = 0

        if self.start_time and self.end_time:
@@ -190,81 +199,85 @@ class Report(Base, BaseModel):

        # update tags with other vars
        if self.username:
            self.tags["user_name"] = self.username
        self.tags["report_language"] = Language.key_from_value(self.language)

    def add_slow_calls(self, data, report_group):
        slow_calls = []
        for call in data.get("slow_calls", []):
            sc_inst = SlowCall()
            sc_inst.set_data(
                call, resource_id=self.resource_id, report_group=report_group
            )
            slow_calls.append(sc_inst)
        self.slow_calls.extend(slow_calls)
        return slow_calls

    def get_dict(self, request, details=False, exclude_keys=None, include_keys=None):
        from appenlight.models.services.report_group import ReportGroupService

        instance_dict = super(Report, self).get_dict()
        instance_dict["req_stats"] = self.req_stats()
        instance_dict["group"] = {}
        instance_dict["group"]["id"] = self.report_group.id
        instance_dict["group"]["total_reports"] = self.report_group.total_reports
        instance_dict["group"]["last_report"] = self.report_group.last_report
        instance_dict["group"]["priority"] = self.report_group.priority
        instance_dict["group"]["occurences"] = self.report_group.occurences
        instance_dict["group"]["last_timestamp"] = self.report_group.last_timestamp
        instance_dict["group"]["first_timestamp"] = self.report_group.first_timestamp
        instance_dict["group"]["public"] = self.report_group.public
        instance_dict["group"]["fixed"] = self.report_group.fixed
        instance_dict["group"]["read"] = self.report_group.read
        instance_dict["group"]["average_duration"] = self.report_group.average_duration

        instance_dict["resource_name"] = self.report_group.application.resource_name
        instance_dict["report_type"] = self.report_type

        if instance_dict["http_status"] == 404 and not instance_dict["error"]:
            instance_dict["error"] = "404 Not Found"

        if details:
            instance_dict[
                "affected_users_count"
            ] = ReportGroupService.affected_users_count(self.report_group)
            instance_dict["top_affected_users"] = [
                {"username": u.username, "count": u.count}
                for u in ReportGroupService.top_affected_users(self.report_group)
            ]
            instance_dict["application"] = {"integrations": []}
            for integration in self.report_group.application.integrations:
                if integration.front_visible:
                    instance_dict["application"]["integrations"].append(
                        {
                            "name": integration.integration_name,
                            "action": integration.integration_action,
                        }
                    )
            instance_dict["comments"] = [
                c.get_dict() for c in self.report_group.comments
            ]

            instance_dict["group"]["next_report"] = None
            instance_dict["group"]["previous_report"] = None
            next_in_group = self.get_next_in_group(request)
            previous_in_group = self.get_previous_in_group(request)
            if next_in_group:
                instance_dict["group"]["next_report"] = next_in_group
            if previous_in_group:
                instance_dict["group"]["previous_report"] = previous_in_group

        # slow call ordering
        def find_parent(row, data):
            for r in reversed(data):
                try:
                    if (
                        row["timestamp"] > r["timestamp"]
                        and row["end_time"] < r["end_time"]
                    ):
                        return r
                except TypeError as e:
                    log.warning("reports_view.find_parent: %s" % e)
            return None

        new_calls = []
@@ -274,24 +287,23 @@ class Report(Base, BaseModel):
            for x in range(len(calls) - 1, -1, -1):
                parent = find_parent(calls[x], calls)
                if parent:
                    parent["children"].append(calls[x])
                else:
                    # no parent at all? append to new calls anyways
                    new_calls.append(calls[x])
                    # print 'append', calls[x]
                del calls[x]
                break
        instance_dict["slow_calls"] = new_calls

        instance_dict["front_url"] = self.get_public_url(request)

        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []
        for k in list(instance_dict.keys()):
            if k == "group":
                continue
            if k in exclude_keys_list or (k not in include_keys_list and include_keys):
                del instance_dict[k]
        return instance_dict
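The loop above, together with find_parent, turns the flat slow-call list into a tree by timestamp containment: scanning from the end, each call attaches to the nearest call that fully encloses it, and only top-level calls survive in new_calls. A self-contained sketch of the same nesting with plain dicts and numeric timestamps in place of ORM rows:

    # Self-contained sketch of the slow-call nesting above.
    def find_parent(row, data):
        for r in reversed(data):
            if row["timestamp"] > r["timestamp"] and row["end_time"] < r["end_time"]:
                return r
        return None

    calls = [
        {"name": "request", "timestamp": 0, "end_time": 10, "children": []},
        {"name": "sql", "timestamp": 2, "end_time": 5, "children": []},
        {"name": "fetch", "timestamp": 3, "end_time": 4, "children": []},
    ]
    new_calls = []
    while calls:
        for x in range(len(calls) - 1, -1, -1):
            parent = find_parent(calls[x], calls)
            if parent:
                parent["children"].append(calls[x])
            else:
                new_calls.append(calls[x])
            del calls[x]
            break

    # new_calls holds only "request"; "sql" nests under it, "fetch" under "sql".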
@@ -301,19 +313,20 @@ class Report(Base, BaseModel):
            "query": {
                "filtered": {
                    "filter": {
                        "and": [
                            {"term": {"group_id": self.group_id}},
                            {"range": {"pg_id": {"lt": self.id}}},
                        ]
                    }
                }
            },
            "sort": [{"_doc": {"order": "desc"}}],
        }
        result = request.es_conn.search(
            body=query, index=self.partition_id, doc_type="report"
        )
        if result["hits"]["total"]:
            return result["hits"]["hits"][0]["_source"]["pg_id"]

    def get_next_in_group(self, request):
        query = {
@@ -321,19 +334,20 @@ class Report(Base, BaseModel):
            "query": {
                "filtered": {
                    "filter": {
                        "and": [
                            {"term": {"group_id": self.group_id}},
                            {"range": {"pg_id": {"gt": self.id}}},
                        ]
                    }
                }
            },
            "sort": [{"_doc": {"order": "asc"}}],
        }
        result = request.es_conn.search(
            body=query, index=self.partition_id, doc_type="report"
        )
        if result["hits"]["total"]:
            return result["hits"]["hits"][0]["_source"]["pg_id"]

    def get_public_url(self, request=None, report_group=None, _app_url=None):
        """
@@ -341,53 +355,51 @@ class Report(Base, BaseModel):
        """
        if not request:
            request = get_current_request()
        url = request.route_url("/", _app_url=_app_url)
        if report_group:
            return (url + "ui/report/%s/%s") % (report_group.id, self.id)
        return (url + "ui/report/%s/%s") % (self.group_id, self.id)

    def req_stats(self):
        stats = self.request_stats.copy()
        stats["percentages"] = {}
        stats["percentages"]["main"] = 100.0
        main = stats.get("main", 0.0)
        if not main:
            return None
        for name, call_time in stats.items():
            if "calls" not in name and "main" not in name and "percentages" not in name:
                stats["main"] -= call_time
                stats["percentages"][name] = math.floor((call_time / main * 100.0))
                stats["percentages"]["main"] -= stats["percentages"][name]
        if stats["percentages"]["main"] < 0.0:
            stats["percentages"]["main"] = 0.0
            stats["main"] = 0.0
        return stats

    def generate_grouping_hash(
        self, hash_string=None, default_grouping=None, protocol_version=None
    ):
        """
        Generates SHA1 hash that will be used to group reports together
        """
        if not hash_string:
            location = self.tags.get("view_name") or self.url_path
            server_name = self.tags.get("server_name") or ""
            if default_grouping == "url_traceback":
                hash_string = "%s_%s_%s" % (self.traceback_hash, location, self.error)
                if self.language == Language.javascript:
                    hash_string = "%s_%s" % (self.traceback_hash, self.error)

            elif default_grouping == "traceback_server":
                hash_string = "%s_%s" % (self.traceback_hash, server_name)
                if self.language == Language.javascript:
                    hash_string = "%s_%s" % (self.traceback_hash, server_name)
            else:
                hash_string = "%s_%s" % (self.error, location)
        month = datetime.utcnow().date().replace(day=1)
        hash_string = "{}_{}".format(month, hash_string)
        binary_string = hash_string.encode("utf8")
        self.grouping_hash = hashlib.sha1(binary_string).hexdigest()
        return self.grouping_hash
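req_stats() above converts per-subsystem timings into percentages of the main request time, deliberately skipping any "*_calls" counters so call counts never skew the split. A worked example of the same arithmetic outside the ORM:

    import math

    # Worked example of the req_stats() percentage math above.
    stats = {"main": 1.0, "sql": 0.3, "tmpl": 0.2, "sql_calls": 5}
    stats["percentages"] = {"main": 100.0}
    main = stats["main"]
    for name, call_time in stats.items():
        if "calls" not in name and "main" not in name and "percentages" not in name:
            stats["main"] -= call_time               # 1.0 -> 0.7 -> 0.5
            stats["percentages"][name] = math.floor(call_time / main * 100.0)
            stats["percentages"]["main"] -= stats["percentages"][name]

    assert stats["percentages"] == {"main": 50.0, "sql": 30, "tmpl": 20}
    # "sql_calls" was skipped; "main" keeps whatever time the subsystems
    # do not account for.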
@@ -399,7 +411,7 @@ class Report(Base, BaseModel):

        if isinstance(stripped_traceback, list):
            for row in stripped_traceback:
                row.pop("vars", None)
        return stripped_traceback

    def notify_channel(self, report_group):
@@ -407,78 +419,81 @@ class Report(Base, BaseModel):
        Sends notification to websocket channel
        """
        settings = get_current_registry().settings
        log.info("notify channelstream")
        if self.report_type != ReportType.error:
            return
        payload = {
            "type": "message",
            "user": "__system__",
            "channel": "app_%s" % self.resource_id,
            "message": {
                "topic": "front_dashboard.new_topic",
                "report": {
                    "group": {
                        "priority": report_group.priority,
                        "first_timestamp": report_group.first_timestamp,
                        "last_timestamp": report_group.last_timestamp,
                        "average_duration": report_group.average_duration,
                        "occurences": report_group.occurences,
                    },
                    "report_id": self.id,
                    "group_id": self.group_id,
                    "resource_id": self.resource_id,
                    "http_status": self.http_status,
                    "url_domain": self.url_domain,
                    "url_path": self.url_path,
                    "error": self.error or "",
                    "server": self.tags.get("server_name"),
                    "view_name": self.tags.get("view_name"),
                    "front_url": self.get_public_url(),
                },
            },
        }
        channelstream_request(
            settings["cometd.secret"],
            "/message",
            [payload],
            servers=[settings["cometd_servers"]],
        )

    def es_doc(self):
        tags = {}
        tag_list = []
        for name, value in self.tags.items():
            name = name.replace(".", "_")
            tag_list.append(name)
            tags[name] = {
                "values": convert_es_type(value),
                "numeric_values": value
                if (isinstance(value, (int, float)) and not isinstance(value, bool))
                else None,
            }

        if "user_name" not in self.tags and self.username:
            tags["user_name"] = {"value": [self.username], "numeric_value": None}
        return {
            "_id": str(self.id),
            "pg_id": str(self.id),
            "resource_id": self.resource_id,
            "http_status": self.http_status or "",
            "start_time": self.start_time,
            "end_time": self.end_time,
            "url_domain": self.url_domain if self.url_domain else "",
            "url_path": self.url_path if self.url_path else "",
            "duration": self.duration,
            "error": self.error if self.error else "",
            "report_type": self.report_type,
            "request_id": self.request_id,
            "ip": self.ip,
            "group_id": str(self.group_id),
            "_parent": str(self.group_id),
            "tags": tags,
            "tag_list": tag_list,
        }

    @property
    def partition_id(self):
        return "rcae_r_%s" % self.report_group_time.strftime("%Y_%m")

    def partition_range(self):
        start_date = self.report_group_time.date().replace(day=1)
@@ -488,27 +503,31 @@ class Report(Base, BaseModel):


def after_insert(mapper, connection, target):
    if not hasattr(target, "_skip_ft_index"):
        data = target.es_doc()
        data.pop("_id", None)
        Datastores.es.index(
            target.partition_id, "report", data, parent=target.group_id, id=target.id
        )


def after_update(mapper, connection, target):
    if not hasattr(target, "_skip_ft_index"):
        data = target.es_doc()
        data.pop("_id", None)
        Datastores.es.index(
            target.partition_id, "report", data, parent=target.group_id, id=target.id
        )


def after_delete(mapper, connection, target):
    if not hasattr(target, "_skip_ft_index"):
        query = {"query": {"term": {"pg_id": target.id}}}
        Datastores.es.transport.perform_request(
            "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
        )


sa.event.listen(Report, "after_insert", after_insert)
sa.event.listen(Report, "after_update", after_update)
sa.event.listen(Report, "after_delete", after_delete)
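The listeners above mirror every ORM write into Elasticsearch, and partition_id routes each document to a monthly index derived from report_group_time. A small sketch of that naming scheme as it appears in this diff (reports use monthly "rcae_r_" indices, the metric model earlier uses daily "rcae_m_" ones):

    from datetime import datetime

    # Sketch of the monthly index naming used by Report.partition_id above.
    def report_partition(report_group_time):
        return "rcae_r_%s" % report_group_time.strftime("%Y_%m")

    assert report_partition(datetime(2018, 3, 14)) == "rcae_r_2018_03"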
@@ -20,13 +20,16 @@ import sqlalchemy as sa


class ReportAssignment(Base, BaseModel):
    __tablename__ = "reports_assignments"

    group_id = sa.Column(
        sa.BigInteger,
        sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
        primary_key=True,
    )
    owner_id = sa.Column(
        sa.Integer,
        sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
        primary_key=True,
    )
    report_time = sa.Column(sa.DateTime(), nullable=False)
@@ -22,23 +22,23 @@ from ziggurat_foundations.models.base import BaseModel


class ReportComment(Base, BaseModel):
    __tablename__ = "reports_comments"

    comment_id = sa.Column(sa.Integer, nullable=False, primary_key=True)
    group_id = sa.Column(
        sa.BigInteger,
        sa.ForeignKey("reports_groups.id", ondelete="cascade", onupdate="cascade"),
    )
    body = sa.Column(sa.UnicodeText(), default="")
    owner_id = sa.Column(
        sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
    )
    created_timestamp = sa.Column(
        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
    )
    report_time = sa.Column(sa.DateTime(), nullable=False)

    owner = sa.orm.relationship("User", lazy="joined")

    @property
    def processed_body(self):
@@ -46,5 +46,5 @@ class ReportComment(Base, BaseModel):

    def get_dict(self):
        instance_dict = super(ReportComment, self).get_dict()
        instance_dict["user_name"] = self.owner.user_name
        return instance_dict
@@ -33,26 +33,30 @@ log = logging.getLogger(__name__)


class ReportGroup(Base, BaseModel):
    __tablename__ = "reports_groups"
    __table_args__ = {"implicit_returning": False}

    id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey(
            "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
        ),
        nullable=False,
        index=True,
    )
    priority = sa.Column(
        sa.Integer, nullable=False, index=True, default=5, server_default="5"
    )
    first_timestamp = sa.Column(
        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
    )
    last_timestamp = sa.Column(
        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
    )
    error = sa.Column(sa.UnicodeText(), index=True)
    grouping_hash = sa.Column(sa.String(40), default="")
    triggered_postprocesses_ids = sa.Column(JSON(), nullable=False, default=list)
    report_type = sa.Column(sa.Integer, default=1)
    total_reports = sa.Column(sa.Integer, default=1)
    last_report = sa.Column(sa.Integer)
@@ -64,50 +68,58 @@ class ReportGroup(Base, BaseModel):
    notified = sa.Column(sa.Boolean(), index=True, default=False)
    public = sa.Column(sa.Boolean(), index=True, default=False)

    reports = sa.orm.relationship(
        "Report",
        lazy="dynamic",
        backref="report_group",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
    )

    comments = sa.orm.relationship(
        "ReportComment",
        lazy="dynamic",
        backref="report",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        order_by="ReportComment.comment_id",
    )

    assigned_users = sa.orm.relationship(
        "User",
        backref=sa.orm.backref(
            "assigned_reports_relation",
            lazy="dynamic",
            order_by=sa.desc(sa.text("reports_groups.id")),
        ),
        passive_deletes=True,
        passive_updates=True,
        secondary="reports_assignments",
        order_by="User.user_name",
    )

    stats = sa.orm.relationship(
        "ReportStat",
        lazy="dynamic",
        backref="report",
        passive_deletes=True,
        passive_updates=True,
    )

    last_report_ref = sa.orm.relationship(
        "Report",
        uselist=False,
        primaryjoin="ReportGroup.last_report " "== Report.id",
        foreign_keys="Report.id",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
    )

    def __repr__(self):
        return "<ReportGroup id:{}>".format(self.id)

    def get_report(self, report_id=None, public=False):
        """
@@ -121,8 +133,8 @@ class ReportGroup(Base, BaseModel):
        return self.reports.filter(Report.id == report_id).first()

    def get_public_url(self, request, _app_url=None):
        url = request.route_url("/", _app_url=_app_url)
        return (url + "ui/report/%s") % self.id

    def run_postprocessing(self, report):
        """
@@ -135,12 +147,15 @@ class ReportGroup(Base, BaseModel):
            rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
            report_dict = report.get_dict(request)
            # if was not processed yet
            if (
                rule_obj.match(report_dict)
                and action.pkey not in self.triggered_postprocesses_ids
            ):
                action.postprocess(self)
                # this way sqla can track mutation of list
                self.triggered_postprocesses_ids = self.triggered_postprocesses_ids + [
                    action.pkey
                ]

        get_db_session(None, self).flush()
        # do not go out of bounds
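The reassignment of triggered_postprocesses_ids above, instead of an in-place append, is what lets SQLAlchemy notice the change: a plain JSON column only registers as dirty when the attribute itself is set. A minimal sketch against a hypothetical model:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base

    # Hypothetical model; only the JSON column behaviour matters here.
    Base = declarative_base()

    class Demo(Base):
        __tablename__ = "demo"
        id = sa.Column(sa.Integer, primary_key=True)
        triggered_ids = sa.Column(sa.JSON, default=list)

    demo = Demo(triggered_ids=[])
    demo.triggered_ids.append(1)                   # in-place: invisible to the ORM
    demo.triggered_ids = demo.triggered_ids + [1]  # reassignment: marked dirty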
@@ -151,31 +166,30 @@ class ReportGroup(Base, BaseModel):

    def get_dict(self, request):
        instance_dict = super(ReportGroup, self).get_dict()
        instance_dict["server_name"] = self.get_report().tags.get("server_name")
        instance_dict["view_name"] = self.get_report().tags.get("view_name")
        instance_dict["resource_name"] = self.application.resource_name
        instance_dict["report_type"] = self.get_report().report_type
        instance_dict["url_path"] = self.get_report().url_path
        instance_dict["front_url"] = self.get_report().get_public_url(request)
        del instance_dict["triggered_postprocesses_ids"]
        return instance_dict

    def es_doc(self):
        return {
            "_id": str(self.id),
            "pg_id": str(self.id),
            "resource_id": self.resource_id,
            "error": self.error,
            "fixed": self.fixed,
            "public": self.public,
            "read": self.read,
            "priority": self.priority,
            "occurences": self.occurences,
            "average_duration": self.average_duration,
            "summed_duration": self.summed_duration,
            "first_timestamp": self.first_timestamp,
            "last_timestamp": self.last_timestamp,
        }

    def set_notification_info(self, notify_10=False, notify_100=False):
@@ -184,53 +198,54 @@ class ReportGroup(Base, BaseModel):
        """
        current_time = datetime.utcnow().replace(second=0, microsecond=0)
        # global app counter
        key = REDIS_KEYS["counters"]["reports_per_type"].format(
            self.report_type, current_time
        )
        redis_pipeline = Datastores.redis.pipeline()
        redis_pipeline.incr(key)
        redis_pipeline.expire(key, 3600 * 24)
        # detailed app notification for alerts and notifications
        redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
        redis_pipeline.sadd(
            REDIS_KEYS["apps_that_had_reports_alerting"], self.resource_id
        )
        # only notify for exceptions here
        if self.report_type == ReportType.error:
            redis_pipeline.sadd(REDIS_KEYS["apps_that_had_reports"], self.resource_id)
            redis_pipeline.sadd(
                REDIS_KEYS["apps_that_had_error_reports_alerting"], self.resource_id
            )
        key = REDIS_KEYS["counters"]["report_group_occurences"].format(self.id)
        redis_pipeline.incr(key)
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(self.id)
        redis_pipeline.incr(key)
        redis_pipeline.expire(key, 3600 * 24)

        if notify_10:
            key = REDIS_KEYS["counters"]["report_group_occurences_10th"].format(self.id)
            redis_pipeline.setex(key, 3600 * 24, 1)
        if notify_100:
            key = REDIS_KEYS["counters"]["report_group_occurences_100th"].format(
                self.id
            )
            redis_pipeline.setex(key, 3600 * 24, 1)

        key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
            self.report_type, self.resource_id
        )
        redis_pipeline.sadd(key, self.id)
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
            self.report_type, self.resource_id
        )
        redis_pipeline.sadd(key, self.id)
        redis_pipeline.expire(key, 3600 * 24)
        redis_pipeline.execute()

    @property
    def partition_id(self):
        return "rcae_r_%s" % self.first_timestamp.strftime("%Y_%m")

    def partition_range(self):
        start_date = self.first_timestamp.date().replace(day=1)
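set_notification_info() above batches all of its counter bumps, set additions, and 24-hour expiries into a single Redis pipeline, so one round-trip covers the whole bookkeeping pass. A minimal sketch of the same pattern; the key names here are illustrative, not appenlight's real REDIS_KEYS templates, and a reachable Redis is assumed:

    import redis  # assumes a locally reachable Redis server

    r = redis.StrictRedis()
    pipe = r.pipeline()
    key = "counters:report_group_occurences:{}".format(42)  # illustrative key
    pipe.incr(key)                 # bump the occurrence counter
    pipe.expire(key, 3600 * 24)    # refresh its 24h TTL on every hit
    pipe.sadd("reports_to_notify:error:app_1", 42)  # remember group for alerting
    pipe.expire("reports_to_notify:error:app_1", 3600 * 24)
    pipe.execute()                 # all commands sent in one round-trip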
@@ -240,29 +255,33 @@ class ReportGroup(Base, BaseModel):


def after_insert(mapper, connection, target):
    if not hasattr(target, "_skip_ft_index"):
        data = target.es_doc()
        data.pop("_id", None)
        Datastores.es.index(target.partition_id, "report_group", data, id=target.id)


def after_update(mapper, connection, target):
    if not hasattr(target, "_skip_ft_index"):
        data = target.es_doc()
        data.pop("_id", None)
        Datastores.es.index(target.partition_id, "report_group", data, id=target.id)


def after_delete(mapper, connection, target):
    query = {"query": {"term": {"group_id": target.id}}}
    # delete by query
    Datastores.es.transport.perform_request(
        "DELETE", "/{}/{}/_query".format(target.partition_id, "report"), body=query
    )
    query = {"query": {"term": {"pg_id": target.id}}}
    Datastores.es.transport.perform_request(
        "DELETE",
        "/{}/{}/_query".format(target.partition_id, "report_group"),
        body=query,
    )


sa.event.listen(ReportGroup, "after_insert", after_insert)
sa.event.listen(ReportGroup, "after_update", after_update)
sa.event.listen(ReportGroup, "after_delete", after_delete)
@@ -22,53 +22,58 b' from ziggurat_foundations.models.base import BaseModel' | |||||
22 |
|
22 | |||
23 |
|
23 | |||
class ReportStat(Base, BaseModel):
    __tablename__ = "reports_stats"
    __table_args__ = {"implicit_returning": False}

    group_id = sa.Column(
        sa.BigInteger(), sa.ForeignKey("reports_groups.id"), nullable=False
    )
    resource_id = sa.Column(
        sa.Integer(), sa.ForeignKey("applications.resource_id"), nullable=False
    )
    start_interval = sa.Column(sa.DateTime(), nullable=False)
    occurences = sa.Column(sa.Integer, nullable=True, default=0)
    owner_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
    type = sa.Column(sa.Integer, nullable=True, default=0)
    duration = sa.Column(sa.Float, nullable=True, default=0)
    id = sa.Column(sa.BigInteger, nullable=False, primary_key=True)
    server_name = sa.Column(sa.Unicode(128), nullable=False, default="")
    view_name = sa.Column(sa.Unicode(128), nullable=False, default="")

    @property
    def partition_id(self):
        return "rcae_r_%s" % self.start_interval.strftime("%Y_%m")

    def es_doc(self):
        return {
            "resource_id": self.resource_id,
            "timestamp": self.start_interval,
            "pg_id": str(self.id),
            "permanent": True,
            "request_id": None,
            "log_level": "ERROR",
            "message": None,
            "namespace": "appenlight.error",
            "tags": {
                "duration": {"values": self.duration, "numeric_values": self.duration},
                "occurences": {
                    "values": self.occurences,
                    "numeric_values": self.occurences,
                },
                "group_id": {"values": self.group_id, "numeric_values": self.group_id},
                "type": {
                    "values": ReportType.key_from_value(self.type),
                    "numeric_values": self.type,
                },
                "server_name": {"values": self.server_name, "numeric_values": None},
                "view_name": {"values": self.view_name, "numeric_values": None},
            },
            "tag_list": [
                "duration",
                "occurences",
                "group_id",
                "type",
                "server_name",
                "view_name",
            ],
        }
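The partition_id property above is what routes each stat row to a monthly Elasticsearch index. A minimal, self-contained sketch of the naming scheme (the date is hypothetical):

    from datetime import datetime

    # A report stat whose start_interval falls on 2019-03-15 is routed to
    # the monthly partition "rcae_r_2019_03".
    start_interval = datetime(2019, 3, 15, 10, 30)
    print("rcae_r_%s" % start_interval.strftime("%Y_%m"))  # rcae_r_2019_03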
@@ -23,11 +23,13 @@ from ziggurat_foundations.models.services.resource import ResourceService


class Resource(ResourceMixin, Base):
    events = sa.orm.relationship(
        "Event",
        lazy="dynamic",
        backref="resource",
        passive_deletes=True,
        passive_updates=True,
    )

    @property
    def owner_user_name(self):
@@ -39,46 +41,56 @@ class Resource(ResourceMixin, Base):
        if self.owner_group:
            return self.owner_group.group_name

    def get_dict(
        self,
        exclude_keys=None,
        include_keys=None,
        include_perms=False,
        include_processing_rules=False,
    ):
        result = super(Resource, self).get_dict(exclude_keys, include_keys)
        result["possible_permissions"] = self.__possible_permissions__
        if include_perms:
            result["current_permissions"] = self.user_permissions_list
        else:
            result["current_permissions"] = []
        if include_processing_rules:
            result["postprocessing_rules"] = [
                rule.get_dict() for rule in self.postprocess_conf
            ]
        else:
            result["postprocessing_rules"] = []
        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []
        d = {}
        for k in result.keys():
            if k not in exclude_keys_list and (
                k in include_keys_list or not include_keys
            ):
                d[k] = result[k]
        for k in ["owner_user_name", "owner_group_name"]:
            if k not in exclude_keys_list and (
                k in include_keys_list or not include_keys
            ):
                d[k] = getattr(self, k)
        return d

    @property
    def user_permissions_list(self):
        return [
            permission_tuple_to_dict(perm)
            for perm in ResourceService.users_for_perm(
                self, "__any_permission__", limit_group_permissions=True
            )
        ]

    @property
    def __acl__(self):
        acls = []

        if self.owner_user_id:
            acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS)])

        if self.owner_group_id:
            acls.extend([(Allow, "group:%s" % self.owner_group_id, ALL_PERMISSIONS)])
        return acls
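For reference, __acl__ above emits standard Pyramid ACL tuples. With a hypothetical owner user id of 7 and owner group id of 3, the property would evaluate to the list below (imports shown for completeness):

    from pyramid.security import ALL_PERMISSIONS, Allow

    acl = [
        (Allow, 7, ALL_PERMISSIONS),
        (Allow, "group:3", ALL_PERMISSIONS),
    ]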
@@ -13,4 +13,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -33,7 +33,6 @@ log = logging.getLogger(__name__)


class ApplicationService(BaseService):
    @classmethod
    def all(cls, db_session=None):
        db_session = get_db_session(db_session)
@@ -51,9 +50,9 @@ class ApplicationService(BaseService):
    @classmethod
    def by_api_key_cached(cls, db_session=None):
        db_session = get_db_session(db_session)
        cache_region = get_region("redis_min_1")

        @cache_region.cache_on_arguments("ApplicationService.by_api_key")
        def cached(*args, **kwargs):
            app = cls.by_api_key(*args, db_session=db_session, **kwargs)
            if app:
@@ -63,10 +62,11 @@ class ApplicationService(BaseService):
        return cached

    @classmethod
    def by_public_api_key(
        cls, api_key, db_session=None, from_cache=False, request=None
    ):
        db_session = get_db_session(db_session)
        cache_region = get_region("redis_min_1")

        def uncached(api_key):
            q = db_session.query(Application)
@@ -75,8 +75,8 @@ class ApplicationService(BaseService):
            return q.first()

        if from_cache:

            @cache_region.cache_on_arguments("ApplicationService.by_public_api_key")
            def cached(api_key):
                app = uncached(api_key)
                if app:
@@ -98,9 +98,9 @@ class ApplicationService(BaseService):
    @classmethod
    def by_id_cached(cls, db_session=None):
        db_session = get_db_session(db_session)
        cache_region = get_region("redis_min_1")

        @cache_region.cache_on_arguments("ApplicationService.by_id")
        def cached(*args, **kwargs):
            app = cls.by_id(*args, db_session=db_session, **kwargs)
            if app:
@@ -119,10 +119,9 @@ class ApplicationService(BaseService):
    @classmethod
    def by_http_referer(cls, referer_string, db_session=None):
        db_session = get_db_session(db_session)
        domain = urllib.parse.urlsplit(referer_string, allow_fragments=False).netloc
        if domain:
            if domain.startswith("www."):
                domain = domain[4:]
        q = db_session.query(Application).filter(Application.domain == domain)
        return q.first()
@@ -132,7 +131,8 @@ class ApplicationService(BaseService):
        db_session = get_db_session(db_session)
        q = db_session.query(Application)
        q2 = ReportGroup.last_updated(
            since_when, exclude_status=exclude_status, db_session=db_session
        )
        q2 = q2.from_self(ReportGroup.resource_id)
        q2 = q2.group_by(ReportGroup.resource_id)
        q = q.filter(Application.resource_id.in_(q2))
@@ -142,10 +142,10 @@ class ApplicationService(BaseService):
    def check_for_groups_alert(cls, resource, event_type, *args, **kwargs):
        """ Check for open alerts depending on group type.
        Create new one if nothing is found and send alerts """
        db_session = get_db_session(kwargs.get("db_session"))
        request = get_current_request()
        report_groups = kwargs["report_groups"]
        occurence_dict = kwargs["occurence_dict"]

        error_reports = 0
        slow_reports = 0
@@ -156,38 +156,45 @@ class ApplicationService(BaseService):
            elif group.get_report().report_type == ReportType.slow:
                slow_reports += occurences

        log_msg = "LIMIT INFO: %s : %s error reports. %s slow_reports" % (
            resource,
            error_reports,
            slow_reports,
        )
        logging.warning(log_msg)
        threshold = 10
        for event_type in ["error_report_alert", "slow_report_alert"]:
            if (
                error_reports < resource.error_report_threshold
                and event_type == "error_report_alert"
            ):
                continue
            elif (
                slow_reports <= resource.slow_report_threshold
                and event_type == "slow_report_alert"
            ):
                continue
            if event_type == "error_report_alert":
                amount = error_reports
                threshold = resource.error_report_threshold
            elif event_type == "slow_report_alert":
                amount = slow_reports
                threshold = resource.slow_report_threshold

            event = EventService.for_resource(
                [resource.resource_id],
                event_type=Event.types[event_type],
                status=Event.statuses["active"],
            )
            if event.first():
                log.info("ALERT: PROGRESS: %s %s" % (event_type, resource))
            else:
                log.warning("ALERT: OPEN: %s %s" % (event_type, resource))
                new_event = Event(
                    resource_id=resource.resource_id,
                    event_type=Event.types[event_type],
                    status=Event.statuses["active"],
                    values={"reports": amount, "threshold": threshold},
                )
                db_session.add(new_event)
                new_event.send_alerts(request=request, resource=resource)
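The referer-domain normalization in by_http_referer above can be exercised on its own; a minimal sketch with a hypothetical referer URL:

    import urllib.parse

    referer = "https://www.example.com/page?x=1"
    domain = urllib.parse.urlsplit(referer, allow_fragments=False).netloc
    if domain.startswith("www."):
        domain = domain[4:]
    print(domain)  # example.com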
@@ -21,7 +21,6 @@ from appenlight.models.services.base import BaseService


class ApplicationPostprocessConfService(BaseService):
    @classmethod
    def by_pkey(cls, pkey, db_session=None):
        db_session = get_db_session(db_session)
@@ -40,9 +40,10 @@ class ConfigService(BaseService):
        if pairs:
            conditions = []
            for pair in pairs:
                conditions.append(
                    sa.and_(
                        Config.key == pair["key"], Config.section == pair["section"]
                    )
                )

            query = query.filter(sa.or_(*conditions))
@@ -57,13 +58,15 @@ class ConfigService(BaseService):
        return config

    @classmethod
    def by_key_and_section(
        cls, key, section, auto_create=False, default_value=None, db_session=None
    ):
        db_session = get_db_session(db_session)
        registry = get_current_registry()

        @registry.cache_regions.memory_min_1.cache_on_arguments(
            namespace="ConfigService.by_key_and_section"
        )
        def cached(key, section):
            query = db_session.query(Config).filter(Config.key == key)
            query = query.filter(Config.section == section)
@@ -76,8 +79,7 @@ class ConfigService(BaseService):
        if config:
            config = db_session.merge(config, load=False)
        if config is None and auto_create:
            config = ConfigService.create_config(key, section, value=default_value)
            cached.invalidate(key, section)
        return config
@@ -87,14 +89,28 @@ class ConfigService(BaseService):
        Will add fresh default config values to database if no keys are found
        :return:
        """
        log.info("Checking/setting default values")
        self.by_key_and_section(
            "template_footer_html", "global", default_value="", auto_create=True
        )
        self.by_key_and_section(
            "list_groups_to_non_admins", "global", default_value=True, auto_create=True
        )
        self.by_key_and_section(
            "per_application_reports_rate_limit",
            "global",
            default_value=2000,
            auto_create=True,
        )
        self.by_key_and_section(
            "per_application_logs_rate_limit",
            "global",
            default_value=100000,
            auto_create=True,
        )
        self.by_key_and_section(
            "per_application_metrics_rate_limit",
            "global",
            default_value=100000,
            auto_create=True,
        )
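A hedged usage sketch of by_key_and_section, matching one of the defaults seeded above; treating the stored value as an int is an assumption about the Config row, not something this diff shows:

    config = ConfigService.by_key_and_section(
        "per_application_reports_rate_limit",
        "global",
        auto_create=True,
        default_value=2000,
    )
    # Assumption: the Config row exposes the stored setting as `.value`.
    rate_limit = int(config.value) if config else 2000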
@@ -26,10 +26,19 @@ from appenlight.models.services.base import BaseService

class EventService(BaseService):
    @classmethod
    def for_resource(
        cls,
        resource_ids,
        event_type=None,
        status=None,
        since_when=None,
        limit=20,
        event_id=None,
        target_uuid=None,
        order_by=None,
        or_target_user_id=None,
        db_session=None,
    ):
        """
        Fetches events including based on passed params OR if target_user_id
        is present include events that just target this user
@@ -57,8 +66,7 @@ class EventService(BaseService):
        if or_target_user_id:
            or_cond.append(sa.or_(Event.target_user_id == or_target_user_id))

        query = query.filter(sa.or_(sa.and_(*and_cond), *or_cond))
        if not order_by:
            query = query.order_by(sa.desc(Event.start_date))
        if limit:
@@ -67,8 +75,15 @@ class EventService(BaseService):
        return query

    @classmethod
    def by_type_and_status(
        cls,
        event_types,
        status_types,
        since_when=None,
        older_than=None,
        db_session=None,
        app_ids=None,
    ):
        db_session = get_db_session(db_session)
        query = db_session.query(Event)
        query = query.filter(Event.event_type.in_(event_types))
@@ -84,26 +99,38 @@ class EventService(BaseService):
    @classmethod
    def latest_for_user(cls, user, db_session=None):
        registry = get_current_registry()
        resources = UserService.resources_with_perms(
            user, ["view"], resource_types=registry.resource_types
        )
        resource_ids = [r.resource_id for r in resources]
        db_session = get_db_session(db_session)
        return EventService.for_resource(
            resource_ids, or_target_user_id=user.id, limit=10, db_session=db_session
        )

    @classmethod
    def get_paginator(
        cls,
        user,
        page=1,
        item_count=None,
        items_per_page=50,
        order_by=None,
        filter_settings=None,
        db_session=None,
    ):
        if not filter_settings:
            filter_settings = {}
        registry = get_current_registry()
        resources = UserService.resources_with_perms(
            user, ["view"], resource_types=registry.resource_types
        )
        resource_ids = [r.resource_id for r in resources]
        query = EventService.for_resource(
            resource_ids, or_target_user_id=user.id, limit=100, db_session=db_session
        )

        paginator = SqlalchemyOrmPage(
            query, page=page, items_per_page=items_per_page, **filter_settings
        )
        return paginator
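Usage sketch: this is exactly how check_for_groups_alert (earlier in this diff) probes for an alert that is already open; resource here is a hypothetical application object:

    events = EventService.for_resource(
        [resource.resource_id],
        event_type=Event.types["error_report_alert"],
        status=Event.statuses["active"],
    )
    # for_resource returns a query, so .first() tells us whether an
    # active alert already exists for this application.
    already_open = events.first() is not None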
@@ -16,18 +16,20 @@

from appenlight.models.group_resource_permission import GroupResourcePermission
from appenlight.models import get_db_session
from ziggurat_foundations.models.services.group_resource_permission import (
    GroupResourcePermissionService,
)


class GroupResourcePermissionService(GroupResourcePermissionService):
    @classmethod
    def by_resource_group_and_perm(
        cls, group_id, perm_name, resource_id, db_session=None
    ):
        """ return all instances by user name, perm name and resource id """
        db_session = get_db_session(db_session)
        query = db_session.query(GroupResourcePermission)
        query = query.filter(GroupResourcePermission.group_id == group_id)
        query = query.filter(GroupResourcePermission.resource_id == resource_id)
        query = query.filter(GroupResourcePermission.perm_name == perm_name)
        return query.first()
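Usage sketch with hypothetical ids: check for an existing grant before inserting a duplicate permission row.

    perm = GroupResourcePermissionService.by_resource_group_and_perm(
        group_id=3, perm_name="view", resource_id=42
    )
    if perm is None:
        pass  # no row yet; safe to grant "view" to group 3 on resource 42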
@@ -28,8 +28,7 @@ log = logging.getLogger(__name__)

class LogService(BaseService):
    @classmethod
    def get_logs(cls, resource_ids=None, filter_settings=None, db_session=None):
        # ensure we always have id's passed
        if not resource_ids:
            # raise Exception('No App ID passed')
@@ -37,18 +36,17 @@ class LogService(BaseService):
        db_session = get_db_session(db_session)
        q = db_session.query(Log)
        q = q.filter(Log.resource_id.in_(resource_ids))
        if filter_settings.get("start_date"):
            q = q.filter(Log.timestamp >= filter_settings.get("start_date"))
        if filter_settings.get("end_date"):
            q = q.filter(Log.timestamp <= filter_settings.get("end_date"))
        if filter_settings.get("log_level"):
            q = q.filter(Log.log_level == filter_settings.get("log_level").upper())
        if filter_settings.get("request_id"):
            request_id = filter_settings.get("request_id", "")
            q = q.filter(Log.request_id == request_id.replace("-", ""))
        if filter_settings.get("namespace"):
            q = q.filter(Log.namespace == filter_settings.get("namespace"))
        q = q.order_by(sa.desc(Log.timestamp))
        return q

@@ -60,20 +58,18 @@ class LogService(BaseService):
        query = {
            "query": {
                "filtered": {
                    "filter": {"and": [{"terms": {"resource_id": list(app_ids)}}]}
                }
            }
        }

        start_date = filter_settings.get("start_date")
        end_date = filter_settings.get("end_date")
        filter_part = query["query"]["filtered"]["filter"]["and"]

        for tag in filter_settings.get("tags", []):
            tag_values = [v.lower() for v in tag["value"]]
            key = "tags.%s.values" % tag["name"].replace(".", "_")
            filter_part.append({"terms": {key: tag_values}})

        date_range = {"range": {"timestamp": {}}}
@@ -84,26 +80,21 @@ class LogService(BaseService):
        if start_date or end_date:
            filter_part.append(date_range)

        levels = filter_settings.get("level")
        if levels:
            filter_part.append({"terms": {"log_level": levels}})
        namespaces = filter_settings.get("namespace")
        if namespaces:
            filter_part.append({"terms": {"namespace": namespaces}})

        request_ids = filter_settings.get("request_id")
        if request_ids:
            filter_part.append({"terms": {"request_id": request_ids}})

        messages = filter_settings.get("message")
        if messages:
            query["query"]["filtered"]["query"] = {
                "match": {"message": {"query": " ".join(messages), "operator": "and"}}
            }
        return query

@@ -118,76 +109,96 @@ class LogService(BaseService):
                        "field": "timestamp",
                        "interval": "1h",
                        "min_doc_count": 0,
                        "extended_bounds": {
                            "max": filter_settings.get("end_date"),
                            "min": filter_settings.get("start_date"),
                        },
                    }
                }
            }
        log.debug(es_query)
        index_names = es_index_name_limiter(
            filter_settings.get("start_date"),
            filter_settings.get("end_date"),
            ixtypes=["logs"],
        )
        if index_names:
            results = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
        else:
            results = []
        return results

    @classmethod
    def get_search_iterator(
        cls,
        app_ids=None,
        page=1,
        items_per_page=50,
        order_by=None,
        filter_settings=None,
        limit=None,
    ):
        if not app_ids:
            return {}, 0

        es_query = cls.es_query_builder(app_ids, filter_settings)
        sort_query = {"sort": [{"timestamp": {"order": "desc"}}]}
        es_query.update(sort_query)
        log.debug(es_query)
        es_from = (page - 1) * items_per_page
        index_names = es_index_name_limiter(
            filter_settings.get("start_date"),
            filter_settings.get("end_date"),
            ixtypes=["logs"],
        )
        if not index_names:
            return {}, 0

        results = Datastores.es.search(
            body=es_query,
            index=index_names,
            doc_type="log",
            size=items_per_page,
            from_=es_from,
        )
        if results["hits"]["total"] > 5000:
            count = 5000
        else:
            count = results["hits"]["total"]
        return results["hits"], count

    @classmethod
    def get_paginator_by_app_ids(
        cls,
        app_ids=None,
        page=1,
        item_count=None,
        items_per_page=50,
        order_by=None,
        filter_settings=None,
        exclude_columns=None,
        db_session=None,
    ):
        if not filter_settings:
            filter_settings = {}
        results, item_count = cls.get_search_iterator(
            app_ids, page, items_per_page, order_by, filter_settings
        )
        paginator = paginate.Page(
            [], item_count=item_count, items_per_page=items_per_page, **filter_settings
        )
        ordered_ids = tuple(
            item["_source"]["pg_id"] for item in results.get("hits", [])
        )

        sorted_instance_list = []
        if ordered_ids:
            db_session = get_db_session(db_session)
            query = db_session.query(Log)
            query = query.filter(Log.log_id.in_(ordered_ids))
            query = query.order_by(sa.desc("timestamp"))
            sa_items = query.all()
            # resort by score
            for i_id in ordered_ids:
@@ -198,14 +209,14 @@ class LogService(BaseService):
        return paginator

    @classmethod
    def query_by_primary_key_and_namespace(cls, list_of_pairs, db_session=None):
        db_session = get_db_session(db_session)
        list_of_conditions = []
        query = db_session.query(Log)
        for pair in list_of_pairs:
            list_of_conditions.append(
                sa.and_(Log.primary_key == pair["pk"], Log.namespace == pair["ns"])
            )
        query = query.filter(sa.or_(*list_of_conditions))
        query = query.order_by(sa.asc(Log.timestamp), sa.asc(Log.log_id))
        return query
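For orientation, a query assembled by es_query_builder above for two hypothetical apps with a message filter comes out roughly like this (the Elasticsearch 1.x "filtered" syntax used throughout this service):

    example_query = {
        "query": {
            "filtered": {
                "filter": {"and": [{"terms": {"resource_id": [1, 2]}}]},
                "query": {
                    "match": {"message": {"query": "timeout", "operator": "and"}}
                },
            }
        }
    }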
@@ -38,8 +38,9 @@ class PluginConfigService(BaseService):
        return query.first()

    @classmethod
    def by_query(
        cls, resource_id=None, plugin_name=None, section=None, db_session=None
    ):
        db_session = get_db_session(db_session)

        query = db_session.query(PluginConfig)
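A usage sketch, under the assumption that by_query returns the assembled SQLAlchemy query (the hunk above is cut off before the return); ids and names are hypothetical:

    configs = PluginConfigService.by_query(
        resource_id=42, plugin_name="some_plugin", section="global"
    )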
@@ -37,21 +37,24 @@ class ReportService(BaseService):
        return q

    @classmethod
    def generate_stat_rows(
        cls, report, resource, report_group, occurences=1, db_session=None
    ):
        """
        Generates timeseries for this report's group
        """
        db_session = get_db_session(db_session)
        stats = ReportStat(
            resource_id=report.resource_id,
            group_id=report_group.id,
            start_interval=report.start_time,
            owner_user_id=resource.owner_user_id,
            server_name=report.tags.get("server_name"),
            view_name=report.tags.get("view_name"),
            type=report.report_type,
            occurences=occurences,
            duration=report.duration,
        )
        db_session.add(stats)
        db_session.flush()
        return stats
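Usage sketch tying this back to the ReportStat model at the top of this section; report, resource and report_group are hypothetical ORM objects already in the session:

    stats = ReportService.generate_stat_rows(
        report, resource, report_group, occurences=3
    )
    # The row's monthly partition follows from its start_interval.
    assert stats.partition_id.startswith("rcae_r_")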