@@ -1,81 +1,89 b'' | |||
|
1 | 1 | import os |
|
2 | 2 | import re |
|
3 | 3 | |
|
4 | 4 | from setuptools import setup, find_packages |
|
5 | 5 | |
|
6 | 6 | here = os.path.abspath(os.path.dirname(__file__)) |
|
7 | README = open(os.path.join(here, 'README.rst')).read() | |

8 | CHANGES = open(os.path.join(here, 'CHANGELOG.rst')).read() | |
|
7 | README = open(os.path.join(here, "README.rst")).read() | |
|
8 | CHANGES = open(os.path.join(here, "CHANGELOG.rst")).read() | |
|
9 | 9 | |
|
10 | REQUIREMENTS = open(os.path.join(here, 'requirements.txt')).readlines() | |
|
10 | REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines() | |
|
11 | 11 | |
|
12 | compiled = re.compile('([^=><]*).*') | |
|
12 | compiled = re.compile("([^=><]*).*") | |
|
13 | 13 | |
|
14 | 14 | |
|
15 | 15 | def parse_req(req): |
|
16 | 16 | return compiled.search(req).group(1).strip() |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f] |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | def _get_meta_var(name, data, callback_handler=None): |
|
23 | 23 | import re |
|
24 | matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data) | |
|
24 | ||
|
25 | matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data) | |
|
25 | 26 | if matches: |
|
26 | 27 | if not callable(callback_handler): |
|
27 | 28 | callback_handler = lambda v: v |
|
28 | 29 | |
|
29 | 30 | return callback_handler(eval(matches.groups()[0])) |
|
30 | 31 | |
|
31 | 32 | |
|
32 | with open(os.path.join(here, 'src', 'appenlight', '__init__.py'), 'r') as _meta: | |
|
33 | with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta: | |
|
33 | 34 | _metadata = _meta.read() |
|
34 | 35 | |
|
35 | with open(os.path.join(here, 'VERSION'), 'r') as _meta_version: | |
|
36 | with open(os.path.join(here, "VERSION"), "r") as _meta_version: | |
|
36 | 37 | __version__ = _meta_version.read().strip() |
|
37 | 38 | |
|
38 | __license__ = _get_meta_var('__license__', _metadata) | |

39 | __author__ = _get_meta_var('__author__', _metadata) | |

40 | __url__ = _get_meta_var('__url__', _metadata) | |
|
41 | ||
|
42 | found_packages = find_packages('src') | |

43 | found_packages.append('appenlight.migrations.versions') | |
|
44 | setup(name='appenlight', | |
|
45 | description='appenlight', | |
|
46 | long_description=README + '\n\n' + CHANGES, | |
|
47 | classifiers=[ | |
|
48 | "Programming Language :: Python", | |
|
49 | "Framework :: Pylons", | |
|
50 | "Topic :: Internet :: WWW/HTTP", | |
|
51 | "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", | |
|
52 | ], | |
|
53 | version=__version__, | |
|
54 | license=__license__, | |
|
55 | author=__author__, | |
|
56 | url=__url__, | |
|
57 | keywords='web wsgi bfg pylons pyramid', | |
|
58 | package_dir={'': 'src'}, | |
|
59 | packages=found_packages, | |
|
60 | include_package_data=True, | |
|
61 | zip_safe=False, | |
|
62 | test_suite='appenlight', | |
|
63 | install_requires=requires, | |
|
64 | extras_require={ | |
|
65 | "dev": ["coverage", "pytest", "pyramid", "tox", "mock", "pytest-mock", "webtest"], | |
|
66 | "lint": ["black"], | |
|
67 | }, | |
|
68 | entry_points={ | |
|
69 | 'paste.app_factory': [ | |
|
70 | 'main = appenlight:main' | |
|
71 | ], | |
|
72 | 'console_scripts': [ | |
|
73 | 'appenlight-cleanup = appenlight.scripts.cleanup:main', | |
|
74 | 'appenlight-initializedb = appenlight.scripts.initialize_db:main', | |
|
75 | 'appenlight-migratedb = appenlight.scripts.migratedb:main', | |
|
76 | 'appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main', | |
|
77 | 'appenlight-static = appenlight.scripts.static:main', | |
|
78 | 'appenlight-make-config = appenlight.scripts.make_config:main', | |
|
79 | ] | |
|
80 | } | |
|
81 | ) | |
|
39 | __license__ = _get_meta_var("__license__", _metadata) | |
|
40 | __author__ = _get_meta_var("__author__", _metadata) | |
|
41 | __url__ = _get_meta_var("__url__", _metadata) | |
|
42 | ||
|
43 | found_packages = find_packages("src") | |
|
44 | found_packages.append("appenlight.migrations.versions") | |
|
45 | setup( | |
|
46 | name="appenlight", | |
|
47 | description="appenlight", | |
|
48 | long_description=README + "\n\n" + CHANGES, | |
|
49 | classifiers=[ | |
|
50 | "Programming Language :: Python", | |
|
51 | "Framework :: Pylons", | |
|
52 | "Topic :: Internet :: WWW/HTTP", | |
|
53 | "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", | |
|
54 | ], | |
|
55 | version=__version__, | |
|
56 | license=__license__, | |
|
57 | author=__author__, | |
|
58 | url=__url__, | |
|
59 | keywords="web wsgi bfg pylons pyramid", | |
|
60 | package_dir={"": "src"}, | |
|
61 | packages=found_packages, | |
|
62 | include_package_data=True, | |
|
63 | zip_safe=False, | |
|
64 | test_suite="appenlight", | |
|
65 | install_requires=requires, | |
|
66 | extras_require={ | |
|
67 | "dev": [ | |
|
68 | "coverage", | |
|
69 | "pytest", | |
|
70 | "pyramid", | |
|
71 | "tox", | |
|
72 | "mock", | |
|
73 | "pytest-mock", | |
|
74 | "webtest", | |
|
75 | ], | |
|
76 | "lint": ["black"], | |
|
77 | }, | |
|
78 | entry_points={ | |
|
79 | "paste.app_factory": ["main = appenlight:main"], | |
|
80 | "console_scripts": [ | |
|
81 | "appenlight-cleanup = appenlight.scripts.cleanup:main", | |
|
82 | "appenlight-initializedb = appenlight.scripts.initialize_db:main", | |
|
83 | "appenlight-migratedb = appenlight.scripts.migratedb:main", | |
|
84 | "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main", | |
|
85 | "appenlight-static = appenlight.scripts.static:main", | |
|
86 | "appenlight-make-config = appenlight.scripts.make_config:main", | |
|
87 | ], | |
|
88 | }, | |
|
89 | ) |
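
Note: the compiled pattern "([^=><]*).*" in setup.py is what parse_req uses to strip version pins from each requirements.txt line, keeping only the bare package name. A minimal standalone sketch of that behavior; the sample requirement strings below are invented for illustration, not taken from the real requirements.txt:

import re

compiled = re.compile("([^=><]*).*")

def parse_req(req):
    # group(1) captures everything before the first '=', '>' or '<'
    return compiled.search(req).group(1).strip()

# hypothetical requirement lines
for line in ["pyramid==1.10", "redis>=2.10.0", "celery"]:
    print(parse_req(line))  # -> pyramid, redis, celery
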
@@ -1,225 +1,254 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import datetime |
|
18 | 18 | import logging |
|
19 | 19 | from elasticsearch import Elasticsearch |
|
20 | 20 | import redis |
|
21 | 21 | import os |
|
22 | 22 | import pkg_resources |
|
23 | 23 | from pkg_resources import iter_entry_points |
|
24 | 24 | |
|
25 | 25 | import appenlight.lib.jinja2_filters as jinja2_filters |
|
26 | 26 | import appenlight.lib.encryption as encryption |
|
27 | 27 | |
|
28 | 28 | from pyramid.config import PHASE3_CONFIG |
|
29 | 29 | from pyramid.authentication import AuthTktAuthenticationPolicy |
|
30 | 30 | from pyramid.authorization import ACLAuthorizationPolicy |
|
31 | 31 | from pyramid_mailer.interfaces import IMailer |
|
32 | 32 | from pyramid.renderers import JSON |
|
33 | 33 | from pyramid_redis_sessions import session_factory_from_settings |
|
34 | 34 | from pyramid.settings import asbool, aslist |
|
35 | 35 | from pyramid.security import AllPermissionsList |
|
36 | 36 | from pyramid_authstack import AuthenticationStackPolicy |
|
37 | 37 | from redlock import Redlock |
|
38 | 38 | from sqlalchemy import engine_from_config |
|
39 | 39 | |
|
40 | 40 | from appenlight.celery import configure_celery |
|
41 | from appenlight.lib.configurator import (CythonCompatConfigurator, | |
|
42 | register_appenlight_plugin) | |
|
41 | from appenlight.lib.configurator import ( | |
|
42 | CythonCompatConfigurator, | |
|
43 | register_appenlight_plugin, | |
|
44 | ) | |
|
43 | 45 | from appenlight.lib import cache_regions |
|
44 | 46 | from appenlight.lib.ext_json import json |
|
45 | 47 | from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy |
|
46 | 48 | |
|
47 | __license__ = 'Apache 2.0' | |

48 | __author__ = 'RhodeCode GmbH' | |

49 | __url__ = 'http://rhodecode.com' | |
|
49 | __license__ = "Apache 2.0" | |
|
50 | __author__ = "RhodeCode GmbH" | |
|
51 | __url__ = "http://rhodecode.com" | |
|
50 | 52 | __version__ = pkg_resources.get_distribution("appenlight").parsed_version |
|
51 | 53 | |
|
52 | 54 | json_renderer = JSON(serializer=json.dumps, indent=4) |
|
53 | 55 | |
|
54 | 56 | log = logging.getLogger(__name__) |
|
55 | 57 | |
|
56 | 58 | |
|
57 | 59 | def datetime_adapter(obj, request): |
|
58 | 60 | return obj.isoformat() |
|
59 | 61 | |
|
60 | 62 | |
|
61 | 63 | def all_permissions_adapter(obj, request): |
|
62 | return '__all_permissions__' | |
|
64 | return "__all_permissions__" | |
|
63 | 65 | |
|
64 | 66 | |
|
65 | 67 | json_renderer.add_adapter(datetime.datetime, datetime_adapter) |
|
66 | 68 | json_renderer.add_adapter(AllPermissionsList, all_permissions_adapter) |
|
67 | 69 | |
|
68 | 70 | |
|
69 | 71 | def main(global_config, **settings): |
|
70 | 72 | """ This function returns a Pyramid WSGI application. |
|
71 | 73 | """ |
|
72 | 74 | auth_tkt_policy = AuthTktAuthenticationPolicy( |
|
73 | settings['authtkt.secret'], | |

74 | hashalg='sha512', | |
|
75 | settings["authtkt.secret"], | |
|
76 | hashalg="sha512", | |
|
75 | 77 | callback=groupfinder, |
|
76 | 78 | max_age=2592000, |
|
77 | secure=asbool(settings.get('authtkt.secure', 'false'))) | |
|
78 | auth_token_policy = AuthTokenAuthenticationPolicy( | |
|
79 | callback=groupfinder | |
|
79 | secure=asbool(settings.get("authtkt.secure", "false")), | |
|
80 | 80 | ) |
|
81 | auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder) | |
|
81 | 82 | authorization_policy = ACLAuthorizationPolicy() |
|
82 | 83 | authentication_policy = AuthenticationStackPolicy() |
|
83 | authentication_policy.add_policy('auth_tkt', auth_tkt_policy) | |

84 | authentication_policy.add_policy('auth_token', auth_token_policy) | |
|
84 | authentication_policy.add_policy("auth_tkt", auth_tkt_policy) | |
|
85 | authentication_policy.add_policy("auth_token", auth_token_policy) | |
|
85 | 86 | # set crypto key |
|
86 | encryption.ENCRYPTION_SECRET = settings.get('encryption_secret') | |
|
87 | encryption.ENCRYPTION_SECRET = settings.get("encryption_secret") | |
|
87 | 88 | # import this later so encyption key can be monkeypatched |
|
88 | 89 | from appenlight.models import DBSession, register_datastores |
|
89 | 90 | |
|
90 | 91 | # registration |
|
91 | settings['appenlight.disable_registration'] = asbool( | |

92 | settings.get('appenlight.disable_registration')) | |
|
92 | settings["appenlight.disable_registration"] = asbool( | |
|
93 | settings.get("appenlight.disable_registration") | |
|
94 | ) | |
|
93 | 95 | |
|
94 | 96 | # update config with cometd info |
|
95 | settings['cometd_servers'] = {'server': settings['cometd.server'], | |

96 | 'secret': settings['cometd.secret']} | |
|
97 | settings["cometd_servers"] = { | |
|
98 | "server": settings["cometd.server"], | |
|
99 | "secret": settings["cometd.secret"], | |
|
100 | } | |
|
97 | 101 | |
|
98 | 102 | # Create the Pyramid Configurator. |
|
99 | settings['_mail_url'] = settings['mailing.app_url'] | |
|
103 | settings["_mail_url"] = settings["mailing.app_url"] | |
|
100 | 104 | config = CythonCompatConfigurator( |
|
101 | 105 | settings=settings, |
|
102 | 106 | authentication_policy=authentication_policy, |
|
103 | 107 | authorization_policy=authorization_policy, |
|
104 | root_factory='appenlight.security.RootFactory', | |

105 | default_permission='view') | |
|
108 | root_factory="appenlight.security.RootFactory", | |
|
109 | default_permission="view", | |
|
110 | ) | |
|
106 | 111 | # custom registry variables |
|
107 | 112 | |
|
108 | 113 | # resource type information |
|
109 | config.registry.resource_types = ['resource', 'application'] | |
|
114 | config.registry.resource_types = ["resource", "application"] | |
|
110 | 115 | # plugin information |
|
111 | 116 | config.registry.appenlight_plugins = {} |
|
112 | 117 | |
|
113 | config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN') | |

114 | config.add_view_deriver('appenlight.predicates.csrf_view', | |
|
115 | name='csrf_view') | |
|
118 | config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN") | |
|
119 | config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view") | |
|
116 | 120 | |
|
117 | 121 | # later, when config is available |
|
118 | dogpile_config = {'url': settings['redis.url'], | |
|
119 | "redis_expiration_time": 86400, | |
|
120 | "redis_distributed_lock": True} | |
|
122 | dogpile_config = { | |
|
123 | "url": settings["redis.url"], | |
|
124 | "redis_expiration_time": 86400, | |
|
125 | "redis_distributed_lock": True, | |
|
126 | } | |
|
121 | 127 | cache_regions.regions = cache_regions.CacheRegions(dogpile_config) |
|
122 | 128 | config.registry.cache_regions = cache_regions.regions |
|
123 | engine = engine_from_config(settings, 'sqlalchemy.', | |
|
124 | json_serializer=json.dumps) | |
|
129 | engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps) | |
|
125 | 130 | DBSession.configure(bind=engine) |
|
126 | 131 | |
|
127 | 132 | # json rederer that serializes datetime |
|
128 | config.add_renderer('json', json_renderer) | |
|
129 | config.add_request_method('appenlight.lib.request.es_conn', 'es_conn', property=True) | |
|
130 | config.add_request_method('appenlight.lib.request.get_user', 'user', | |
|
131 | reify=True, property=True) | |
|
132 | config.add_request_method('appenlight.lib.request.get_csrf_token', | |
|
133 | 'csrf_token', reify=True, property=True) | |
|
134 | config.add_request_method('appenlight.lib.request.safe_json_body', | |
|
135 | 'safe_json_body', reify=True, property=True) | |
|
136 | config.add_request_method('appenlight.lib.request.unsafe_json_body', | |
|
137 | 'unsafe_json_body', reify=True, property=True) | |
|
138 | config.add_request_method('appenlight.lib.request.add_flash_to_headers', | |
|
139 | 'add_flash_to_headers') | |
|
140 | config.add_request_method('appenlight.lib.request.get_authomatic', | |
|
141 | 'authomatic', reify=True) | |
|
142 | ||
|
143 | config.include('pyramid_redis_sessions') | |
|
144 | config.include('pyramid_tm') | |
|
145 | config.include('pyramid_jinja2') | |
|
146 | config.include('pyramid_mailer') | |
|
147 | config.include('appenlight_client.ext.pyramid_tween') | |
|
148 | config.include('ziggurat_foundations.ext.pyramid.sign_in') | |
|
149 | es_server_list = aslist(settings['elasticsearch.nodes']) | |
|
150 | redis_url = settings['redis.url'] | |
|
151 | log.warning('Elasticsearch server list: {}'.format(es_server_list)) | |
|
152 | log.warning('Redis server: {}'.format(redis_url)) | |
|
133 | config.add_renderer("json", json_renderer) | |
|
134 | config.add_request_method( | |
|
135 | "appenlight.lib.request.es_conn", "es_conn", property=True | |
|
136 | ) | |
|
137 | config.add_request_method( | |
|
138 | "appenlight.lib.request.get_user", "user", reify=True, property=True | |
|
139 | ) | |
|
140 | config.add_request_method( | |
|
141 | "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True | |
|
142 | ) | |
|
143 | config.add_request_method( | |
|
144 | "appenlight.lib.request.safe_json_body", | |
|
145 | "safe_json_body", | |
|
146 | reify=True, | |
|
147 | property=True, | |
|
148 | ) | |
|
149 | config.add_request_method( | |
|
150 | "appenlight.lib.request.unsafe_json_body", | |
|
151 | "unsafe_json_body", | |
|
152 | reify=True, | |
|
153 | property=True, | |
|
154 | ) | |
|
155 | config.add_request_method( | |
|
156 | "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers" | |
|
157 | ) | |
|
158 | config.add_request_method( | |
|
159 | "appenlight.lib.request.get_authomatic", "authomatic", reify=True | |
|
160 | ) | |
|
161 | ||
|
162 | config.include("pyramid_redis_sessions") | |
|
163 | config.include("pyramid_tm") | |
|
164 | config.include("pyramid_jinja2") | |
|
165 | config.include("pyramid_mailer") | |
|
166 | config.include("appenlight_client.ext.pyramid_tween") | |
|
167 | config.include("ziggurat_foundations.ext.pyramid.sign_in") | |
|
168 | es_server_list = aslist(settings["elasticsearch.nodes"]) | |
|
169 | redis_url = settings["redis.url"] | |
|
170 | log.warning("Elasticsearch server list: {}".format(es_server_list)) | |
|
171 | log.warning("Redis server: {}".format(redis_url)) | |
|
153 | 172 | config.registry.es_conn = Elasticsearch(es_server_list) |
|
154 | 173 | config.registry.redis_conn = redis.StrictRedis.from_url(redis_url) |
|
155 | 174 | |
|
156 | config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url']], | |
|
157 | retry_count=0, retry_delay=0) | |
|
175 | config.registry.redis_lockmgr = Redlock( | |
|
176 | [settings["redis.redlock.url"]], retry_count=0, retry_delay=0 | |
|
177 | ) | |
|
158 | 178 | # mailer bw compat |
|
159 | 179 | config.registry.mailer = config.registry.getUtility(IMailer) |
|
160 | 180 | |
|
161 | 181 | # Configure sessions |
|
162 | 182 | session_factory = session_factory_from_settings(settings) |
|
163 | 183 | config.set_session_factory(session_factory) |
|
164 | 184 | |
|
165 | 185 | # Configure renderers and event subscribers |
|
166 | config.add_jinja2_extension('jinja2.ext.loopcontrols') | |

167 | config.add_jinja2_search_path('appenlight:templates') | |
|
186 | config.add_jinja2_extension("jinja2.ext.loopcontrols") | |
|
187 | config.add_jinja2_search_path("appenlight:templates") | |
|
168 | 188 | # event subscribers |
|
169 | config.add_subscriber("appenlight.subscribers.application_created", | |
|
170 | "pyramid.events.ApplicationCreated") | |
|
171 | config.add_subscriber("appenlight.subscribers.add_renderer_globals", | |
|
172 | "pyramid.events.BeforeRender") | |
|
173 | config.add_subscriber('appenlight.subscribers.new_request', | |
|
174 | 'pyramid.events.NewRequest') | |
|
175 | config.add_view_predicate('context_type_class', | |
|
176 | 'appenlight.predicates.contextTypeClass') | |
|
177 | ||
|
178 | register_datastores(es_conn=config.registry.es_conn, | |
|
179 | redis_conn=config.registry.redis_conn, | |
|
180 | redis_lockmgr=config.registry.redis_lockmgr) | |
|
189 | config.add_subscriber( | |
|
190 | "appenlight.subscribers.application_created", | |
|
191 | "pyramid.events.ApplicationCreated", | |
|
192 | ) | |
|
193 | config.add_subscriber( | |
|
194 | "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender" | |
|
195 | ) | |
|
196 | config.add_subscriber( | |
|
197 | "appenlight.subscribers.new_request", "pyramid.events.NewRequest" | |
|
198 | ) | |
|
199 | config.add_view_predicate( | |
|
200 | "context_type_class", "appenlight.predicates.contextTypeClass" | |
|
201 | ) | |
|
202 | ||
|
203 | register_datastores( | |
|
204 | es_conn=config.registry.es_conn, | |
|
205 | redis_conn=config.registry.redis_conn, | |
|
206 | redis_lockmgr=config.registry.redis_lockmgr, | |
|
207 | ) | |
|
181 | 208 | |
|
182 | 209 | # base stuff and scan |
|
183 | 210 | |
|
184 | 211 | # need to ensure webassets exists otherwise config.override_asset() |
|
185 | 212 | # throws exception |
|
186 | if not os.path.exists(settings['webassets.dir']): | |

187 | os.mkdir(settings['webassets.dir']) | |

188 | config.add_static_view(path='appenlight:webassets', | |

189 | name='static', cache_max_age=3600) | |
|
190 | config.override_asset(to_override='appenlight:webassets/', | |
|
191 | override_with=settings['webassets.dir']) | |
|
192 | ||
|
193 | config.include('appenlight.views') | |
|
194 | config.include('appenlight.views.admin') | |
|
195 | config.scan(ignore=['appenlight.migrations', 'appenlight.scripts', | |
|
196 | 'appenlight.tests']) | |
|
197 | ||
|
198 | config.add_directive('register_appenlight_plugin', | |
|
199 | register_appenlight_plugin) | |
|
200 | ||
|
201 | for entry_point in iter_entry_points(group='appenlight.plugins'): | |
|
213 | if not os.path.exists(settings["webassets.dir"]): | |
|
214 | os.mkdir(settings["webassets.dir"]) | |
|
215 | config.add_static_view( | |
|
216 | path="appenlight:webassets", name="static", cache_max_age=3600 | |
|
217 | ) | |
|
218 | config.override_asset( | |
|
219 | to_override="appenlight:webassets/", override_with=settings["webassets.dir"] | |
|
220 | ) | |
|
221 | ||
|
222 | config.include("appenlight.views") | |
|
223 | config.include("appenlight.views.admin") | |
|
224 | config.scan( | |
|
225 | ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"] | |
|
226 | ) | |
|
227 | ||
|
228 | config.add_directive("register_appenlight_plugin", register_appenlight_plugin) | |
|
229 | ||
|
230 | for entry_point in iter_entry_points(group="appenlight.plugins"): | |
|
202 | 231 | plugin = entry_point.load() |
|
203 | 232 | plugin.includeme(config) |
|
204 | 233 | |
|
205 | 234 | # include other appenlight plugins explictly if needed |
|
206 | includes = aslist(settings.get('appenlight.includes', [])) | |
|
235 | includes = aslist(settings.get("appenlight.includes", [])) | |
|
207 | 236 | for inc in includes: |
|
208 | 237 | config.include(inc) |
|
209 | 238 | |
|
210 | 239 | # run this after everything registers in configurator |
|
211 | 240 | |
|
212 | 241 | def pre_commit(): |
|
213 | 242 | jinja_env = config.get_jinja2_environment() |
|
214 | jinja_env.filters['tojson'] = json.dumps | |

215 | jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe | |
|
243 | jinja_env.filters["tojson"] = json.dumps | |
|
244 | jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe | |
|
216 | 245 | |
|
217 | 246 | config.action(None, pre_commit, order=PHASE3_CONFIG + 999) |
|
218 | 247 | |
|
219 | 248 | def wrap_config_celery(): |
|
220 | 249 | configure_celery(config.registry) |
|
221 | 250 | |
|
222 | 251 | config.action(None, wrap_config_celery, order=PHASE3_CONFIG + 999) |
|
223 | 252 | |
|
224 | 253 | app = config.make_wsgi_app() |
|
225 | 254 | return app |
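
Note: several add_request_method calls above use reify=True, meaning the underlying callable runs once per request and its result is then cached on the request object (request.user, request.csrf_token, and so on). A simplified sketch of how such a reified property behaves; this hand-rolled descriptor only illustrates the idea and is not Pyramid's actual implementation:

class reify:
    # non-data descriptor: after the first access, the computed value is
    # stored on the instance and shadows the descriptor from then on
    def __init__(self, wrapped):
        self.wrapped = wrapped

    def __get__(self, inst, objtype=None):
        if inst is None:
            return self
        val = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, val)  # cache on the instance
        return val

class Request:
    @reify
    def user(self):
        print("looked up once")
        return {"id": 1}

req = Request()
req.user  # prints "looked up once"
req.user  # served from the cached instance attribute
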
@@ -1,171 +1,181 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | from datetime import timedelta |
|
20 | 20 | from celery import Celery |
|
21 | 21 | from celery.bin import Option |
|
22 | 22 | from celery.schedules import crontab |
|
23 | 23 | from celery.signals import worker_init, task_revoked, user_preload_options |
|
24 | 24 | from celery.signals import task_prerun, task_retry, task_failure, task_success |
|
25 | 25 | from kombu.serialization import register |
|
26 | 26 | from pyramid.paster import bootstrap |
|
27 | 27 | from pyramid.request import Request |
|
28 | 28 | from pyramid.scripting import prepare |
|
29 | 29 | from pyramid.settings import asbool |
|
30 | 30 | from pyramid.threadlocal import get_current_request |
|
31 | 31 | |
|
32 | 32 | from appenlight.celery.encoders import json_dumps, json_loads |
|
33 | 33 | from appenlight_client.ext.celery import register_signals |
|
34 | 34 | |
|
35 | 35 | log = logging.getLogger(__name__) |
|
36 | 36 | |
|
37 | register('date_json', json_dumps, json_loads, | |
|
38 | content_type='application/x-date_json', | |
|
39 | content_encoding='utf-8') | |
|
37 | register( | |
|
38 | "date_json", | |
|
39 | json_dumps, | |
|
40 | json_loads, | |
|
41 | content_type="application/x-date_json", | |
|
42 | content_encoding="utf-8", | |
|
43 | ) | |
|
40 | 44 | |
|
41 | 45 | celery = Celery() |
|
42 | 46 | |
|
43 | celery.user_options['preload'].add( | |
|
44 | Option('--ini', dest='ini', default=None, | |
|
45 | help='Specifies pyramid configuration file location.') | |
|
47 | celery.user_options["preload"].add( | |
|
48 | Option( | |
|
49 | "--ini", | |
|
50 | dest="ini", | |
|
51 | default=None, | |
|
52 | help="Specifies pyramid configuration file location.", | |
|
53 | ) | |
|
46 | 54 | ) |
|
47 | 55 | |
|
48 | 56 | |
|
49 | 57 | @user_preload_options.connect |
|
50 | 58 | def on_preload_parsed(options, **kwargs): |
|
51 | 59 | """ |
|
52 | 60 | This actually configures celery from pyramid config file |
|
53 | 61 | """ |
|
54 | celery.conf['INI_PYRAMID'] = options['ini'] | |
|
62 | celery.conf["INI_PYRAMID"] = options["ini"] | |
|
55 | 63 | import appenlight_client.client as e_client |
|
56 | ini_location = options['ini'] | |
|
64 | ||
|
65 | ini_location = options["ini"] | |
|
57 | 66 | if not ini_location: |
|
58 | raise Exception('You need to pass pyramid ini location using ' | |
|
59 | '--ini=filename.ini argument to the worker') | |
|
67 | raise Exception( | |
|
68 | "You need to pass pyramid ini location using " | |
|
69 | "--ini=filename.ini argument to the worker" | |
|
70 | ) | |
|
60 | 71 | env = bootstrap(ini_location[0]) |
|
61 | api_key = env['request'].registry.settings['appenlight.api_key'] | |

62 | tr_config = env['request'].registry.settings.get( | |
|
63 | 'appenlight.transport_config') | |
|
64 | CONFIG = e_client.get_config({'appenlight.api_key': api_key}) | |
|
72 | api_key = env["request"].registry.settings["appenlight.api_key"] | |
|
73 | tr_config = env["request"].registry.settings.get("appenlight.transport_config") | |
|
74 | CONFIG = e_client.get_config({"appenlight.api_key": api_key}) | |
|
65 | 75 | if tr_config: |
|
66 | CONFIG['appenlight.transport_config'] = tr_config | |
|
76 | CONFIG["appenlight.transport_config"] = tr_config | |
|
67 | 77 | APPENLIGHT_CLIENT = e_client.Client(CONFIG) |
|
68 | 78 | # log.addHandler(APPENLIGHT_CLIENT.log_handler) |
|
69 | 79 | register_signals(APPENLIGHT_CLIENT) |
|
70 | 80 | celery.pyramid = env |
|
71 | 81 | |
|
72 | 82 | |
|
73 | 83 | celery_config = { |
|
74 | 'CELERY_IMPORTS': ['appenlight.celery.tasks'], | |

75 | 'CELERYD_TASK_TIME_LIMIT': 60, | |

76 | 'CELERYD_MAX_TASKS_PER_CHILD': 1000, | |

77 | 'CELERY_IGNORE_RESULT': True, | |

78 | 'CELERY_ACCEPT_CONTENT': ['date_json'], | |

79 | 'CELERY_TASK_SERIALIZER': 'date_json', | |

80 | 'CELERY_RESULT_SERIALIZER': 'date_json', | |

81 | 'BROKER_URL': None, | |

82 | 'CELERYD_CONCURRENCY': None, | |

83 | 'CELERY_TIMEZONE': None, | |

84 | 'CELERYBEAT_SCHEDULE': { | |

85 | 'alerting_reports': { | |

86 | 'task': 'appenlight.celery.tasks.alerting_reports', | |

87 | 'schedule': timedelta(seconds=60) | |
|
84 | "CELERY_IMPORTS": ["appenlight.celery.tasks"], | |
|
85 | "CELERYD_TASK_TIME_LIMIT": 60, | |
|
86 | "CELERYD_MAX_TASKS_PER_CHILD": 1000, | |
|
87 | "CELERY_IGNORE_RESULT": True, | |
|
88 | "CELERY_ACCEPT_CONTENT": ["date_json"], | |
|
89 | "CELERY_TASK_SERIALIZER": "date_json", | |
|
90 | "CELERY_RESULT_SERIALIZER": "date_json", | |
|
91 | "BROKER_URL": None, | |
|
92 | "CELERYD_CONCURRENCY": None, | |
|
93 | "CELERY_TIMEZONE": None, | |
|
94 | "CELERYBEAT_SCHEDULE": { | |
|
95 | "alerting_reports": { | |
|
96 | "task": "appenlight.celery.tasks.alerting_reports", | |
|
97 | "schedule": timedelta(seconds=60), | |
|
88 | 98 | }, |
|
89 | 'close_alerts': { | |

90 | 'task': 'appenlight.celery.tasks.close_alerts', | |

91 | 'schedule': timedelta(seconds=60) | |
|
92 | } | |
|
93 | } | |
|
99 | "close_alerts": { | |
|
100 | "task": "appenlight.celery.tasks.close_alerts", | |
|
101 | "schedule": timedelta(seconds=60), | |
|
102 | }, | |
|
103 | }, | |
|
94 | 104 | } |
|
95 | 105 | celery.config_from_object(celery_config) |
|
96 | 106 | |
|
97 | 107 | |
|
98 | 108 | def configure_celery(pyramid_registry): |
|
99 | 109 | settings = pyramid_registry.settings |
|
100 | celery_config['BROKER_URL'] = settings['celery.broker_url'] | |

101 | celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency'] | |

102 | celery_config['CELERY_TIMEZONE'] = settings['celery.timezone'] | |
|
110 | celery_config["BROKER_URL"] = settings["celery.broker_url"] | |
|
111 | celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"] | |
|
112 | celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"] | |
|
103 | 113 | |
|
104 | notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60)) | |
|
114 | notifications_seconds = int( | |
|
115 | settings.get("tasks.notifications_reports.interval", 60) | |
|
116 | ) | |
|
105 | 117 | |
|
106 | celery_config['CELERYBEAT_SCHEDULE']['notifications'] = { | |

107 | 'task': 'appenlight.celery.tasks.notifications_reports', | |

108 | 'schedule': timedelta(seconds=notifications_seconds) | |
|
118 | celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = { | |
|
119 | "task": "appenlight.celery.tasks.notifications_reports", | |
|
120 | "schedule": timedelta(seconds=notifications_seconds), | |
|
109 | 121 | } |
|
110 | 122 | |
|
111 | celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = { | |

112 | 'task': 'appenlight.celery.tasks.daily_digest', | |

113 | 'schedule': crontab(minute=1, hour='4,12,20') | |
|
123 | celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = { | |
|
124 | "task": "appenlight.celery.tasks.daily_digest", | |
|
125 | "schedule": crontab(minute=1, hour="4,12,20"), | |
|
114 | 126 | } |
|
115 | 127 | |
|
116 | if asbool(settings.get('celery.always_eager')): | |

117 | celery_config['CELERY_ALWAYS_EAGER'] = True | |

118 | celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True | |
|
128 | if asbool(settings.get("celery.always_eager")): | |
|
129 | celery_config["CELERY_ALWAYS_EAGER"] = True | |
|
130 | celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True | |
|
119 | 131 | |
|
120 | 132 | for plugin in pyramid_registry.appenlight_plugins.values(): |
|
121 | if plugin.get('celery_tasks'): | |

122 | celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks']) | |

123 | if plugin.get('celery_beats'): | |

124 | for name, config in plugin['celery_beats']: | |

125 | celery_config['CELERYBEAT_SCHEDULE'][name] = config | |
|
133 | if plugin.get("celery_tasks"): | |
|
134 | celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"]) | |
|
135 | if plugin.get("celery_beats"): | |
|
136 | for name, config in plugin["celery_beats"]: | |
|
137 | celery_config["CELERYBEAT_SCHEDULE"][name] = config | |
|
126 | 138 | celery.config_from_object(celery_config) |
|
127 | 139 | |
|
128 | 140 | |
|
129 | 141 | @task_prerun.connect |
|
130 | 142 | def task_prerun_signal(task_id, task, args, kwargs, **kwaargs): |
|
131 | if hasattr(celery, 'pyramid'): | |
|
143 | if hasattr(celery, "pyramid"): | |
|
132 | 144 | env = celery.pyramid |
|
133 | env = prepare(registry=env['request'].registry) | |

134 | proper_base_url = env['request'].registry.settings['mailing.app_url'] | |

135 | tmp_req = Request.blank('/', base_url=proper_base_url) | |
|
145 | env = prepare(registry=env["request"].registry) | |
|
146 | proper_base_url = env["request"].registry.settings["mailing.app_url"] | |
|
147 | tmp_req = Request.blank("/", base_url=proper_base_url) | |
|
136 | 148 | # ensure tasks generate url for right domain from config |
|
137 | env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST'] | |

138 | env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT'] | |

139 | env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME'] | |

140 | env['request'].environ['wsgi.url_scheme'] = \ | |
|
141 | tmp_req.environ['wsgi.url_scheme'] | |
|
149 | env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"] | |
|
150 | env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"] | |
|
151 | env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"] | |
|
152 | env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"] | |
|
142 | 153 | get_current_request().tm.begin() |
|
143 | 154 | |
|
144 | 155 | |
|
145 | 156 | @task_success.connect |
|
146 | 157 | def task_success_signal(result, **kwargs): |
|
147 | 158 | get_current_request().tm.commit() |
|
148 | if hasattr(celery, 'pyramid'): | |
|
159 | if hasattr(celery, "pyramid"): | |
|
149 | 160 | celery.pyramid["closer"]() |
|
150 | 161 | |
|
151 | 162 | |
|
152 | 163 | @task_retry.connect |
|
153 | 164 | def task_retry_signal(request, reason, einfo, **kwargs): |
|
154 | 165 | get_current_request().tm.abort() |
|
155 | if hasattr(celery, 'pyramid'): | |
|
166 | if hasattr(celery, "pyramid"): | |
|
156 | 167 | celery.pyramid["closer"]() |
|
157 | 168 | |
|
158 | 169 | |
|
159 | 170 | @task_failure.connect |
|
160 | def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, | |
|
161 | **kwaargs): | |
|
171 | def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs): | |
|
162 | 172 | get_current_request().tm.abort() |
|
163 | if hasattr(celery, 'pyramid'): | |
|
173 | if hasattr(celery, "pyramid"): | |
|
164 | 174 | celery.pyramid["closer"]() |
|
165 | 175 | |
|
166 | 176 | |
|
167 | 177 | @task_revoked.connect |
|
168 | 178 | def task_revoked_signal(request, terminated, signum, expired, **kwaargs): |
|
169 | 179 | get_current_request().tm.abort() |
|
170 | if hasattr(celery, 'pyramid'): | |
|
180 | if hasattr(celery, "pyramid"): | |
|
171 | 181 | celery.pyramid["closer"]() |
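
Note: configure_celery above merges per-plugin entries into CELERYBEAT_SCHEDULE, where each entry maps a beat name to a task's dotted path plus a timedelta or crontab schedule. A small sketch of that shape, with placeholder task paths (they are not real appenlight tasks):

from datetime import timedelta
from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    "notifications": {
        "task": "myapp.tasks.notifications_reports",    # placeholder path
        "schedule": timedelta(seconds=60),              # every minute
    },
    "daily_digest": {
        "task": "myapp.tasks.daily_digest",             # placeholder path
        "schedule": crontab(minute=1, hour="4,12,20"),  # 04:01, 12:01, 20:01
    },
}
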
@@ -1,60 +1,51 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import json |
|
18 | 18 | from datetime import datetime, date, timedelta |
|
19 | 19 | |
|
20 | DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' | |
|
20 | DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" | |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | class DateEncoder(json.JSONEncoder): |
|
24 | 24 | def default(self, obj): |
|
25 | 25 | if isinstance(obj, datetime): |
|
26 | return { | |
|
27 | '__type__': '__datetime__', | |
|
28 | 'iso': obj.strftime(DATE_FORMAT) | |
|
29 | } | |
|
26 | return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)} | |
|
30 | 27 | elif isinstance(obj, date): |
|
31 | return { | |
|
32 | '__type__': '__date__', | |
|
33 | 'iso': obj.strftime(DATE_FORMAT) | |
|
34 | } | |
|
28 | return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)} | |
|
35 | 29 | elif isinstance(obj, timedelta): |
|
36 | return { | |
|
37 | '__type__': '__timedelta__', | |
|
38 | 'seconds': obj.total_seconds() | |
|
39 | } | |
|
30 | return {"__type__": "__timedelta__", "seconds": obj.total_seconds()} | |
|
40 | 31 | else: |
|
41 | 32 | return json.JSONEncoder.default(self, obj) |
|
42 | 33 | |
|
43 | 34 | |
|
44 | 35 | def date_decoder(dct): |
|
45 | if '__type__' in dct: | |

46 | if dct['__type__'] == '__datetime__': | |

47 | return datetime.strptime(dct['iso'], DATE_FORMAT) | |

48 | elif dct['__type__'] == '__date__': | |

49 | return datetime.strptime(dct['iso'], DATE_FORMAT).date() | |

50 | elif dct['__type__'] == '__timedelta__': | |

51 | return timedelta(seconds=dct['seconds']) | |
|
36 | if "__type__" in dct: | |
|
37 | if dct["__type__"] == "__datetime__": | |
|
38 | return datetime.strptime(dct["iso"], DATE_FORMAT) | |
|
39 | elif dct["__type__"] == "__date__": | |
|
40 | return datetime.strptime(dct["iso"], DATE_FORMAT).date() | |
|
41 | elif dct["__type__"] == "__timedelta__": | |
|
42 | return timedelta(seconds=dct["seconds"]) | |
|
52 | 43 | return dct |
|
53 | 44 | |
|
54 | 45 | |
|
55 | 46 | def json_dumps(obj): |
|
56 | 47 | return json.dumps(obj, cls=DateEncoder) |
|
57 | 48 | |
|
58 | 49 | |
|
59 | 50 | def json_loads(obj): |
|
60 | return json.loads(obj.decode('utf8'), object_hook=date_decoder) | |
|
51 | return json.loads(obj.decode("utf8"), object_hook=date_decoder) |
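
Note: DateEncoder and date_decoder implement a tagged JSON round trip for datetime, date and timedelta values; json_loads takes bytes (it calls obj.decode), so a string produced by json_dumps has to be encoded first. A quick usage sketch, assuming the two functions are imported from this module:

from datetime import datetime, timedelta

payload = {"when": datetime(2017, 1, 1, 12, 30), "wait": timedelta(seconds=90)}
raw = json_dumps(payload)                  # JSON string with "__type__" tags
restored = json_loads(raw.encode("utf8"))  # bytes in, objects out
assert restored["when"] == datetime(2017, 1, 1, 12, 30)
assert restored["wait"] == timedelta(seconds=90)
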
@@ -1,657 +1,708 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import bisect |
|
18 | 18 | import collections |
|
19 | 19 | import math |
|
20 | 20 | from datetime import datetime, timedelta |
|
21 | 21 | |
|
22 | 22 | import sqlalchemy as sa |
|
23 | 23 | import elasticsearch.exceptions |
|
24 | 24 | import elasticsearch.helpers |
|
25 | 25 | |
|
26 | 26 | from celery.utils.log import get_task_logger |
|
27 | 27 | from zope.sqlalchemy import mark_changed |
|
28 | 28 | from pyramid.threadlocal import get_current_request, get_current_registry |
|
29 | 29 | from ziggurat_foundations.models.services.resource import ResourceService |
|
30 | 30 | |
|
31 | 31 | from appenlight.celery import celery |
|
32 | 32 | from appenlight.models.report_group import ReportGroup |
|
33 | 33 | from appenlight.models import DBSession, Datastores |
|
34 | 34 | from appenlight.models.report import Report |
|
35 | 35 | from appenlight.models.log import Log |
|
36 | 36 | from appenlight.models.metric import Metric |
|
37 | 37 | from appenlight.models.event import Event |
|
38 | 38 | |
|
39 | 39 | from appenlight.models.services.application import ApplicationService |
|
40 | 40 | from appenlight.models.services.event import EventService |
|
41 | 41 | from appenlight.models.services.log import LogService |
|
42 | 42 | from appenlight.models.services.report import ReportService |
|
43 | 43 | from appenlight.models.services.report_group import ReportGroupService |
|
44 | 44 | from appenlight.models.services.user import UserService |
|
45 | 45 | from appenlight.models.tag import Tag |
|
46 | 46 | from appenlight.lib import print_traceback |
|
47 | 47 | from appenlight.lib.utils import parse_proto, in_batches |
|
48 | 48 | from appenlight.lib.ext_json import json |
|
49 | 49 | from appenlight.lib.redis_keys import REDIS_KEYS |
|
50 | 50 | from appenlight.lib.enums import ReportType |
|
51 | 51 | |
|
52 | 52 | log = get_task_logger(__name__) |
|
53 | 53 | |
|
54 | sample_boundries = list(range(100, 1000, 100)) + \ | |
|
55 | list(range(1000, 10000, 1000)) + \ | |

56 | list(range(10000, 100000, 5000)) | |
|
54 | sample_boundries = ( | |
|
55 | list(range(100, 1000, 100)) | |
|
56 | + list(range(1000, 10000, 1000)) | |
|
57 | + list(range(10000, 100000, 5000)) | |
|
58 | ) | |
|
57 | 59 | |
|
58 | 60 | |
|
59 | 61 | def pick_sample(total_occurences, report_type=None): |
|
60 | 62 | every = 1.0 |
|
61 | 63 | position = bisect.bisect_left(sample_boundries, total_occurences) |
|
62 | 64 | if position > 0: |
|
63 | 65 | if report_type == ReportType.not_found: |
|
64 | 66 | divide = 10.0 |
|
65 | 67 | else: |
|
66 | 68 | divide = 100.0 |
|
67 | 69 | every = sample_boundries[position - 1] / divide |
|
68 | 70 | return total_occurences % every == 0 |
|
69 | 71 | |
|
70 | 72 | |
|
71 | 73 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
72 | 74 | def test_exception_task(): |
|
73 | log.error('test celery log', extra={'location': 'celery'}) | |

74 | log.warning('test celery log', extra={'location': 'celery'}) | |

75 | raise Exception('Celery exception test') | |
|
75 | log.error("test celery log", extra={"location": "celery"}) | |
|
76 | log.warning("test celery log", extra={"location": "celery"}) | |
|
77 | raise Exception("Celery exception test") | |
|
76 | 78 | |
|
77 | 79 | |
|
78 | 80 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
79 | 81 | def test_retry_exception_task(): |
|
80 | 82 | try: |
|
81 | 83 | import time |
|
82 | 84 | |
|
83 | 85 | time.sleep(1.3) |
|
84 | log.error('test retry celery log', extra={'location': 'celery'}) | |

85 | log.warning('test retry celery log', extra={'location': 'celery'}) | |

86 | raise Exception('Celery exception test') | |
|
86 | log.error("test retry celery log", extra={"location": "celery"}) | |
|
87 | log.warning("test retry celery log", extra={"location": "celery"}) | |
|
88 | raise Exception("Celery exception test") | |
|
87 | 89 | except Exception as exc: |
|
88 | 90 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
89 | 91 | raise |
|
90 | 92 | test_retry_exception_task.retry(exc=exc) |
|
91 | 93 | |
|
92 | 94 | |
|
93 | 95 | @celery.task(queue="reports", default_retry_delay=600, max_retries=144) |
|
94 | 96 | def add_reports(resource_id, request_params, dataset, **kwargs): |
|
95 | proto_version = parse_proto(request_params.get('protocol_version', '')) | |
|
97 | proto_version = parse_proto(request_params.get("protocol_version", "")) | |
|
96 | 98 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
97 | 99 | try: |
|
98 | 100 | # we will store solr docs here for single insert |
|
99 | 101 | es_report_docs = {} |
|
100 | 102 | es_report_group_docs = {} |
|
101 | 103 | resource = ApplicationService.by_id(resource_id) |
|
102 | 104 | |
|
103 | 105 | tags = [] |
|
104 | 106 | es_slow_calls_docs = {} |
|
105 | 107 | es_reports_stats_rows = {} |
|
106 | 108 | for report_data in dataset: |
|
107 | 109 | # build report details for later |
|
108 | 110 | added_details = 0 |
|
109 | 111 | report = Report() |
|
110 | 112 | report.set_data(report_data, resource, proto_version) |
|
111 | 113 | report._skip_ft_index = True |
|
112 | 114 | |
|
113 | 115 | # find latest group in this months partition |
|
114 | 116 | report_group = ReportGroupService.by_hash_and_resource( |
|
115 | 117 | report.resource_id, |
|
116 | 118 | report.grouping_hash, |
|
117 | since_when=datetime.utcnow().date().replace(day=1) | |
|
119 | since_when=datetime.utcnow().date().replace(day=1), | |
|
118 | 120 | ) |
|
119 | occurences = report_data.get('occurences', 1) | |
|
121 | occurences = report_data.get("occurences", 1) | |
|
120 | 122 | if not report_group: |
|
121 | 123 | # total reports will be +1 moment later |
|
122 | report_group = ReportGroup(grouping_hash=report.grouping_hash, | |
|
123 | occurences=0, total_reports=0, | |
|
124 | last_report=0, | |
|
125 | priority=report.priority, | |
|
126 | error=report.error, | |
|
127 | first_timestamp=report.start_time) | |
|
124 | report_group = ReportGroup( | |
|
125 | grouping_hash=report.grouping_hash, | |
|
126 | occurences=0, | |
|
127 | total_reports=0, | |
|
128 | last_report=0, | |
|
129 | priority=report.priority, | |
|
130 | error=report.error, | |
|
131 | first_timestamp=report.start_time, | |
|
132 | ) | |
|
128 | 133 | report_group._skip_ft_index = True |
|
129 | 134 | report_group.report_type = report.report_type |
|
130 | 135 | report.report_group_time = report_group.first_timestamp |
|
131 | add_sample = pick_sample(report_group.occurences, | |

132 | report_type=report_group.report_type) | |
|
136 | add_sample = pick_sample( | |
|
137 | report_group.occurences, report_type=report_group.report_type | |
|
138 | ) | |
|
133 | 139 | if add_sample: |
|
134 | 140 | resource.report_groups.append(report_group) |
|
135 | 141 | report_group.reports.append(report) |
|
136 | 142 | added_details += 1 |
|
137 | 143 | DBSession.flush() |
|
138 | 144 | if report.partition_id not in es_report_docs: |
|
139 | 145 | es_report_docs[report.partition_id] = [] |
|
140 | 146 | es_report_docs[report.partition_id].append(report.es_doc()) |
|
141 | 147 | tags.extend(list(report.tags.items())) |
|
142 | 148 | slow_calls = report.add_slow_calls(report_data, report_group) |
|
143 | 149 | DBSession.flush() |
|
144 | 150 | for s_call in slow_calls: |
|
145 | 151 | if s_call.partition_id not in es_slow_calls_docs: |
|
146 | 152 | es_slow_calls_docs[s_call.partition_id] = [] |
|
147 | es_slow_calls_docs[s_call.partition_id].append( | |
|
148 | s_call.es_doc()) | |
|
153 | es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc()) | |
|
149 | 154 | # try generating new stat rows if needed |
|
150 | 155 | else: |
|
151 | 156 | # required for postprocessing to not fail later |
|
152 | 157 | report.report_group = report_group |
|
153 | 158 | |
|
154 | stat_row = ReportService.generate_stat_rows( | |
|
155 | report, resource, report_group) | |
|
159 | stat_row = ReportService.generate_stat_rows(report, resource, report_group) | |
|
156 | 160 | if stat_row.partition_id not in es_reports_stats_rows: |
|
157 | 161 | es_reports_stats_rows[stat_row.partition_id] = [] |
|
158 | es_reports_stats_rows[stat_row.partition_id].append( | |
|
159 | stat_row.es_doc()) | |
|
162 | es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc()) | |
|
160 | 163 | |
|
161 | 164 | # see if we should mark 10th occurence of report |
|
162 | 165 | last_occurences_10 = int(math.floor(report_group.occurences / 10)) |
|
163 | curr_occurences_10 = int(math.floor( | |

164 | (report_group.occurences + report.occurences) / 10)) | |
|
165 | last_occurences_100 = int( | |
|
166 | math.floor(report_group.occurences / 100)) | |

167 | curr_occurences_100 = int(math.floor( | |

168 | (report_group.occurences + report.occurences) / 100)) | |
|
166 | curr_occurences_10 = int( | |
|
167 | math.floor((report_group.occurences + report.occurences) / 10) | |
|
168 | ) | |
|
169 | last_occurences_100 = int(math.floor(report_group.occurences / 100)) | |
|
170 | curr_occurences_100 = int( | |
|
171 | math.floor((report_group.occurences + report.occurences) / 100) | |
|
172 | ) | |
|
169 | 173 | notify_occurences_10 = last_occurences_10 != curr_occurences_10 |
|
170 | 174 | notify_occurences_100 = last_occurences_100 != curr_occurences_100 |
|
171 | 175 | report_group.occurences = ReportGroup.occurences + occurences |
|
172 | 176 | report_group.last_timestamp = report.start_time |
|
173 | 177 | report_group.summed_duration = ReportGroup.summed_duration + report.duration |
|
174 | 178 | summed_duration = ReportGroup.summed_duration + report.duration |
|
175 | 179 | summed_occurences = ReportGroup.occurences + occurences |
|
176 | 180 | report_group.average_duration = summed_duration / summed_occurences |
|
177 | 181 | report_group.run_postprocessing(report) |
|
178 | 182 | if added_details: |
|
179 | 183 | report_group.total_reports = ReportGroup.total_reports + 1 |
|
180 | 184 | report_group.last_report = report.id |
|
181 | report_group.set_notification_info(notify_10=notify_occurences_10, | |

182 | notify_100=notify_occurences_100) | |
|
185 | report_group.set_notification_info( | |
|
186 | notify_10=notify_occurences_10, notify_100=notify_occurences_100 | |
|
187 | ) | |
|
183 | 188 | DBSession.flush() |
|
184 | 189 | report_group.get_report().notify_channel(report_group) |
|
185 | 190 | if report_group.partition_id not in es_report_group_docs: |
|
186 | 191 | es_report_group_docs[report_group.partition_id] = [] |
|
187 | 192 | es_report_group_docs[report_group.partition_id].append( |
|
188 | report_group.es_doc()) | |
|
193 | report_group.es_doc() | |
|
194 | ) | |
|
189 | 195 | |
|
190 | action = 'REPORT' | |

191 | log_msg = '%s: %s %s, client: %s, proto: %s' % ( | |
|
196 | action = "REPORT" | |
|
197 | log_msg = "%s: %s %s, client: %s, proto: %s" % ( | |
|
192 | 198 | action, |
|
193 | report_data.get('http_status', 'unknown'), | |
|
199 | report_data.get("http_status", "unknown"), | |
|
194 | 200 | str(resource), |
|
195 | report_data.get('client'), | |

196 | proto_version) | |
|
201 | report_data.get("client"), | |
|
202 | proto_version, | |
|
203 | ) | |
|
197 | 204 | log.info(log_msg) |
|
198 | 205 | total_reports = len(dataset) |
|
199 | 206 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
200 | key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time) | |
|
207 | key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time) | |
|
201 | 208 | redis_pipeline.incr(key, total_reports) |
|
202 | 209 | redis_pipeline.expire(key, 3600 * 24) |
|
203 | key = REDIS_KEYS['counters']['events_per_minute_per_user'].format( | |

204 | resource.owner_user_id, current_time) | |
|
210 | key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format( | |
|
211 | resource.owner_user_id, current_time | |
|
212 | ) | |
|
205 | 213 | redis_pipeline.incr(key, total_reports) |
|
206 | 214 | redis_pipeline.expire(key, 3600) |
|
207 | key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format( | |

208 | resource_id, current_time.replace(minute=0)) | |
|
215 | key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format( | |
|
216 | resource_id, current_time.replace(minute=0) | |
|
217 | ) | |
|
209 | 218 | redis_pipeline.incr(key, total_reports) |
|
210 | 219 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
211 | 220 | redis_pipeline.sadd( |
|
212 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format( | |

213 | current_time.replace(minute=0)), resource_id) | |
|
221 | REDIS_KEYS["apps_that_got_new_data_per_hour"].format( | |
|
222 | current_time.replace(minute=0) | |
|
223 | ), | |
|
224 | resource_id, | |
|
225 | ) | |
|
214 | 226 | redis_pipeline.execute() |
|
215 | 227 | |
|
216 | 228 | add_reports_es(es_report_group_docs, es_report_docs) |
|
217 | 229 | add_reports_slow_calls_es(es_slow_calls_docs) |
|
218 | 230 | add_reports_stats_rows_es(es_reports_stats_rows) |
|
219 | 231 | return True |
|
220 | 232 | except Exception as exc: |
|
221 | 233 | print_traceback(log) |
|
222 | 234 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
223 | 235 | raise |
|
224 | 236 | add_reports.retry(exc=exc) |
|
225 | 237 | |
|
226 | 238 | |
|
227 | 239 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
228 | 240 | def add_reports_es(report_group_docs, report_docs): |
|
229 | 241 | for k, v in report_group_docs.items(): |
|
230 | to_update = {'_index': k, '_type': 'report_group'} | |
|
242 | to_update = {"_index": k, "_type": "report_group"} | |
|
231 | 243 | [i.update(to_update) for i in v] |
|
232 | 244 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
233 | 245 | for k, v in report_docs.items(): |
|
234 | to_update = {'_index': k, '_type': 'report'} | |
|
246 | to_update = {"_index": k, "_type": "report"} | |
|
235 | 247 | [i.update(to_update) for i in v] |
|
236 | 248 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
237 | 249 | |
|
238 | 250 | |
|
239 | 251 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
240 | 252 | def add_reports_slow_calls_es(es_docs): |
|
241 | 253 | for k, v in es_docs.items(): |
|
242 | to_update = {'_index': k, '_type': 'log'} | |
|
254 | to_update = {"_index": k, "_type": "log"} | |
|
243 | 255 | [i.update(to_update) for i in v] |
|
244 | 256 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
245 | 257 | |
|
246 | 258 | |
|
247 | 259 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
248 | 260 | def add_reports_stats_rows_es(es_docs): |
|
249 | 261 | for k, v in es_docs.items(): |
|
250 | to_update = {'_index': k, '_type': 'log'} | |
|
262 | to_update = {"_index": k, "_type": "log"} | |
|
251 | 263 | [i.update(to_update) for i in v] |
|
252 | 264 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
253 | 265 | |
|
254 | 266 | |
|
255 | 267 | @celery.task(queue="logs", default_retry_delay=600, max_retries=144) |
|
256 | 268 | def add_logs(resource_id, request_params, dataset, **kwargs): |
|
257 | proto_version = request_params.get('protocol_version') | |
|
269 | proto_version = request_params.get("protocol_version") | |
|
258 | 270 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
259 | 271 | |
|
260 | 272 | try: |
|
261 | 273 | es_docs = collections.defaultdict(list) |
|
262 | 274 | resource = ApplicationService.by_id_cached()(resource_id) |
|
263 | 275 | resource = DBSession.merge(resource, load=False) |
|
264 | 276 | ns_pairs = [] |
|
265 | 277 | for entry in dataset: |
|
266 | 278 | # gather pk and ns so we can remove older versions of row later |
|
267 | if entry['primary_key'] is not None: | |

268 | ns_pairs.append({"pk": entry['primary_key'], | |
|
269 | "ns": entry['namespace']}) | |
|
279 | if entry["primary_key"] is not None: | |
|
280 | ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]}) | |
|
270 | 281 | log_entry = Log() |
|
271 | 282 | log_entry.set_data(entry, resource=resource) |
|
272 | 283 | log_entry._skip_ft_index = True |
|
273 | 284 | resource.logs.append(log_entry) |
|
274 | 285 | DBSession.flush() |
|
275 | 286 | # insert non pk rows first |
|
276 | if entry['primary_key'] is None: | |
|
287 | if entry["primary_key"] is None: | |
|
277 | 288 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
278 | 289 | |
|
279 | 290 | # 2nd pass to delete all log entries from db foe same pk/ns pair |
|
280 | 291 | if ns_pairs: |
|
281 | 292 | ids_to_delete = [] |
|
282 | 293 | es_docs = collections.defaultdict(list) |
|
283 | 294 | es_docs_to_delete = collections.defaultdict(list) |
|
284 | 295 | found_pkey_logs = LogService.query_by_primary_key_and_namespace( |
|
285 | list_of_pairs=ns_pairs) | |
|
296 | list_of_pairs=ns_pairs | |
|
297 | ) | |
|
286 | 298 | log_dict = {} |
|
287 | 299 | for log_entry in found_pkey_logs: |
|
288 | 300 | log_key = (log_entry.primary_key, log_entry.namespace) |
|
289 | 301 | if log_key not in log_dict: |
|
290 | 302 | log_dict[log_key] = [] |
|
291 | 303 | log_dict[log_key].append(log_entry) |
|
292 | 304 | |
|
293 | 305 | for ns, entry_list in log_dict.items(): |
|
294 | 306 | entry_list = sorted(entry_list, key=lambda x: x.timestamp) |
|
295 | 307 | # newest row needs to be indexed in es |
|
296 | 308 | log_entry = entry_list[-1] |
|
297 | 309 | # delete everything from pg and ES, leave the last row in pg |
|
298 | 310 | for e in entry_list[:-1]: |
|
299 | 311 | ids_to_delete.append(e.log_id) |
|
300 | 312 | es_docs_to_delete[e.partition_id].append(e.delete_hash) |
|
301 | 313 | |
|
302 | es_docs_to_delete[log_entry.partition_id].append( | |
|
303 | log_entry.delete_hash) | |
|
314 | es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash) | |
|
304 | 315 | |
|
305 | 316 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
306 | 317 | |
|
307 | 318 | if ids_to_delete: |
|
308 | query = DBSession.query(Log).filter( | |
|
309 | Log.log_id.in_(ids_to_delete)) | |
|
319 | query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete)) | |
|
310 | 320 | query.delete(synchronize_session=False) |
|
311 | 321 | if es_docs_to_delete: |
|
312 | 322 | # batch this to avoid problems with default ES bulk limits |
|
313 | 323 | for es_index in es_docs_to_delete.keys(): |
|
314 | 324 | for batch in in_batches(es_docs_to_delete[es_index], 20): |
|
315 | query = {"query": {'terms': {'delete_hash': batch}}} | |
|
325 | query = {"query": {"terms": {"delete_hash": batch}}} | |
|
316 | 326 | |
|
317 | 327 | try: |
|
318 | 328 | Datastores.es.transport.perform_request( |
|
319 | "DELETE", '/{}/{}/_query'.format(es_index, 'log'), body=query) | |
|
329 | "DELETE", | |
|
330 | "/{}/{}/_query".format(es_index, "log"), | |
|
331 | body=query, | |
|
332 | ) | |
|
320 | 333 | except elasticsearch.exceptions.NotFoundError as exc: |
|
321 | msg = 'skipping index {}'.format(es_index) | |
|
334 | msg = "skipping index {}".format(es_index) | |
|
322 | 335 | log.info(msg) |
|
323 | 336 | |
|
324 | 337 | total_logs = len(dataset) |
|
325 | 338 | |
|
326 | log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % ( | |
|
339 | log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % ( | |
|
327 | 340 | str(resource), |
|
328 | 341 | total_logs, |
|
329 | proto_version) | |
|
342 | proto_version, | |
|
343 | ) | |
|
330 | 344 | log.info(log_msg) |
|
331 | 345 | # mark_changed(session) |
|
332 | 346 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
333 | key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time) | |
|
347 | key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time) | |
|
334 | 348 | redis_pipeline.incr(key, total_logs) |
|
335 | 349 | redis_pipeline.expire(key, 3600 * 24) |
|
336 | key = REDIS_KEYS['counters']['events_per_minute_per_user'].format( | |

337 | resource.owner_user_id, current_time) | |
|
350 | key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format( | |
|
351 | resource.owner_user_id, current_time | |
|
352 | ) | |
|
338 | 353 | redis_pipeline.incr(key, total_logs) |
|
339 | 354 | redis_pipeline.expire(key, 3600) |
|
340 | key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format( | |

341 | resource_id, current_time.replace(minute=0)) | |
|
355 | key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format( | |
|
356 | resource_id, current_time.replace(minute=0) | |
|
357 | ) | |
|
342 | 358 | redis_pipeline.incr(key, total_logs) |
|
343 | 359 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
344 | 360 | redis_pipeline.sadd( |
|
345 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format( | |

346 | current_time.replace(minute=0)), resource_id) | |
|
361 | REDIS_KEYS["apps_that_got_new_data_per_hour"].format( | |
|
362 | current_time.replace(minute=0) | |
|
363 | ), | |
|
364 | resource_id, | |
|
365 | ) | |
|
347 | 366 | redis_pipeline.execute() |
|
348 | 367 | add_logs_es(es_docs) |
|
349 | 368 | return True |
|
350 | 369 | except Exception as exc: |
|
351 | 370 | print_traceback(log) |
|
352 | 371 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
353 | 372 | raise |
|
354 | 373 | add_logs.retry(exc=exc) |
|
355 | 374 | |
|
356 | 375 | |
|
357 | 376 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
358 | 377 | def add_logs_es(es_docs): |
|
359 | 378 | for k, v in es_docs.items(): |
|
360 | to_update = {'_index': k, '_type': 'log'} | |
|
379 | to_update = {"_index": k, "_type": "log"} | |
|
361 | 380 | [i.update(to_update) for i in v] |
|
362 | 381 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
363 | 382 | |
|
364 | 383 | |
|
365 | 384 | @celery.task(queue="metrics", default_retry_delay=600, max_retries=144) |
|
366 | 385 | def add_metrics(resource_id, request_params, dataset, proto_version): |
|
367 | 386 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
368 | 387 | try: |
|
369 | 388 | resource = ApplicationService.by_id_cached()(resource_id) |
|
370 | 389 | resource = DBSession.merge(resource, load=False) |
|
371 | 390 | es_docs = [] |
|
372 | 391 | rows = [] |
|
373 | 392 | for metric in dataset: |
|
374 | tags = dict(metric['tags']) | |

375 | server_n = tags.get('server_name', metric['server_name']).lower() | |

376 | tags['server_name'] = server_n or 'unknown' | |
|
393 | tags = dict(metric["tags"]) | |
|
394 | server_n = tags.get("server_name", metric["server_name"]).lower() | |
|
395 | tags["server_name"] = server_n or "unknown" | |
|
377 | 396 | new_metric = Metric( |
|
378 | timestamp=metric['timestamp'], | |
|
397 | timestamp=metric["timestamp"], | |
|
379 | 398 | resource_id=resource.resource_id, |
|
380 | namespace=metric['namespace'], | |

381 | tags=tags) | |
|
399 | namespace=metric["namespace"], | |
|
400 | tags=tags, | |
|
401 | ) | |
|
382 | 402 | rows.append(new_metric) |
|
383 | 403 | es_docs.append(new_metric.es_doc()) |
|
384 | 404 | session = DBSession() |
|
385 | 405 | session.bulk_save_objects(rows) |
|
386 | 406 | session.flush() |
|
387 | 407 | |
|
388 | action = 'METRICS' | |

389 | metrics_msg = '%s: %s, metrics: %s, proto:%s' % ( | |
|
408 | action = "METRICS" | |
|
409 | metrics_msg = "%s: %s, metrics: %s, proto:%s" % ( | |
|
390 | 410 | action, |
|
391 | 411 | str(resource), |
|
392 | 412 | len(dataset), |
|
393 | proto_version | |
|
413 | proto_version, | |
|
394 | 414 | ) |
|
395 | 415 | log.info(metrics_msg) |
|
396 | 416 | |
|
397 | 417 | mark_changed(session) |
|
398 | 418 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
399 | key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time) | |
|
419 | key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time) | |
|
400 | 420 | redis_pipeline.incr(key, len(rows)) |
|
401 | 421 | redis_pipeline.expire(key, 3600 * 24) |
|
402 | key = REDIS_KEYS['counters']['events_per_minute_per_user'].format( | |

403 | resource.owner_user_id, current_time) | |
|
422 | key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format( | |
|
423 | resource.owner_user_id, current_time | |
|
424 | ) | |
|
404 | 425 | redis_pipeline.incr(key, len(rows)) |
|
405 | 426 | redis_pipeline.expire(key, 3600) |
|
406 | key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format( |
|
407 | resource_id, current_time.replace(minute=0)) |
|
427 | key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format( | |
|
428 | resource_id, current_time.replace(minute=0) | |
|
429 | ) | |
|
408 | 430 | redis_pipeline.incr(key, len(rows)) |
|
409 | 431 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
410 | 432 | redis_pipeline.sadd( |
|
411 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format( |
|
412 | current_time.replace(minute=0)), resource_id) |
|
433 | REDIS_KEYS["apps_that_got_new_data_per_hour"].format( | |
|
434 | current_time.replace(minute=0) | |
|
435 | ), | |
|
436 | resource_id, | |
|
437 | ) | |
|
413 | 438 | redis_pipeline.execute() |
|
414 | 439 | add_metrics_es(es_docs) |
|
415 | 440 | return True |
|
416 | 441 | except Exception as exc: |
|
417 | 442 | print_traceback(log) |
|
418 | 443 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
419 | 444 | raise |
|
420 | 445 | add_metrics.retry(exc=exc) |
|
421 | 446 | |
|
422 | 447 | |
|
423 | 448 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
424 | 449 | def add_metrics_es(es_docs): |
|
425 | 450 | for doc in es_docs: |
|
426 | partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d') |
|
427 | Datastores.es.index(partition, 'log', doc) |
|
451 | partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d") | |
|
452 | Datastores.es.index(partition, "log", doc) | |
|
428 | 453 | |
|
429 | 454 | |
|
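As the loop above shows, metric documents are routed to day-partitioned indices derived from the document timestamp. A small sketch of that naming scheme (the helper function name is ours, not the project's):

from datetime import datetime

def metrics_partition(ts: datetime) -> str:
    # mirrors the "rcae_m_%s" % ts.strftime("%Y_%m_%d") pattern in the task
    return "rcae_m_%s" % ts.strftime("%Y_%m_%d")

assert metrics_partition(datetime(2017, 5, 1)) == "rcae_m_2017_05_01"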
430 | 455 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
431 | 456 | def check_user_report_notifications(resource_id): |
|
432 | 457 | since_when = datetime.utcnow() |
|
433 | 458 | try: |
|
434 | 459 | request = get_current_request() |
|
435 | 460 | application = ApplicationService.by_id(resource_id) |
|
436 | 461 | if not application: |
|
437 | 462 | return |
|
438 | error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
439 | ReportType.error, resource_id) |
|
440 | slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( | |
|
441 | ReportType.slow, resource_id) | |
|
463 | error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format( | |
|
464 | ReportType.error, resource_id | |
|
465 | ) | |
|
466 | slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format( | |
|
467 | ReportType.slow, resource_id | |
|
468 | ) | |
|
442 | 469 | error_group_ids = Datastores.redis.smembers(error_key) |
|
443 | 470 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
444 | 471 | Datastores.redis.delete(error_key) |
|
445 | 472 | Datastores.redis.delete(slow_key) |
|
446 | 473 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
447 | 474 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
448 | 475 | group_ids = err_gids + slow_gids |
|
449 | 476 | occurence_dict = {} |
|
450 | 477 | for g_id in group_ids: |
|
451 | key = REDIS_KEYS['counters']['report_group_occurences'].format( |
|
452 | g_id) | |
|
478 | key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id) | |
|
453 | 479 | val = Datastores.redis.get(key) |
|
454 | 480 | Datastores.redis.delete(key) |
|
455 | 481 | if val: |
|
456 | 482 | occurence_dict[g_id] = int(val) |
|
457 | 483 | else: |
|
458 | 484 | occurence_dict[g_id] = 1 |
|
459 | 485 | report_groups = ReportGroupService.by_ids(group_ids) |
|
460 | 486 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
461 | 487 | |
|
462 | 488 | ApplicationService.check_for_groups_alert( |
|
463 | application, 'alert', report_groups=report_groups, | |
|
464 | occurence_dict=occurence_dict) | |
|
465 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) | |
|
489 | application, | |
|
490 | "alert", | |
|
491 | report_groups=report_groups, | |
|
492 | occurence_dict=occurence_dict, | |
|
493 | ) | |
|
494 | users = set( | |
|
495 | [p.user for p in ResourceService.users_for_perm(application, "view")] | |
|
496 | ) | |
|
466 | 497 | report_groups = report_groups.all() |
|
467 | 498 | for user in users: |
|
468 | UserService.report_notify(user, request, application, |
|
469 | report_groups=report_groups, | |
|
470 | occurence_dict=occurence_dict) | |
|
499 | UserService.report_notify( | |
|
500 | user, | |
|
501 | request, | |
|
502 | application, | |
|
503 | report_groups=report_groups, | |
|
504 | occurence_dict=occurence_dict, | |
|
505 | ) | |
|
471 | 506 | for group in report_groups: |
|
472 | 507 | # marks report_groups as notified |
|
473 | 508 | if not group.notified: |
|
474 | 509 | group.notified = True |
|
475 | 510 | except Exception as exc: |
|
476 | 511 | print_traceback(log) |
|
477 | 512 | raise |
|
478 | 513 | |
|
479 | 514 | |
|
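check_user_report_notifications drains its per-application Redis sets with a smembers-then-delete pattern before fanning out notifications. A condensed standalone sketch (the key string is hypothetical; decode_responses spares the manual byte decoding):

import redis

r = redis.StrictRedis(decode_responses=True)
key = "reports_to_notify:error:app-1"  # hypothetical formatted key
group_ids = [int(g) for g in r.smembers(key)]
r.delete(key)  # note: not atomic with the read, matching the task's behavior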
480 | 515 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
481 | 516 | def check_alerts(resource_id): |
|
482 | 517 | since_when = datetime.utcnow() |
|
483 | 518 | try: |
|
484 | 519 | request = get_current_request() |
|
485 | 520 | application = ApplicationService.by_id(resource_id) |
|
486 | 521 | if not application: |
|
487 | 522 | return |
|
488 | error_key = REDIS_KEYS[ | |
|
489 | 'reports_to_notify_per_type_per_app_alerting'].format( | |
|
490 | ReportType.error, resource_id) | |
|
491 | slow_key = REDIS_KEYS[ | |
|
492 | 'reports_to_notify_per_type_per_app_alerting'].format( | |
|
493 | ReportType.slow, resource_id) | |
|
523 | error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format( | |
|
524 | ReportType.error, resource_id | |
|
525 | ) | |
|
526 | slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format( | |
|
527 | ReportType.slow, resource_id | |
|
528 | ) | |
|
494 | 529 | error_group_ids = Datastores.redis.smembers(error_key) |
|
495 | 530 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
496 | 531 | Datastores.redis.delete(error_key) |
|
497 | 532 | Datastores.redis.delete(slow_key) |
|
498 | 533 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
499 | 534 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
500 | 535 | group_ids = err_gids + slow_gids |
|
501 | 536 | occurence_dict = {} |
|
502 | 537 | for g_id in group_ids: |
|
503 | key = REDIS_KEYS['counters'][ |
|
504 | 'report_group_occurences_alerting'].format( |
|
505 | g_id) |
|
538 | key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format( | |
|
539 | g_id | |
|
540 | ) | |
|
506 | 541 | val = Datastores.redis.get(key) |
|
507 | 542 | Datastores.redis.delete(key) |
|
508 | 543 | if val: |
|
509 | 544 | occurence_dict[g_id] = int(val) |
|
510 | 545 | else: |
|
511 | 546 | occurence_dict[g_id] = 1 |
|
512 | 547 | report_groups = ReportGroupService.by_ids(group_ids) |
|
513 | 548 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
514 | 549 | |
|
515 | 550 | ApplicationService.check_for_groups_alert( |
|
516 | application, 'alert', report_groups=report_groups, | |
|
517 | occurence_dict=occurence_dict, since_when=since_when) | |
|
551 | application, | |
|
552 | "alert", | |
|
553 | report_groups=report_groups, | |
|
554 | occurence_dict=occurence_dict, | |
|
555 | since_when=since_when, | |
|
556 | ) | |
|
518 | 557 | except Exception as exc: |
|
519 | 558 | print_traceback(log) |
|
520 | 559 | raise |
|
521 | 560 | |
|
522 | 561 | |
|
523 | 562 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
524 | 563 | def close_alerts(): |
|
525 | log.warning('Checking alerts') |
|
564 | log.warning("Checking alerts") | |
|
526 | 565 | since_when = datetime.utcnow() |
|
527 | 566 | try: |
|
528 | event_types = [Event.types['error_report_alert'], | |
|
529 | Event.types['slow_report_alert'], ] |
|
530 | statuses = [Event.statuses['active']] | |
|
567 | event_types = [ | |
|
568 | Event.types["error_report_alert"], | |
|
569 | Event.types["slow_report_alert"], | |
|
570 | ] | |
|
571 | statuses = [Event.statuses["active"]] | |
|
531 | 572 | # get events older than 5 min |
|
532 | 573 | events = EventService.by_type_and_status( |
|
533 | event_types, | |
|
534 | statuses, | |
|
535 | older_than=(since_when - timedelta(minutes=5))) | |
|
574 | event_types, statuses, older_than=(since_when - timedelta(minutes=5)) | |
|
575 | ) | |
|
536 | 576 | for event in events: |
|
537 | 577 | # see if we can close them |
|
538 | event.validate_or_close( | |
|
539 | since_when=(since_when - timedelta(minutes=1))) | |
|
578 | event.validate_or_close(since_when=(since_when - timedelta(minutes=1))) | |
|
540 | 579 | except Exception as exc: |
|
541 | 580 | print_traceback(log) |
|
542 | 581 | raise |
|
543 | 582 | |
|
544 | 583 | |
|
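close_alerts selects active alert events older than five minutes and asks each one to validate or close itself against activity in the last minute. The two cutoffs in isolation, under the same assumptions as the task above:

from datetime import datetime, timedelta

since_when = datetime.utcnow()
older_than = since_when - timedelta(minutes=5)         # events considered at all
validation_cutoff = since_when - timedelta(minutes=1)  # recent-activity window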
545 | 584 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) |
|
546 | 585 | def update_tag_counter(tag_name, tag_value, count): |
|
547 | 586 | try: |
|
548 | query = DBSession.query(Tag).filter(Tag.name == tag_name).filter( | |
|
549 | sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value), | |
|
550 | sa.types.TEXT)) | |
|
551 | query.update({'times_seen': Tag.times_seen + count, | |
|
552 | 'last_timestamp': datetime.utcnow()}, | |
|
553 | synchronize_session=False) | |
|
587 | query = ( | |
|
588 | DBSession.query(Tag) | |
|
589 | .filter(Tag.name == tag_name) | |
|
590 | .filter( | |
|
591 | sa.cast(Tag.value, sa.types.TEXT) | |
|
592 | == sa.cast(json.dumps(tag_value), sa.types.TEXT) | |
|
593 | ) | |
|
594 | ) | |
|
595 | query.update( | |
|
596 | {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()}, | |
|
597 | synchronize_session=False, | |
|
598 | ) | |
|
554 | 599 | session = DBSession() |
|
555 | 600 | mark_changed(session) |
|
556 | 601 | return True |
|
557 | 602 | except Exception as exc: |
|
558 | 603 | print_traceback(log) |
|
559 | 604 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
560 | 605 | raise |
|
561 | 606 | update_tag_counter.retry(exc=exc) |
|
562 | 607 | |
|
563 | 608 | |
|
564 | 609 | @celery.task(queue="default") |
|
565 | 610 | def update_tag_counters(): |
|
566 | 611 | """ |
|
567 | 612 | Sets task to update counters for application tags |
|
568 | 613 | """ |
|
569 | tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1) |
|
570 | Datastores.redis.delete(REDIS_KEYS['seen_tag_list']) |
|
614 | tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1) | |
|
615 | Datastores.redis.delete(REDIS_KEYS["seen_tag_list"]) | |
|
571 | 616 | c = collections.Counter(tags) |
|
572 | 617 | for t_json, count in c.items(): |
|
573 | 618 | tag_info = json.loads(t_json) |
|
574 | 619 | update_tag_counter.delay(tag_info[0], tag_info[1], count) |
|
575 | 620 | |
|
576 | 621 | |
|
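update_tag_counters collapses the Redis-buffered tag list with collections.Counter so each distinct (name, value) pair triggers a single counter-update task. A standalone illustration with made-up tags:

import collections
import json

tags = ['["browser", "firefox"]', '["browser", "firefox"]', '["os", "linux"]']
for t_json, count in collections.Counter(tags).items():
    name, value = json.loads(t_json)
    print(name, value, count)  # the task calls update_tag_counter.delay(...) here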
577 | 622 | @celery.task(queue="default") |
|
578 | 623 | def daily_digest(): |
|
579 | 624 | """ |
|
580 | 625 | Sends daily digest with top 50 error reports |
|
581 | 626 | """ |
|
582 | 627 | request = get_current_request() |
|
583 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
584 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
628 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"]) | |
|
629 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"]) | |
|
585 | 630 | since_when = datetime.utcnow() - timedelta(hours=8) |
|
586 | log.warning('Generating daily digests') |
|
631 | log.warning("Generating daily digests") | |
|
587 | 632 | for resource_id in apps: |
|
588 | resource_id = resource_id.decode('utf8') |
|
633 | resource_id = resource_id.decode("utf8") | |
|
589 | 634 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
590 | filter_settings = {'resource': [resource_id], |
|
591 | 'tags': [{'name': 'type', | |
|
592 | 'value': ['error'], 'op': None}], | |
|
593 | 'type': 'error', 'start_date': since_when, | |
|
594 | 'end_date': end_date} | |
|
635 | filter_settings = { | |
|
636 | "resource": [resource_id], | |
|
637 | "tags": [{"name": "type", "value": ["error"], "op": None}], | |
|
638 | "type": "error", | |
|
639 | "start_date": since_when, | |
|
640 | "end_date": end_date, | |
|
641 | } | |
|
595 | 642 | |
|
596 | 643 | reports = ReportGroupService.get_trending( |
|
597 | request, filter_settings=filter_settings, limit=50) |
|
644 | request, filter_settings=filter_settings, limit=50 | |
|
645 | ) | |
|
598 | 646 | |
|
599 | 647 | application = ApplicationService.by_id(resource_id) |
|
600 | 648 | if application: |
|
601 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) | |
|
649 | users = set( | |
|
650 | [p.user for p in ResourceService.users_for_perm(application, "view")] | |
|
651 | ) | |
|
602 | 652 | for user in users: |
|
603 | user.send_digest(request, application, |
|
604 | reports=reports, since_when=since_when) |
|
653 | user.send_digest( | |
|
654 | request, application, reports=reports, since_when=since_when | |
|
655 | ) | |
|
605 | 656 | |
|
606 | 657 | |
|
607 | 658 | @celery.task(queue="default") |
|
608 | 659 | def notifications_reports(): |
|
609 | 660 | """ |
|
610 | 661 | Loop that checks redis for info and then issues new tasks to celery to |
|
611 | 662 | issue notifications |
|
612 | 663 | """ |
|
613 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
614 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
664 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"]) | |
|
665 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"]) | |
|
615 | 666 | for app in apps: |
|
616 | log.warning('Notify for app: %s' % app) |
|
617 | check_user_report_notifications.delay(app.decode('utf8')) |
|
667 | log.warning("Notify for app: %s" % app) | |
|
668 | check_user_report_notifications.delay(app.decode("utf8")) | |
|
669 | ||
|
618 | 670 | |
|
619 | 671 | @celery.task(queue="default") |
|
620 | 672 | def alerting_reports(): |
|
621 | 673 | """ |
|
622 | 674 | Loop that checks redis for info and then issues new tasks to celery to |
|
623 | 675 | perform the following: |
|
624 | 676 | - which applications should have new alerts opened |
|
625 | 677 | """ |
|
626 | 678 | |
|
627 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
628 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
679 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"]) | |
|
680 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"]) | |
|
629 | 681 | for app in apps: |
|
630 | log.warning('Notify for app: %s' % app) |
|
631 | check_alerts.delay(app.decode('utf8')) |
|
682 | log.warning("Notify for app: %s" % app) | |
|
683 | check_alerts.delay(app.decode("utf8")) | |
|
632 | 684 | |
|
633 | 685 | |
|
634 | @celery.task(queue="default", soft_time_limit=3600 * 4, | |
|
635 | hard_time_limit=3600 * 4, max_retries=144) |
|
686 | @celery.task( | |
|
687 | queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144 | |
|
688 | ) | |
|
636 | 689 | def logs_cleanup(resource_id, filter_settings): |
|
637 | 690 | request = get_current_request() |
|
638 | 691 | request.tm.begin() |
|
639 | 692 | es_query = { |
|
640 | 693 | "query": { |
|
641 | "filtered": { | |
|
642 | "filter": { | |
|
643 | "and": [{"term": {"resource_id": resource_id}}] | |
|
644 | } | |
|
645 | } | |
|
694 | "filtered": {"filter": {"and": [{"term": {"resource_id": resource_id}}]}} | |
|
646 | 695 | } |
|
647 | 696 | } |
|
648 | 697 | |
|
649 | 698 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) |
|
650 | if filter_settings['namespace']: |
|
651 | query = query.filter(Log.namespace == filter_settings['namespace'][0]) |
|
652 | es_query['query']['filtered']['filter']['and'].append( |
|
653 | {"term": {"namespace": filter_settings['namespace'][0]}} |
|
699 | if filter_settings["namespace"]: | |
|
700 | query = query.filter(Log.namespace == filter_settings["namespace"][0]) | |
|
701 | es_query["query"]["filtered"]["filter"]["and"].append( | |
|
702 | {"term": {"namespace": filter_settings["namespace"][0]}} | |
|
654 | 703 | ) |
|
655 | 704 | query.delete(synchronize_session=False) |
|
656 | 705 | request.tm.commit() |
|
657 | Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format('rcae_l_*', 'log'), body=es_query) | |
|
706 | Datastores.es.transport.perform_request( | |
|
707 | "DELETE", "/{}/{}/_query".format("rcae_l_*", "log"), body=es_query | |
|
708 | ) |
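The final hunk of this file issues a raw delete-by-query against all daily log partitions. A hedged sketch of the equivalent request, under the assumption that the cluster still exposes the old Elasticsearch 1.x-style /index/type/_query DELETE endpoint that this code targets (the resource id value is illustrative):

es_query = {
    "query": {
        "filtered": {"filter": {"and": [{"term": {"resource_id": "some-app"}}]}}
    }
}
# Datastores.es.transport.perform_request(
#     "DELETE", "/rcae_l_*/log/_query", body=es_query)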
@@ -1,19 +1,20 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | ||
|
17 | 18 | def filter_callable(structure, section=None): |
|
18 | structure['SOMEVAL'] = '***REMOVED***' |
|
19 | structure["SOMEVAL"] = "***REMOVED***" | |
|
19 | 20 | return structure |
@@ -1,896 +1,981 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import wtforms |
|
18 | 18 | import formencode |
|
19 | 19 | import re |
|
20 | 20 | import pyramid.threadlocal |
|
21 | 21 | import datetime |
|
22 | 22 | import appenlight.lib.helpers as h |
|
23 | 23 | |
|
24 | 24 | from ziggurat_foundations.models.services.user import UserService |
|
25 | 25 | from ziggurat_foundations.models.services.group import GroupService |
|
26 | 26 | from appenlight.models import DBSession |
|
27 | 27 | from appenlight.models.alert_channel import AlertChannel |
|
28 | 28 | from appenlight.models.integrations import IntegrationException |
|
29 | 29 | from appenlight.models.integrations.campfire import CampfireIntegration |
|
30 | 30 | from appenlight.models.integrations.bitbucket import BitbucketIntegration |
|
31 | 31 | from appenlight.models.integrations.github import GithubIntegration |
|
32 | 32 | from appenlight.models.integrations.flowdock import FlowdockIntegration |
|
33 | 33 | from appenlight.models.integrations.hipchat import HipchatIntegration |
|
34 | 34 | from appenlight.models.integrations.jira import JiraClient |
|
35 | 35 | from appenlight.models.integrations.slack import SlackIntegration |
|
36 | 36 | from appenlight.lib.ext_json import json |
|
37 | 37 | from wtforms.ext.csrf.form import SecureForm |
|
38 | 38 | from wtforms.compat import iteritems |
|
39 | 39 | from collections import defaultdict |
|
40 | 40 | |
|
41 | 41 | _ = str |
|
42 | 42 | |
|
43 | 43 | strip_filter = lambda x: x.strip() if x else None |
|
44 | 44 | uppercase_filter = lambda x: x.upper() if x else None |
|
45 | 45 | |
|
46 | FALSE_VALUES = ('false', '', False, None) |
|
46 | FALSE_VALUES = ("false", "", False, None) | |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class CSRFException(Exception): |
|
50 | 50 | pass |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class ReactorForm(SecureForm): |
|
54 | def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None, |
|
55 | **kwargs): | |
|
56 | super(ReactorForm, self).__init__(formdata=formdata, obj=obj, | |
|
57 | prefix=prefix, | |
|
58 | csrf_context=csrf_context, **kwargs) | |
|
54 | def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs): | |
|
55 | super(ReactorForm, self).__init__( | |
|
56 | formdata=formdata, | |
|
57 | obj=obj, | |
|
58 | prefix=prefix, | |
|
59 | csrf_context=csrf_context, | |
|
60 | **kwargs | |
|
61 | ) | |
|
59 | 62 | self._csrf_context = csrf_context |
|
60 | 63 | |
|
61 | 64 | def generate_csrf_token(self, csrf_context): |
|
62 | 65 | return csrf_context.session.get_csrf_token() |
|
63 | 66 | |
|
64 | 67 | def validate_csrf_token(self, field): |
|
65 | 68 | request = self._csrf_context or pyramid.threadlocal.get_current_request() |
|
66 | is_from_auth_token = 'auth:auth_token' in request.effective_principals |
|
69 | is_from_auth_token = "auth:auth_token" in request.effective_principals | |
|
67 | 70 | if is_from_auth_token: |
|
68 | 71 | return True |
|
69 | 72 | |
|
70 | 73 | if field.data != field.current_token: |
|
71 | 74 | # try to save the day by using token from angular |
|
72 | if request.headers.get('X-XSRF-TOKEN') != field.current_token: |
|
73 | raise CSRFException('Invalid CSRF token') |
|
75 | if request.headers.get("X-XSRF-TOKEN") != field.current_token: | |
|
76 | raise CSRFException("Invalid CSRF token") | |
|
74 | 77 | |
|
75 | 78 | @property |
|
76 | 79 | def errors_dict(self): |
|
77 | 80 | r_dict = defaultdict(list) |
|
78 | 81 | for k, errors in self.errors.items(): |
|
79 | 82 | r_dict[k].extend([str(e) for e in errors]) |
|
80 | 83 | return r_dict |
|
81 | 84 | |
|
82 | 85 | @property |
|
83 | 86 | def errors_json(self): |
|
84 | 87 | return json.dumps(self.errors_dict) |
|
85 | 88 | |
|
86 | 89 | def populate_obj(self, obj, ignore_none=False): |
|
87 | 90 | """ |
|
88 | 91 | Populates the attributes of the passed `obj` with data from the form's |
|
89 | 92 | fields. |
|
90 | 93 | |
|
91 | 94 | :note: This is a destructive operation; Any attribute with the same name |
|
92 | 95 | as a field will be overridden. Use with caution. |
|
93 | 96 | """ |
|
94 | 97 | if ignore_none: |
|
95 | 98 | for name, field in iteritems(self._fields): |
|
96 | 99 | if field.data is not None: |
|
97 | 100 | field.populate_obj(obj, name) |
|
98 | 101 | else: |
|
99 | 102 | for name, field in iteritems(self._fields): |
|
100 | 103 | field.populate_obj(obj, name) |
|
101 | 104 | |
|
102 | 105 | css_classes = {} |
|
103 | 106 | ignore_labels = {} |
|
104 | 107 | |
|
105 | 108 | |
|
106 | 109 | class SignInForm(ReactorForm): |
|
107 | 110 | came_from = wtforms.HiddenField() |
|
108 | sign_in_user_name = wtforms.StringField(_('User Name')) |
|
109 | sign_in_user_password = wtforms.PasswordField(_('Password')) |
|
111 | sign_in_user_name = wtforms.StringField(_("User Name")) | |
|
112 | sign_in_user_password = wtforms.PasswordField(_("Password")) | |
|
110 | 113 | |
|
111 | ignore_labels = ['submit'] |
|
112 | css_classes = {'submit': 'btn btn-primary'} |
|
114 | ignore_labels = ["submit"] | |
|
115 | css_classes = {"submit": "btn btn-primary"} | |
|
113 | 116 | |
|
114 | html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'}, | |
|
115 | 'sign_in_user_password': { | |
|
116 | 'placeholder': 'Your password'}} |
|
117 | html_attrs = { | |
|
118 | "sign_in_user_name": {"placeholder": "Your login"}, | |
|
119 | "sign_in_user_password": {"placeholder": "Your password"}, | |
|
120 | } | |
|
117 | 121 | |
|
118 | 122 | |
|
119 | 123 | from wtforms.widgets import html_params, HTMLString |
|
120 | 124 | |
|
121 | 125 | |
|
122 | def select_multi_checkbox(field, ul_class='set', **kwargs): |
|
126 | def select_multi_checkbox(field, ul_class="set", **kwargs): | |
|
123 | 127 | """Render a multi-checkbox widget""" |
|
124 | kwargs.setdefault('type', 'checkbox') |
|
125 | field_id = kwargs.pop('id', field.id) |
|
126 | html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)] |
|
128 | kwargs.setdefault("type", "checkbox") | |
|
129 | field_id = kwargs.pop("id", field.id) | |
|
130 | html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)] | |
|
127 | 131 | for value, label, checked in field.iter_choices(): |
|
128 | choice_id = '%s-%s' % (field_id, value) |
|
132 | choice_id = "%s-%s" % (field_id, value) | |
|
129 | 133 | options = dict(kwargs, name=field.name, value=value, id=choice_id) |
|
130 | 134 | if checked: |
|
131 | options['checked'] = 'checked' |
|
132 | html.append('<li><input %s /> ' % html_params(**options)) |
|
135 | options["checked"] = "checked" | |
|
136 | html.append("<li><input %s /> " % html_params(**options)) | |
|
133 | 137 | html.append('<label for="%s">%s</label></li>' % (choice_id, label)) |
|
134 | html.append('</ul>') |
|
135 | return HTMLString(''.join(html)) |
|
138 | html.append("</ul>") | |
|
139 | return HTMLString("".join(html)) | |
|
136 | 140 | |
|
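For context, select_multi_checkbox is consumed as a WTForms widget. A minimal hedged usage sketch (the form class and choices are invented for illustration, mirroring how ReportBrowserForm later in this diff wires the widget):

import wtforms

class DemoForm(wtforms.Form):  # the app uses ReactorForm, which adds CSRF handling
    applications = wtforms.SelectMultipleField(
        "Applications",
        choices=[("1", "App one"), ("2", "App two")],
        widget=select_multi_checkbox,
    )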
137 | 141 | |
|
138 | def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs): |
|
142 | def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs): | |
|
139 | 143 | """Render a button widget""" |
|
140 | kwargs.setdefault('type', 'button') |
|
141 | field_id = kwargs.pop('id', field.id) |
|
142 | kwargs.setdefault('value', field.label.text) |
|
143 | html = ['<button %s>%s</button>' % (html_params(id=field_id, | |
|
144 | class_=button_cls), | |
|
145 | kwargs['value'],)] | |
|
146 | return HTMLString(''.join(html)) | |
|
144 | kwargs.setdefault("type", "button") | |
|
145 | field_id = kwargs.pop("id", field.id) | |
|
146 | kwargs.setdefault("value", field.label.text) | |
|
147 | html = [ | |
|
148 | "<button %s>%s</button>" | |
|
149 | % (html_params(id=field_id, class_=button_cls), kwargs["value"]) | |
|
150 | ] | |
|
151 | return HTMLString("".join(html)) | |
|
147 | 152 | |
|
148 | 153 | |
|
149 | 154 | def clean_whitespace(value): |
|
150 | 155 | if value: |
|
151 | 156 | return value.strip() |
|
152 | 157 | return value |
|
153 | 158 | |
|
154 | 159 | |
|
155 | 160 | def found_username_validator(form, field): |
|
156 | 161 | user = UserService.by_user_name(field.data) |
|
157 | 162 | # sets user to recover in email validator |
|
158 | 163 | form.field_user = user |
|
159 | 164 | if not user: |
|
160 | raise wtforms.ValidationError('This username does not exist') |
|
165 | raise wtforms.ValidationError("This username does not exist") | |
|
161 | 166 | |
|
162 | 167 | |
|
163 | 168 | def found_username_email_validator(form, field): |
|
164 | 169 | user = UserService.by_email(field.data) |
|
165 | 170 | if not user: |
|
166 | raise wtforms.ValidationError('Email is incorrect') |
|
171 | raise wtforms.ValidationError("Email is incorrect") | |
|
167 | 172 | |
|
168 | 173 | |
|
169 | 174 | def unique_username_validator(form, field): |
|
170 | 175 | user = UserService.by_user_name(field.data) |
|
171 | 176 | if user: |
|
172 | raise wtforms.ValidationError('This username already exists in system') |
|
177 | raise wtforms.ValidationError("This username already exists in system") | |
|
173 | 178 | |
|
174 | 179 | |
|
175 | 180 | def unique_groupname_validator(form, field): |
|
176 | 181 | group = GroupService.by_group_name(field.data) |
|
177 | mod_group = getattr(form, '_modified_group', None) |
|
182 | mod_group = getattr(form, "_modified_group", None) | |
|
178 | 183 | if group and (not mod_group or mod_group.id != group.id): |
|
179 | raise wtforms.ValidationError( | |
|
180 | 'This group name already exists in system') | |
|
184 | raise wtforms.ValidationError("This group name already exists in system") | |
|
181 | 185 | |
|
182 | 186 | |
|
183 | 187 | def unique_email_validator(form, field): |
|
184 | 188 | user = UserService.by_email(field.data) |
|
185 | 189 | if user: |
|
186 | raise wtforms.ValidationError('This email already exists in system') |
|
190 | raise wtforms.ValidationError("This email already exists in system") | |
|
187 | 191 | |
|
188 | 192 | |
|
189 | 193 | def email_validator(form, field): |
|
190 | 194 | validator = formencode.validators.Email() |
|
191 | 195 | try: |
|
192 | 196 | validator.to_python(field.data) |
|
193 | 197 | except formencode.Invalid as e: |
|
194 | 198 | raise wtforms.ValidationError(e) |
|
195 | 199 | |
|
196 | 200 | |
|
197 | 201 | def unique_alert_email_validator(form, field): |
|
198 | 202 | q = DBSession.query(AlertChannel) |
|
199 | q = q.filter(AlertChannel.channel_name == 'email') |
|
203 | q = q.filter(AlertChannel.channel_name == "email") | |
|
200 | 204 | q = q.filter(AlertChannel.channel_value == field.data) |
|
201 | 205 | email = q.first() |
|
202 | 206 | if email: |
|
203 | raise wtforms.ValidationError( | |
|
204 | 'This email already exists in alert system') | |
|
207 | raise wtforms.ValidationError("This email already exists in alert system") | |
|
205 | 208 | |
|
206 | 209 | |
|
207 | 210 | def blocked_email_validator(form, field): |
|
208 | 211 | blocked_emails = [ |
|
209 | 'goood-mail.org', |
|
210 | 'shoeonlineblog.com', |
|
211 | 'louboutinemart.com', |
|
212 | 'guccibagshere.com', |
|
213 | 'nikeshoesoutletforsale.com', |
|
212 | "goood-mail.org", | |
|
213 | "shoeonlineblog.com", | |
|
214 | "louboutinemart.com", | |
|
215 | "guccibagshere.com", | |
|
216 | "nikeshoesoutletforsale.com", | |
|
214 | 217 | ] |
|
215 | data = field.data or '' |
|
216 | domain = data.split('@')[-1] |
|
218 | data = field.data or "" | |
|
219 | domain = data.split("@")[-1] | |
|
217 | 220 | if domain in blocked_emails: |
|
218 | raise wtforms.ValidationError('Don\'t spam') |
|
221 | raise wtforms.ValidationError("Don't spam") | |
|
219 | 222 | |
|
220 | 223 | |
|
221 | 224 | def old_password_validator(form, field): |
|
222 | if not UserService.check_password(field.user, field.data or ''): |
|
223 | raise wtforms.ValidationError('You need to enter correct password') |
|
225 | if not UserService.check_password(field.user, field.data or ""): | |
|
226 | raise wtforms.ValidationError("You need to enter correct password") | |
|
224 | 227 | |
|
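All of the validators above follow the same WTForms protocol: a callable taking (form, field) that raises wtforms.ValidationError to reject the value. A hypothetical validator in the same shape, purely for illustration:

import wtforms

def no_digits_validator(form, field):  # illustrative, not part of this diff
    if any(ch.isdigit() for ch in (field.data or "")):
        raise wtforms.ValidationError("Digits are not allowed")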
225 | 228 | |
|
226 | 229 | class UserRegisterForm(ReactorForm): |
|
227 | 230 | user_name = wtforms.StringField( |
|
228 | _('User Name'), |
|
231 | _("User Name"), | |
|
229 | 232 | filters=[strip_filter], |
|
230 | 233 | validators=[ |
|
231 | 234 | wtforms.validators.Length(min=2, max=30), |
|
232 | 235 | wtforms.validators.Regexp( |
|
233 | re.compile(r'^[\.\w-]+$', re.UNICODE), |
|
234 | message="Invalid characters used"), | |
|
236 | re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used" | |
|
237 | ), | |
|
235 | 238 | unique_username_validator, |
|
236 | wtforms.validators.DataRequired() | |
|
237 | ]) |
|
239 | wtforms.validators.DataRequired(), | |
|
240 | ], | |
|
241 | ) | |
|
238 | 242 | |
|
239 | user_password = wtforms.PasswordField(_('User Password'), |
|
240 | filters=[strip_filter], | |
|
241 | validators=[ | |
|
242 | wtforms.validators.Length(min=4), | |
|
243 | wtforms.validators.DataRequired() | |
|
244 | ]) | |
|
243 | user_password = wtforms.PasswordField( | |
|
244 | _("User Password"), | |
|
245 | filters=[strip_filter], | |
|
246 | validators=[ | |
|
247 | wtforms.validators.Length(min=4), | |
|
248 | wtforms.validators.DataRequired(), | |
|
249 | ], | |
|
250 | ) | |
|
245 | 251 | |
|
246 | email = wtforms.StringField(_('Email Address'), |
|
247 | filters=[strip_filter], | |
|
248 | validators=[email_validator, | |
|
249 | unique_email_validator, | |
|
250 | blocked_email_validator, | |
|
251 | wtforms.validators.DataRequired()]) | |
|
252 | first_name = wtforms.HiddenField(_('First Name')) | |
|
253 | last_name = wtforms.HiddenField(_('Last Name')) | |
|
252 | email = wtforms.StringField( | |
|
253 | _("Email Address"), | |
|
254 | filters=[strip_filter], | |
|
255 | validators=[ | |
|
256 | email_validator, | |
|
257 | unique_email_validator, | |
|
258 | blocked_email_validator, | |
|
259 | wtforms.validators.DataRequired(), | |
|
260 | ], | |
|
261 | ) | |
|
262 | first_name = wtforms.HiddenField(_("First Name")) | |
|
263 | last_name = wtforms.HiddenField(_("Last Name")) | |
|
254 | 264 | |
|
255 | ignore_labels = ['submit'] |
|
256 | css_classes = {'submit': 'btn btn-primary'} |
|
265 | ignore_labels = ["submit"] | |
|
266 | css_classes = {"submit": "btn btn-primary"} | |
|
257 | 267 | |
|
258 | html_attrs = {'user_name': {'placeholder': 'Your login'}, | |
|
259 | 'user_password': {'placeholder': 'Your password'}, |
|
260 | 'email': {'placeholder': 'Your email'}} | |
|
268 | html_attrs = { | |
|
269 | "user_name": {"placeholder": "Your login"}, | |
|
270 | "user_password": {"placeholder": "Your password"}, | |
|
271 | "email": {"placeholder": "Your email"}, | |
|
272 | } | |
|
261 | 273 | |
|
262 | 274 | |
|
263 | 275 | class UserCreateForm(UserRegisterForm): |
|
264 | status = wtforms.BooleanField('User status', |
|
265 | false_values=FALSE_VALUES) | |
|
276 | status = wtforms.BooleanField("User status", false_values=FALSE_VALUES) | |
|
266 | 277 | |
|
267 | 278 | |
|
268 | 279 | class UserUpdateForm(UserCreateForm): |
|
269 | 280 | user_name = None |
|
270 | user_password = wtforms.PasswordField(_('User Password'), |
|
271 | filters=[strip_filter], | |
|
272 | validators=[ | |
|
273 | wtforms.validators.Length(min=4), | |
|
274 | wtforms.validators.Optional() | |
|
275 | ]) | |
|
276 | email = wtforms.StringField(_('Email Address'), | |
|
277 | filters=[strip_filter], |
|
278 | validators=[email_validator, | |
|
279 | wtforms.validators.DataRequired()]) | |
|
281 | user_password = wtforms.PasswordField( | |
|
282 | _("User Password"), | |
|
283 | filters=[strip_filter], | |
|
284 | validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()], | |
|
285 | ) | |
|
286 | email = wtforms.StringField( | |
|
287 | _("Email Address"), | |
|
288 | filters=[strip_filter], | |
|
289 | validators=[email_validator, wtforms.validators.DataRequired()], | |
|
290 | ) | |
|
280 | 291 | |
|
281 | 292 | |
|
282 | 293 | class LostPasswordForm(ReactorForm): |
|
283 | email = wtforms.StringField(_('Email Address'), |
|
284 | filters=[strip_filter], | |
|
285 | validators=[email_validator, | |
|
286 | found_username_email_validator, | |
|
287 | wtforms.validators.DataRequired()]) | |
|
294 | email = wtforms.StringField( | |
|
295 | _("Email Address"), | |
|
296 | filters=[strip_filter], | |
|
297 | validators=[ | |
|
298 | email_validator, | |
|
299 | found_username_email_validator, | |
|
300 | wtforms.validators.DataRequired(), | |
|
301 | ], | |
|
302 | ) | |
|
288 | 303 | |
|
289 | submit = wtforms.SubmitField(_('Reset password')) |
|
290 | ignore_labels = ['submit'] |
|
291 | css_classes = {'submit': 'btn btn-primary'} |
|
304 | submit = wtforms.SubmitField(_("Reset password")) | |
|
305 | ignore_labels = ["submit"] | |
|
306 | css_classes = {"submit": "btn btn-primary"} | |
|
292 | 307 | |
|
293 | 308 | |
|
294 | 309 | class ChangePasswordForm(ReactorForm): |
|
295 | 310 | old_password = wtforms.PasswordField( |
|
296 | 'Old Password', |
|
311 | "Old Password", | |
|
297 | 312 | filters=[strip_filter], |
|
298 | validators=[old_password_validator, | |
|
299 | wtforms.validators.DataRequired()]) | |
|
313 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
|
314 | ) | |
|
300 | 315 | |
|
301 | 316 | new_password = wtforms.PasswordField( |
|
302 | 'New Password', |
|
317 | "New Password", | |
|
303 | 318 | filters=[strip_filter], |
|
304 | validators=[wtforms.validators.Length(min=4), | |
|
305 | wtforms.validators.DataRequired()]) |
|
319 | validators=[ | |
|
320 | wtforms.validators.Length(min=4), | |
|
321 | wtforms.validators.DataRequired(), | |
|
322 | ], | |
|
323 | ) | |
|
306 | 324 | new_password_confirm = wtforms.PasswordField( |
|
307 | 'Confirm Password', |
|
325 | "Confirm Password", | |
|
308 | 326 | filters=[strip_filter], |
|
309 | validators=[wtforms.validators.EqualTo('new_password'), | |
|
310 | wtforms.validators.DataRequired()]) |
|
311 | submit = wtforms.SubmitField('Change Password') | |
|
312 | ignore_labels = ['submit'] | |
|
313 | css_classes = {'submit': 'btn btn-primary'} | |
|
327 | validators=[ | |
|
328 | wtforms.validators.EqualTo("new_password"), | |
|
329 | wtforms.validators.DataRequired(), | |
|
330 | ], | |
|
331 | ) | |
|
332 | submit = wtforms.SubmitField("Change Password") | |
|
333 | ignore_labels = ["submit"] | |
|
334 | css_classes = {"submit": "btn btn-primary"} | |
|
314 | 335 | |
|
315 | 336 | |
|
316 | 337 | class CheckPasswordForm(ReactorForm): |
|
317 | 338 | password = wtforms.PasswordField( |
|
318 | 'Password', |
|
339 | "Password", | |
|
319 | 340 | filters=[strip_filter], |
|
320 | validators=[old_password_validator, | |
|
321 | wtforms.validators.DataRequired()]) | |
|
341 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
|
342 | ) | |
|
322 | 343 | |
|
323 | 344 | |
|
324 | 345 | class NewPasswordForm(ReactorForm): |
|
325 | 346 | new_password = wtforms.PasswordField( |
|
326 | 'New Password', |
|
347 | "New Password", | |
|
327 | 348 | filters=[strip_filter], |
|
328 | validators=[wtforms.validators.Length(min=4), | |
|
329 | wtforms.validators.DataRequired()]) |
|
349 | validators=[ | |
|
350 | wtforms.validators.Length(min=4), | |
|
351 | wtforms.validators.DataRequired(), | |
|
352 | ], | |
|
353 | ) | |
|
330 | 354 | new_password_confirm = wtforms.PasswordField( |
|
331 | 'Confirm Password', |
|
355 | "Confirm Password", | |
|
332 | 356 | filters=[strip_filter], |
|
333 | validators=[wtforms.validators.EqualTo('new_password'), | |
|
334 | wtforms.validators.DataRequired()]) |
|
335 | submit = wtforms.SubmitField('Set Password') | |
|
336 | ignore_labels = ['submit'] | |
|
337 | css_classes = {'submit': 'btn btn-primary'} | |
|
357 | validators=[ | |
|
358 | wtforms.validators.EqualTo("new_password"), | |
|
359 | wtforms.validators.DataRequired(), | |
|
360 | ], | |
|
361 | ) | |
|
362 | submit = wtforms.SubmitField("Set Password") | |
|
363 | ignore_labels = ["submit"] | |
|
364 | css_classes = {"submit": "btn btn-primary"} | |
|
338 | 365 | |
|
339 | 366 | |
|
340 | 367 | class CORSTextAreaField(wtforms.StringField): |
|
341 | 368 | """ |
|
342 | 369 | This field represents an HTML ``<textarea>`` and can be used to take |
|
343 | 370 | multi-line input. |
|
344 | 371 | """ |
|
372 | ||
|
345 | 373 | widget = wtforms.widgets.TextArea() |
|
346 | 374 | |
|
347 | 375 | def process_formdata(self, valuelist): |
|
348 | 376 | self.data = [] |
|
349 | 377 | if valuelist: |
|
350 | data = [x.strip() for x in valuelist[0].split('\n')] |
|
378 | data = [x.strip() for x in valuelist[0].split("\n")] | |
|
351 | 379 | for d in data: |
|
352 | 380 | if not d: |
|
353 | 381 | continue |
|
354 | if d.startswith('www.'): |
|
382 | if d.startswith("www."): | |
|
355 | 383 | d = d[4:] |
|
356 | 384 | if data: |
|
357 | 385 | self.data.append(d) |
|
358 | 386 | else: |
|
359 | 387 | self.data = [] |
|
360 | self.data = '\n'.join(self.data) |
|
388 | self.data = "\n".join(self.data) | |
|
361 | 389 | |
|
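CORSTextAreaField.process_formdata normalizes the pasted domain list: one entry per line, blanks skipped, and a leading "www." stripped. A condensed sketch of the intended behavior (the original loop's `if data:` guard is simplified here):

raw = "www.example.com\n\nfoo.org\n"
cleaned = []
for d in (x.strip() for x in raw.split("\n")):
    if d:
        cleaned.append(d[4:] if d.startswith("www.") else d)
print("\n".join(cleaned))  # example.com / foo.org, one per line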
362 | 390 | |
|
363 | 391 | class ApplicationCreateForm(ReactorForm): |
|
364 | 392 | resource_name = wtforms.StringField( |
|
365 | _('Application name'), |
|
393 | _("Application name"), | |
|
366 | 394 | filters=[strip_filter], |
|
367 | validators=[wtforms.validators.Length(min=1), | |
|
368 | wtforms.validators.DataRequired()]) |
|
395 | validators=[ | |
|
396 | wtforms.validators.Length(min=1), | |
|
397 | wtforms.validators.DataRequired(), | |
|
398 | ], | |
|
399 | ) | |
|
369 | 400 | |
|
370 | 401 | domains = CORSTextAreaField( |
|
371 | _('Domain names for CORS headers '), |
|
372 | validators=[wtforms.validators.Length(min=1), | |
|
373 | wtforms.validators.Optional()], | |
|
374 | description='Required for Javascript error ' | |
|
375 | 'tracking (one line one domain, skip http:// part)') | |
|
402 | _("Domain names for CORS headers "), | |
|
403 | validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()], | |
|
404 | description="Required for Javascript error " | |
|
405 | "tracking (one line one domain, skip http:// part)", | |
|
406 | ) | |
|
376 | 407 | |
|
377 | submit = wtforms.SubmitField(_('Create Application')) |
|
408 | submit = wtforms.SubmitField(_("Create Application")) | |
|
378 | 409 | |
|
379 | ignore_labels = ['submit'] |
|
380 | css_classes = {'submit': 'btn btn-primary'} |
|
381 | html_attrs = {'resource_name': {'placeholder': 'Application Name'}, | |
|
382 | 'uptime_url': {'placeholder': 'http://somedomain.com'}} | |
|
410 | ignore_labels = ["submit"] | |
|
411 | css_classes = {"submit": "btn btn-primary"} | |
|
412 | html_attrs = { | |
|
413 | "resource_name": {"placeholder": "Application Name"}, | |
|
414 | "uptime_url": {"placeholder": "http://somedomain.com"}, | |
|
415 | } | |
|
383 | 416 | |
|
384 | 417 | |
|
385 | 418 | class ApplicationUpdateForm(ApplicationCreateForm): |
|
386 | 419 | default_grouping = wtforms.SelectField( |
|
387 | _('Default grouping for errors'), |
|
388 | choices=[('url_type', 'Error Type + location',), | |
|
389 | ('url_traceback', 'Traceback + location',), |
|
390 | ('traceback_server', 'Traceback + Server',), ], |
|
391 | default='url_traceback') | |
|
420 | _("Default grouping for errors"), | |
|
421 | choices=[ | |
|
422 | ("url_type", "Error Type + location"), | |
|
423 | ("url_traceback", "Traceback + location"), | |
|
424 | ("traceback_server", "Traceback + Server"), | |
|
425 | ], | |
|
426 | default="url_traceback", | |
|
427 | ) | |
|
392 | 428 | |
|
393 | 429 | error_report_threshold = wtforms.IntegerField( |
|
394 | _('Alert on error reports'), |
|
430 | _("Alert on error reports"), | |
|
395 | 431 | validators=[ |
|
396 | 432 | wtforms.validators.NumberRange(min=1), |
|
397 | wtforms.validators.DataRequired() | |
|
433 | wtforms.validators.DataRequired(), | |
|
398 | 434 | ], |
|
399 | description='Application requires to send at least this amount of ' |
|
400 | 'error reports per minute to open alert' |
|
435 | description="Application requires to send at least this amount of " | |
|
436 | "error reports per minute to open alert", | |
|
401 | 437 | ) |
|
402 | 438 | |
|
403 | 439 | slow_report_threshold = wtforms.IntegerField( |
|
404 | _('Alert on slow reports'), |
|
405 | validators=[wtforms.validators.NumberRange(min=1), | |
|
406 | wtforms.validators.DataRequired()], |
|
407 | description='Application requires to send at least this amount of ' | |
|
408 | 'slow reports per minute to open alert') | |
|
440 | _("Alert on slow reports"), | |
|
441 | validators=[ | |
|
442 | wtforms.validators.NumberRange(min=1), | |
|
443 | wtforms.validators.DataRequired(), | |
|
444 | ], | |
|
445 | description="Application requires to send at least this amount of " | |
|
446 | "slow reports per minute to open alert", | |
|
447 | ) | |
|
409 | 448 | |
|
410 | 449 | allow_permanent_storage = wtforms.BooleanField( |
|
411 | _('Permanent logs'), |
|
450 | _("Permanent logs"), | |
|
412 | 451 | false_values=FALSE_VALUES, |
|
413 | description=_( | |
|
414 | 'Allow permanent storage of logs in separate DB partitions')) | |
|
452 | description=_("Allow permanent storage of logs in separate DB partitions"), | |
|
453 | ) | |
|
415 | 454 | |
|
416 | submit = wtforms.SubmitField(_('Create Application')) |
|
455 | submit = wtforms.SubmitField(_("Create Application")) | |
|
417 | 456 | |
|
418 | 457 | |
|
419 | 458 | class UserSearchSchemaForm(ReactorForm): |
|
420 | user_name = wtforms.StringField('User Name', |
|
421 | filters=[strip_filter], ) | |
|
459 | user_name = wtforms.StringField("User Name", filters=[strip_filter]) | |
|
422 | 460 | |
|
423 | submit = wtforms.SubmitField(_('Search User')) |
|
424 | ignore_labels = ['submit'] |
|
425 | css_classes = {'submit': 'btn btn-primary'} |
|
461 | submit = wtforms.SubmitField(_("Search User")) | |
|
462 | ignore_labels = ["submit"] | |
|
463 | css_classes = {"submit": "btn btn-primary"} | |
|
426 | 464 | |
|
427 | 465 | '<li class="user_exists"><span></span></li>' |
|
428 | 466 | |
|
429 | 467 | |
|
430 | 468 | class YesNoForm(ReactorForm): |
|
431 | no = wtforms.SubmitField('No', default='') |
|
432 | yes = wtforms.SubmitField('Yes', default='') |
|
433 | ignore_labels = ['submit'] |
|
434 | css_classes = {'submit': 'btn btn-primary'} |
|
469 | no = wtforms.SubmitField("No", default="") | |
|
470 | yes = wtforms.SubmitField("Yes", default="") | |
|
471 | ignore_labels = ["submit"] | |
|
472 | css_classes = {"submit": "btn btn-primary"} | |
|
435 | 473 | |
|
436 | 474 | |
|
437 | status_codes = [('', 'All',), ('500', '500',), ('404', '404',), ] |
|
475 | status_codes = [("", "All"), ("500", "500"), ("404", "404")] | |
|
438 | 476 | |
|
439 | priorities = [('', 'All',)] |
|
477 | priorities = [("", "All")] | |
|
440 | 478 | for i in range(1, 11): |
|
441 | priorities.append((str(i), str(i),)) |
|
479 | priorities.append((str(i), str(i))) | |
|
442 | 480 | |
|
443 | report_status_choices = [('', 'All',), |
|
444 | ('never_reviewed', 'Never revieved',), | |
|
445 | ('reviewed', 'Revieved',), | |
|
446 | ('public', 'Public',), | |
|
447 | ('fixed', 'Fixed',), ] | |
|
481 | report_status_choices = [ | |
|
482 | ("", "All"), | |
|
483 | ("never_reviewed", "Never revieved"), | |
|
484 | ("reviewed", "Revieved"), | |
|
485 | ("public", "Public"), | |
|
486 | ("fixed", "Fixed"), | |
|
487 | ] | |
|
448 | 488 | |
|
449 | 489 | |
|
450 | 490 | class ReportBrowserForm(ReactorForm): |
|
451 | applications = wtforms.SelectMultipleField('Applications', |
|
452 | widget=select_multi_checkbox) | |
|
453 | http_status = wtforms.SelectField('HTTP Status', choices=status_codes) | |
|
454 | priority = wtforms.SelectField('Priority', choices=priorities, default='') | |
|
455 | start_date = wtforms.DateField('Start Date') | |
|
456 | end_date = wtforms.DateField('End Date') |
|
457 | error = wtforms.StringField('Error') | |
|
458 | url_path = wtforms.StringField('URL Path') |
|
459 | url_domain = wtforms.StringField('URL Domain') |
|
460 | report_status = wtforms.SelectField('Report status', | |
|
461 | choices=report_status_choices, | |
|
462 | default='') | |
|
463 | submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">' | |
|
464 | '</span> Filter results', | |
|
465 | widget=button_widget) | |
|
466 | ||
|
467 | ignore_labels = ['submit'] | |
|
468 | css_classes = {'submit': 'btn btn-primary'} | |
|
469 | ||
|
470 | ||
|
471 | slow_report_status_choices = [('', 'All',), | |
|
472 | ('never_reviewed', 'Never revieved',), | |
|
473 | ('reviewed', 'Revieved',), | |
|
474 | ('public', 'Public',), ] | |
|
491 | applications = wtforms.SelectMultipleField( | |
|
492 | "Applications", widget=select_multi_checkbox | |
|
493 | ) | |
|
494 | http_status = wtforms.SelectField("HTTP Status", choices=status_codes) | |
|
495 | priority = wtforms.SelectField("Priority", choices=priorities, default="") | |
|
496 | start_date = wtforms.DateField("Start Date") | |
|
497 | end_date = wtforms.DateField("End Date") | |
|
498 | error = wtforms.StringField("Error") | |
|
499 | url_path = wtforms.StringField("URL Path") | |
|
500 | url_domain = wtforms.StringField("URL Domain") | |
|
501 | report_status = wtforms.SelectField( | |
|
502 | "Report status", choices=report_status_choices, default="" | |
|
503 | ) | |
|
504 | submit = wtforms.SubmitField( | |
|
505 | '<span class="glyphicon glyphicon-search">' "</span> Filter results", | |
|
506 | widget=button_widget, | |
|
507 | ) | |
|
508 | ||
|
509 | ignore_labels = ["submit"] | |
|
510 | css_classes = {"submit": "btn btn-primary"} | |
|
511 | ||
|
512 | ||
|
513 | slow_report_status_choices = [ | |
|
514 | ("", "All"), | |
|
515 | ("never_reviewed", "Never revieved"), | |
|
516 | ("reviewed", "Revieved"), | |
|
517 | ("public", "Public"), | |
|
518 | ] | |
|
475 | 519 | |
|
476 | 520 | |
|
477 | 521 | class BulkOperationForm(ReactorForm): |
|
478 | applications = wtforms.SelectField('Applications') |
|
522 | applications = wtforms.SelectField("Applications") | |
|
479 | 523 | start_date = wtforms.DateField( |
|
480 | 'Start Date', |
|
481 | default=lambda: datetime.datetime.utcnow() - datetime.timedelta( | |
|
482 | days=90)) | |
|
483 | end_date = wtforms.DateField('End Date') |
|
524 | "Start Date", | |
|
525 | default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90), | |
|
526 | ) | |
|
527 | end_date = wtforms.DateField("End Date") | |
|
484 | 528 | confirm = wtforms.BooleanField( |
|
485 | 'Confirm operation', | |
|
486 | validators=[wtforms.validators.DataRequired()]) | |
|
529 | "Confirm operation", validators=[wtforms.validators.DataRequired()] | |
|
530 | ) | |
|
487 | 531 | |
|
488 | 532 | |
|
489 | 533 | class LogBrowserForm(ReactorForm): |
|
490 | applications = wtforms.SelectMultipleField('Applications', |
|
491 | widget=select_multi_checkbox) | |
|
492 | start_date = wtforms.DateField('Start Date') | |
|
493 | log_level = wtforms.StringField('Log level') | |
|
494 | message = wtforms.StringField('Message') |
|
495 | namespace = wtforms.StringField('Namespace') |
|
534 | applications = wtforms.SelectMultipleField( | |
|
535 | "Applications", widget=select_multi_checkbox | |
|
536 | ) | |
|
537 | start_date = wtforms.DateField("Start Date") | |
|
538 | log_level = wtforms.StringField("Log level") | |
|
539 | message = wtforms.StringField("Message") | |
|
540 | namespace = wtforms.StringField("Namespace") | |
|
496 | 541 | submit = wtforms.SubmitField( |
|
497 | 542 | '<span class="glyphicon glyphicon-search"></span> Filter results', |
|
498 | widget=button_widget) |
|
499 | ignore_labels = ['submit'] | |
|
500 | css_classes = {'submit': 'btn btn-primary'} | |
|
543 | widget=button_widget, | |
|
544 | ) | |
|
545 | ignore_labels = ["submit"] | |
|
546 | css_classes = {"submit": "btn btn-primary"} | |
|
501 | 547 | |
|
502 | 548 | |
|
503 | 549 | class CommentForm(ReactorForm): |
|
504 | body = wtforms.TextAreaField('Comment', validators=[ |
|
505 | wtforms.validators.Length(min=1), | |
|
506 | wtforms.validators.DataRequired() | |
|
507 | ]) | |
|
508 | submit = wtforms.SubmitField('Comment', ) | |
|
509 | ignore_labels = ['submit'] | |
|
510 | css_classes = {'submit': 'btn btn-primary'} | |
|
550 | body = wtforms.TextAreaField( | |
|
551 | "Comment", | |
|
552 | validators=[ | |
|
553 | wtforms.validators.Length(min=1), | |
|
554 | wtforms.validators.DataRequired(), | |
|
555 | ], | |
|
556 | ) | |
|
557 | submit = wtforms.SubmitField("Comment") | |
|
558 | ignore_labels = ["submit"] | |
|
559 | css_classes = {"submit": "btn btn-primary"} | |
|
511 | 560 | |
|
512 | 561 | |
|
513 | 562 | class EmailChannelCreateForm(ReactorForm): |
|
514 | email = wtforms.StringField(_('Email Address'), |
|
515 | filters=[strip_filter], | |
|
516 | validators=[email_validator, | |
|
517 | unique_alert_email_validator, | |
|
518 | wtforms.validators.DataRequired()]) | |
|
519 | submit = wtforms.SubmitField('Add email channel', ) | |
|
520 | ignore_labels = ['submit'] | |
|
521 | css_classes = {'submit': 'btn btn-primary'} | |
|
563 | email = wtforms.StringField( | |
|
564 | _("Email Address"), | |
|
565 | filters=[strip_filter], | |
|
566 | validators=[ | |
|
567 | email_validator, | |
|
568 | unique_alert_email_validator, | |
|
569 | wtforms.validators.DataRequired(), | |
|
570 | ], | |
|
571 | ) | |
|
572 | submit = wtforms.SubmitField("Add email channel") | |
|
573 | ignore_labels = ["submit"] | |
|
574 | css_classes = {"submit": "btn btn-primary"} | |
|
522 | 575 | |
|
523 | 576 | |
|
524 | 577 | def gen_user_profile_form(): |
|
525 | 578 | class UserProfileForm(ReactorForm): |
|
526 | 579 | email = wtforms.StringField( |
|
527 | _('Email Address'), |
|
528 | validators=[email_validator, wtforms.validators.DataRequired()]) |
|
529 | first_name = wtforms.StringField(_('First Name')) | |
|
530 | last_name = wtforms.StringField(_('Last Name')) |
|
531 | company_name = wtforms.StringField(_('Company Name')) |
|
532 | company_address = wtforms.TextAreaField(_('Company Address')) |
|
533 | zip_code = wtforms.StringField(_('ZIP code')) |
|
534 | city = wtforms.StringField(_('City')) |
|
535 | notifications = wtforms.BooleanField('Account notifications', | |
|
536 | false_values=FALSE_VALUES) | |
|
537 | submit = wtforms.SubmitField(_('Update Account')) | |
|
538 | ignore_labels = ['submit'] | |
|
539 | css_classes = {'submit': 'btn btn-primary'} | |
|
580 | _("Email Address"), | |
|
581 | validators=[email_validator, wtforms.validators.DataRequired()], | |
|
582 | ) | |
|
583 | first_name = wtforms.StringField(_("First Name")) | |
|
584 | last_name = wtforms.StringField(_("Last Name")) | |
|
585 | company_name = wtforms.StringField(_("Company Name")) | |
|
586 | company_address = wtforms.TextAreaField(_("Company Address")) | |
|
587 | zip_code = wtforms.StringField(_("ZIP code")) | |
|
588 | city = wtforms.StringField(_("City")) | |
|
589 | notifications = wtforms.BooleanField( | |
|
590 | "Account notifications", false_values=FALSE_VALUES | |
|
591 | ) | |
|
592 | submit = wtforms.SubmitField(_("Update Account")) | |
|
593 | ignore_labels = ["submit"] | |
|
594 | css_classes = {"submit": "btn btn-primary"} | |
|
540 | 595 | |
|
541 | 596 | return UserProfileForm |
|
542 | 597 | |
|
543 | 598 | |
|
544 | 599 | class PurgeAppForm(ReactorForm): |
|
545 | 600 | resource_id = wtforms.HiddenField( |
|
546 | 'App Id', | |
|
547 | validators=[wtforms.validators.DataRequired()]) | |
|
548 | days = wtforms.IntegerField( | |
|
549 | 'Days', | |
|
550 | validators=[wtforms.validators.DataRequired()]) | |
|
601 | "App Id", validators=[wtforms.validators.DataRequired()] | |
|
602 | ) | |
|
603 | days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()]) | |
|
551 | 604 | password = wtforms.PasswordField( |
|
552 | 'Admin Password', |
|
553 | validators=[old_password_validator, wtforms.validators.DataRequired()]) |
|
554 | submit = wtforms.SubmitField(_('Purge Data')) | |
|
555 | ignore_labels = ['submit'] | |
|
556 | css_classes = {'submit': 'btn btn-primary'} | |
|
605 | "Admin Password", | |
|
606 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
|
607 | ) | |
|
608 | submit = wtforms.SubmitField(_("Purge Data")) | |
|
609 | ignore_labels = ["submit"] | |
|
610 | css_classes = {"submit": "btn btn-primary"} | |
|
557 | 611 | |
|
558 | 612 | |
|
559 | 613 | class IntegrationRepoForm(ReactorForm): |
|
560 | host_name = wtforms.StringField("Service Host", default='') |
|
614 | host_name = wtforms.StringField("Service Host", default="") | |
|
561 | 615 | user_name = wtforms.StringField( |
|
562 | 616 | "User Name", |
|
563 | 617 | filters=[strip_filter], |
|
564 | validators=[wtforms.validators.DataRequired(), | |
|
565 | wtforms.validators.Length(min=1)]) |
|
618 | validators=[ | |
|
619 | wtforms.validators.DataRequired(), | |
|
620 | wtforms.validators.Length(min=1), | |
|
621 | ], | |
|
622 | ) | |
|
566 | 623 | repo_name = wtforms.StringField( |
|
567 | 624 | "Repo Name", |
|
568 | 625 | filters=[strip_filter], |
|
569 | validators=[wtforms.validators.DataRequired(), | |
|
570 | wtforms.validators.Length(min=1)]) |
|
626 | validators=[ | |
|
627 | wtforms.validators.DataRequired(), | |
|
628 | wtforms.validators.Length(min=1), | |
|
629 | ], | |
|
630 | ) | |
|
571 | 631 | |
|
572 | 632 | |
|
573 | 633 | class IntegrationBitbucketForm(IntegrationRepoForm): |
|
574 | host_name = wtforms.StringField("Service Host", | |
|
575 | default='https://bitbucket.org') | |
|
634 | host_name = wtforms.StringField("Service Host", default="https://bitbucket.org") | |
|
576 | 635 | |
|
577 | 636 | def validate_user_name(self, field): |
|
578 | 637 | try: |
|
579 | 638 | request = pyramid.threadlocal.get_current_request() |
|
580 | 639 | client = BitbucketIntegration.create_client( |
|
581 | request, | |
|
582 | self.user_name.data, | |
|
583 | self.repo_name.data) | |
|
640 | request, self.user_name.data, self.repo_name.data | |
|
641 | ) | |
|
584 | 642 | client.get_assignees() |
|
585 | 643 | except IntegrationException as e: |
|
586 | 644 | raise wtforms.validators.ValidationError(str(e)) |
|
587 | 645 | |
|
588 | 646 | |
|
589 | 647 | class IntegrationGithubForm(IntegrationRepoForm): |
|
590 | host_name = wtforms.StringField("Service Host", | |
|
591 | default='https://github.com') | |
|
648 | host_name = wtforms.StringField("Service Host", default="https://github.com") | |
|
592 | 649 | |
|
593 | 650 | def validate_user_name(self, field): |
|
594 | 651 | try: |
|
595 | 652 | request = pyramid.threadlocal.get_current_request() |
|
596 | 653 | client = GithubIntegration.create_client( |
|
597 | request, | |
|
598 | self.user_name.data, | |
|
599 | self.repo_name.data) | |
|
654 | request, self.user_name.data, self.repo_name.data | |
|
655 | ) | |
|
600 | 656 | client.get_assignees() |
|
601 | 657 | except IntegrationException as e: |
|
602 | 658 | raise wtforms.validators.ValidationError(str(e)) |
|
603 | 659 | raise wtforms.validators.ValidationError(str(e)) |
|
604 | 660 | |
|
605 | 661 | |
|
606 | 662 | def filter_rooms(data): |
|
607 | 663 | if data is not None: |
|
608 | rooms = data.split(',') |
|
609 | return ','.join([r.strip() for r in rooms]) |
|
664 | rooms = data.split(",") | |
|
665 | return ",".join([r.strip() for r in rooms]) | |
|
610 | 666 | |
|
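filter_rooms is used as a WTForms field filter; it canonicalizes a comma-separated room list by stripping whitespace around each id. A short usage example:

def filter_rooms(data):
    if data is not None:
        return ",".join(r.strip() for r in data.split(","))

print(filter_rooms(" 12, 34 ,56 "))  # 12,34,56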
611 | 667 | |
|
612 | 668 | class IntegrationCampfireForm(ReactorForm): |
|
613 | 669 | account = wtforms.StringField( |
|
614 | 'Account', |
|
670 | "Account", | |
|
615 | 671 | filters=[strip_filter], |
|
616 | validators=[wtforms.validators.DataRequired()]) |
|
672 | validators=[wtforms.validators.DataRequired()], | |
|
673 | ) | |
|
617 | 674 | api_token = wtforms.StringField( |
|
618 | 'Api Token', |
|
675 | "Api Token", | |
|
619 | 676 | filters=[strip_filter], |
|
620 | validators=[wtforms.validators.DataRequired()]) |
|
621 | rooms = wtforms.StringField('Room ID list', filters=[filter_rooms]) | |
|
677 | validators=[wtforms.validators.DataRequired()], | |
|
678 | ) | |
|
679 | rooms = wtforms.StringField("Room ID list", filters=[filter_rooms]) | |
|
622 | 680 | |
|
623 | 681 | def validate_api_token(self, field): |
|
624 | 682 | try: |
|
625 | client = CampfireIntegration.create_client(self.api_token.data, |
|
626 | self.account.data) | |
|
683 | client = CampfireIntegration.create_client( | |
|
684 | self.api_token.data, self.account.data | |
|
685 | ) | |
|
627 | 686 | client.get_account() |
|
628 | 687 | except IntegrationException as e: |
|
629 | 688 | raise wtforms.validators.ValidationError(str(e)) |
|
630 | 689 | |
|
631 | 690 | def validate_rooms(self, field): |
|
632 | 691 | if not field.data: |
|
633 | 692 | return |
|
634 | client = CampfireIntegration.create_client(self.api_token.data, |
|
635 | self.account.data) | |
|
693 | client = CampfireIntegration.create_client( | |
|
694 | self.api_token.data, self.account.data | |
|
695 | ) | |
|
636 | 696 | |
|
637 | 697 | try: |
|
638 | room_list = [r['id'] for r in client.get_rooms()] |
|
698 | room_list = [r["id"] for r in client.get_rooms()] | |
|
639 | 699 | except IntegrationException as e: |
|
640 | 700 | raise wtforms.validators.ValidationError(str(e)) |
|
641 | 701 | |
|
642 | rooms = field.data.split(',') |
|
702 | rooms = field.data.split(",") | |
|
643 | 703 | if len(rooms) > 3: |
|
644 | msg = 'You can use up to 3 room ids' |
|
704 | msg = "You can use up to 3 room ids" | |
|
645 | 705 | raise wtforms.validators.ValidationError(msg) |
|
646 | 706 | if rooms: |
|
647 | 707 | for room_id in rooms: |
|
648 | 708 | if int(room_id) not in room_list: |
|
649 | 709 | msg = "Room %s doesn't exist" |
|
650 | 710 | raise wtforms.validators.ValidationError(msg % room_id) |
|
651 | 711 | if not room_id.strip().isdigit(): |
|
652 | msg = 'You must use only integers for room ids' |
|
712 | msg = "You must use only integers for room ids" | |
|
653 | 713 | raise wtforms.validators.ValidationError(msg) |
|
654 | 714 | |
|
655 | submit = wtforms.SubmitField(_('Connect to Campfire')) | 

656 | ignore_labels = ['submit'] | 

657 | css_classes = {'submit': 'btn btn-primary'} | 
|
715 | submit = wtforms.SubmitField(_("Connect to Campfire")) | |
|
716 | ignore_labels = ["submit"] | |
|
717 | css_classes = {"submit": "btn btn-primary"} | |
|
658 | 718 | |
|
659 | 719 | |
|
660 | 720 | def filter_rooms(data): |
|
661 | 721 | if data is not None: |
|
662 | rooms = data.split(',') | 

663 | return ','.join([r.strip() for r in rooms]) | 
|
722 | rooms = data.split(",") | |
|
723 | return ",".join([r.strip() for r in rooms]) | |
|
664 | 724 | |
|
665 | 725 | |
|
666 | 726 | class IntegrationHipchatForm(ReactorForm): |
|
667 | 727 | api_token = wtforms.StringField( |
|
668 | 'Api Token', | 
|
728 | "Api Token", | |
|
669 | 729 | filters=[strip_filter], |
|
670 | validators=[wtforms.validators.DataRequired()]) | 
|
730 | validators=[wtforms.validators.DataRequired()], | |
|
731 | ) | |
|
671 | 732 | rooms = wtforms.StringField( |
|
672 | 'Room ID list', | 
|
733 | "Room ID list", | |
|
673 | 734 | filters=[filter_rooms], |
|
674 | validators=[wtforms.validators.DataRequired()]) | 
|
735 | validators=[wtforms.validators.DataRequired()], | |
|
736 | ) | |
|
675 | 737 | |
|
676 | 738 | def validate_rooms(self, field): |
|
677 | 739 | if not field.data: |
|
678 | 740 | return |
|
679 | 741 | client = HipchatIntegration.create_client(self.api_token.data) |
|
680 | rooms = field.data.split(',') | 
|
742 | rooms = field.data.split(",") | |
|
681 | 743 | if len(rooms) > 3: |
|
682 | msg = 'You can use up to 3 room ids' | 
|
744 | msg = "You can use up to 3 room ids" | |
|
683 | 745 | raise wtforms.validators.ValidationError(msg) |
|
684 | 746 | if rooms: |
|
685 | 747 | for room_id in rooms: |
|
686 | 748 | if not room_id.strip().isdigit(): |
|
687 | msg = 'You must use only integers for room ids' | 
|
749 | msg = "You must use only integers for room ids" | |
|
688 | 750 | raise wtforms.validators.ValidationError(msg) |
|
689 | 751 | try: |
|
690 | client.send({ | 

691 | "message_format": "text", | 

692 | "message": "testing for room existence", | 

693 | "from": "AppEnlight", | 

694 | "room_id": room_id, | 

695 | "color": "green" | 
|
696 | }) | |
|
752 | client.send( | |
|
753 | { | |
|
754 | "message_format": "text", | |
|
755 | "message": "testing for room existence", | |
|
756 | "from": "AppEnlight", | |
|
757 | "room_id": room_id, | |
|
758 | "color": "green", | |
|
759 | } | |
|
760 | ) | |
|
697 | 761 | except IntegrationException as exc: |
|
698 | msg = 'Room id: %s exception: %s' | 
|
699 | raise wtforms.validators.ValidationError(msg % (room_id, | |
|
700 | exc)) | |
|
762 | msg = "Room id: %s exception: %s" | |
|
763 | raise wtforms.validators.ValidationError(msg % (room_id, exc)) | |
|
701 | 764 | |
|
702 | 765 | |
|
703 | 766 | class IntegrationFlowdockForm(ReactorForm): |
|
704 | api_token = wtforms.StringField('API Token', | 
|
705 | filters=[strip_filter], | |
|
706 | validators=[ | |
|
707 | wtforms.validators.DataRequired(), | 
|
708 | ], ) | |
|
767 | api_token = wtforms.StringField( | |
|
768 | "API Token", | |
|
769 | filters=[strip_filter], | |
|
770 | validators=[wtforms.validators.DataRequired()], | |
|
771 | ) | |
|
709 | 772 | |
|
710 | 773 | def validate_api_token(self, field): |
|
711 | 774 | try: |
|
712 | 775 | client = FlowdockIntegration.create_client(self.api_token.data) |
|
713 | 776 | registry = pyramid.threadlocal.get_current_registry() |
|
714 | 777 | payload = { |
|
715 | "source": registry.settings['mailing.from_name'], | 

716 | "from_address": registry.settings['mailing.from_email'], | 
|
778 | "source": registry.settings["mailing.from_name"], | |
|
779 | "from_address": registry.settings["mailing.from_email"], | |
|
717 | 780 | "subject": "Integration test", |
|
718 | 781 | "content": "If you can see this it was successful", |
|
719 | 782 | "tags": ["appenlight"], |
|
720 | "link": registry.settings['mailing.app_url'], | 
|
783 | "link": registry.settings["mailing.app_url"], | |
|
721 | 784 | } |
|
722 | 785 | client.send_to_inbox(payload) |
|
723 | 786 | except IntegrationException as e: |
|
724 | 787 | raise wtforms.validators.ValidationError(str(e)) |
|
725 | 788 | |
|
726 | 789 | |
|
727 | 790 | class IntegrationSlackForm(ReactorForm): |
|
728 | 791 | webhook_url = wtforms.StringField( |
|
729 | 'Reports webhook', | 
|
792 | "Reports webhook", | |
|
730 | 793 | filters=[strip_filter], |
|
731 | validators=[wtforms.validators.DataRequired()]) | 
|
794 | validators=[wtforms.validators.DataRequired()], | |
|
795 | ) | |
|
732 | 796 | |
|
733 | 797 | def validate_webhook_url(self, field): |
|
734 | 798 | registry = pyramid.threadlocal.get_current_registry() |
|
735 | 799 | client = SlackIntegration.create_client(field.data) |
|
736 | link = "<%s|%s>" % (registry.settings['mailing.app_url'], | 

737 | registry.settings['mailing.from_name']) | 
|
800 | link = "<%s|%s>" % ( | |
|
801 | registry.settings["mailing.app_url"], | |
|
802 | registry.settings["mailing.from_name"], | |
|
803 | ) | |
|
738 | 804 | test_data = { |
|
739 | 805 | "username": "AppEnlight", |
|
740 | 806 | "icon_emoji": ":fire:", |
|
741 | 807 | "attachments": [ |
|
742 | {"fallback": "Testing integration channel: %s" % link, | |
|
743 | "pretext": "Testing integration channel: %s" % link, | 
|
744 | "color": "good", | |
|
745 | "fields": [ | 

746 | { | 

747 | "title": "Status", | 

748 | "value": "Integration is working fine", | 

749 | "short": False | 

750 | } | 

751 | ]} | 
|
752 | ] | |
|
808 | { | |
|
809 | "fallback": "Testing integration channel: %s" % link, | |
|
810 | "pretext": "Testing integration channel: %s" % link, | |
|
811 | "color": "good", | |
|
812 | "fields": [ | |
|
813 | { | |
|
814 | "title": "Status", | |
|
815 | "value": "Integration is working fine", | |
|
816 | "short": False, | |
|
817 | } | |
|
818 | ], | |
|
819 | } | |
|
820 | ], | |
|
753 | 821 | } |
|
754 | 822 | try: |
|
755 | 823 | client.make_request(data=test_data) |
|
756 | 824 | except IntegrationException as exc: |
|
757 | 825 | raise wtforms.validators.ValidationError(str(exc)) |
|
758 | 826 | |
|
759 | 827 | |
|
760 | 828 | class IntegrationWebhooksForm(ReactorForm): |
|
761 | 829 | reports_webhook = wtforms.StringField( |
|
762 | 'Reports webhook', | 
|
830 | "Reports webhook", | |
|
763 | 831 | filters=[strip_filter], |
|
764 | validators=[wtforms.validators.DataRequired()]) | 
|
832 | validators=[wtforms.validators.DataRequired()], | |
|
833 | ) | |
|
765 | 834 | alerts_webhook = wtforms.StringField( |
|
766 | 'Alerts webhook', | 
|
835 | "Alerts webhook", | |
|
767 | 836 | filters=[strip_filter], |
|
768 | validators=[wtforms.validators.DataRequired()]) | 
|
769 | submit = wtforms.SubmitField(_('Setup webhooks')) | |
|
770 | ignore_labels = ['submit'] | |
|
771 | css_classes = {'submit': 'btn btn-primary'} | |
|
837 | validators=[wtforms.validators.DataRequired()], | |
|
838 | ) | |
|
839 | submit = wtforms.SubmitField(_("Setup webhooks")) | |
|
840 | ignore_labels = ["submit"] | |
|
841 | css_classes = {"submit": "btn btn-primary"} | |
|
772 | 842 | |
|
773 | 843 | |
|
774 | 844 | class IntegrationJiraForm(ReactorForm): |
|
775 | 845 | host_name = wtforms.StringField( |
|
776 | 'Server URL', | 
|
846 | "Server URL", | |
|
777 | 847 | filters=[strip_filter], |
|
778 | validators=[wtforms.validators.DataRequired()]) | 
|
848 | validators=[wtforms.validators.DataRequired()], | |
|
849 | ) | |
|
779 | 850 | user_name = wtforms.StringField( |
|
780 | 'Username', | 
|
851 | "Username", | |
|
781 | 852 | filters=[strip_filter], |
|
782 | validators=[wtforms.validators.DataRequired()]) | 
|
853 | validators=[wtforms.validators.DataRequired()], | |
|
854 | ) | |
|
783 | 855 | password = wtforms.PasswordField( |
|
784 | 'Password', | 
|
856 | "Password", | |
|
785 | 857 | filters=[strip_filter], |
|
786 | validators=[wtforms.validators.DataRequired()]) | 
|
858 | validators=[wtforms.validators.DataRequired()], | |
|
859 | ) | |
|
787 | 860 | project = wtforms.StringField( |
|
788 | 'Project key', | 
|
861 | "Project key", | |
|
789 | 862 | filters=[uppercase_filter, strip_filter], |
|
790 | validators=[wtforms.validators.DataRequired()]) | 
|
863 | validators=[wtforms.validators.DataRequired()], | |
|
864 | ) | |
|
791 | 865 | |
|
792 | 866 | def validate_project(self, field): |
|
793 | 867 | if not field.data: |
|
794 | 868 | return |
|
795 | 869 | try: |
|
796 | client = JiraClient(self.user_name.data, | 

797 | self.password.data, | 

798 | self.host_name.data, | 

799 | self.project.data) | 
|
870 | client = JiraClient( | |
|
871 | self.user_name.data, | |
|
872 | self.password.data, | |
|
873 | self.host_name.data, | |
|
874 | self.project.data, | |
|
875 | ) | |
|
800 | 876 | except Exception as exc: |
|
801 | 877 | raise wtforms.validators.ValidationError(str(exc)) |
|
802 | 878 | |
|
803 | 879 | room_list = [r.key.upper() for r in client.get_projects()] |
|
804 | 880 | if field.data.upper() not in room_list: |
|
805 | 881 | msg = "Project %s doesn\t exist in your Jira Instance" |
|
806 | 882 | raise wtforms.validators.ValidationError(msg % field.data) |
|
807 | 883 | |
|
808 | 884 | |
|
809 | 885 | def get_deletion_form(resource): |
|
810 | 886 | class F(ReactorForm): |
|
811 | 887 | application_name = wtforms.StringField( |
|
812 | 'Application Name', | 
|
888 | "Application Name", | |
|
813 | 889 | filters=[strip_filter], |
|
814 | validators=[wtforms.validators.AnyOf([resource.resource_name])]) | 
|
890 | validators=[wtforms.validators.AnyOf([resource.resource_name])], | |
|
891 | ) | |
|
815 | 892 | resource_id = wtforms.HiddenField(default=resource.resource_id) |
|
816 | submit = wtforms.SubmitField(_('Delete my application')) | 

817 | ignore_labels = ['submit'] | 

818 | css_classes = {'submit': 'btn btn-danger'} | 
|
893 | submit = wtforms.SubmitField(_("Delete my application")) | |
|
894 | ignore_labels = ["submit"] | |
|
895 | css_classes = {"submit": "btn btn-danger"} | |
|
819 | 896 | |
|
820 | 897 | return F |
|
821 | 898 | |
|
822 | 899 | |
|
823 | 900 | class ChangeApplicationOwnerForm(ReactorForm): |
|
824 | 901 | password = wtforms.PasswordField( |
|
825 | 'Password', | 
|
902 | "Password", | |
|
826 | 903 | filters=[strip_filter], |
|
827 | validators=[old_password_validator, | |
|
828 | wtforms.validators.DataRequired()]) | |
|
904 | validators=[old_password_validator, wtforms.validators.DataRequired()], | |
|
905 | ) | |
|
829 | 906 | |
|
830 | 907 | user_name = wtforms.StringField( |
|
831 | 'New owners username', | 
|
908 | "New owners username", | |
|
832 | 909 | filters=[strip_filter], |
|
833 | validators=[found_username_validator, | |
|
834 | wtforms.validators.DataRequired()]) | |
|
835 | submit = wtforms.SubmitField(_('Transfer ownership of application')) | 

836 | ignore_labels = ['submit'] | 

837 | css_classes = {'submit': 'btn btn-danger'} | 
|
910 | validators=[found_username_validator, wtforms.validators.DataRequired()], | |
|
911 | ) | |
|
912 | submit = wtforms.SubmitField(_("Transfer ownership of application")) | |
|
913 | ignore_labels = ["submit"] | |
|
914 | css_classes = {"submit": "btn btn-danger"} | |
|
838 | 915 | |
|
839 | 916 | |
|
840 | 917 | def default_filename(): |
|
841 | return 'Invoice %s' % datetime.datetime.utcnow().strftime('%Y/%m') | 
|
918 | return "Invoice %s" % datetime.datetime.utcnow().strftime("%Y/%m") | |
|
842 | 919 | |
|
843 | 920 | |
|
844 | 921 | class FileUploadForm(ReactorForm): |
|
845 | title = wtforms.StringField('File Title', | 
|
846 | default=default_filename, | |
|
847 | validators=[wtforms.validators.DataRequired()]) | |
|
848 | file = wtforms.FileField('File') | |
|
922 | title = wtforms.StringField( | |
|
923 | "File Title", | |
|
924 | default=default_filename, | |
|
925 | validators=[wtforms.validators.DataRequired()], | |
|
926 | ) | |
|
927 | file = wtforms.FileField("File") | |
|
849 | 928 | |
|
850 | 929 | def validate_file(self, field): |
|
851 | if not hasattr(field.data, 'file'): | 

852 | raise wtforms.ValidationError('File is missing') | 
|
930 | if not hasattr(field.data, "file"): | |
|
931 | raise wtforms.ValidationError("File is missing") | |
|
853 | 932 | |
|
854 | submit = wtforms.SubmitField(_('Upload')) | 
|
933 | submit = wtforms.SubmitField(_("Upload")) | |
|
855 | 934 | |
|
856 | 935 | |
|
857 | 936 | def get_partition_deletion_form(es_indices, pg_indices): |
|
858 | 937 | class F(ReactorForm): |
|
859 | es_index = wtforms.SelectMultipleField('Elasticsearch', | 
|
860 | choices=[(ix, '') for ix in | |
|
861 | es_indices]) | |
|
862 | pg_index = wtforms.SelectMultipleField('pg', | 
|
863 | choices=[(ix, '') for ix in | |
|
864 | pg_indices]) | |
|
865 | confirm = wtforms.TextField('Confirm', | 
|
866 | filters=[uppercase_filter, strip_filter], | |
|
867 | validators=[ | |
|
868 | wtforms.validators.AnyOf(['CONFIRM']), | |
|
869 | wtforms.validators.DataRequired()]) | |
|
870 | ignore_labels = ['submit'] | |
|
871 | css_classes = {'submit': 'btn btn-danger'} | |
|
938 | es_index = wtforms.SelectMultipleField( | |
|
939 | "Elasticsearch", choices=[(ix, "") for ix in es_indices] | |
|
940 | ) | |
|
941 | pg_index = wtforms.SelectMultipleField( | |
|
942 | "pg", choices=[(ix, "") for ix in pg_indices] | |
|
943 | ) | |
|
944 | confirm = wtforms.TextField( | |
|
945 | "Confirm", | |
|
946 | filters=[uppercase_filter, strip_filter], | |
|
947 | validators=[ | |
|
948 | wtforms.validators.AnyOf(["CONFIRM"]), | |
|
949 | wtforms.validators.DataRequired(), | |
|
950 | ], | |
|
951 | ) | |
|
952 | ignore_labels = ["submit"] | |
|
953 | css_classes = {"submit": "btn btn-danger"} | |
|
872 | 954 | |
|
873 | 955 | return F |
|
874 | 956 | |
|
875 | 957 | |
|
876 | 958 | class GroupCreateForm(ReactorForm): |
|
877 | 959 | group_name = wtforms.StringField( |
|
878 | _('Group Name'), | 
|
960 | _("Group Name"), | |
|
879 | 961 | filters=[strip_filter], |
|
880 | 962 | validators=[ |
|
881 | 963 | wtforms.validators.Length(min=2, max=50), |
|
882 | 964 | unique_groupname_validator, |
|
883 | wtforms.validators.DataRequired() | |
|
884 | ]) | 
|
885 | description = wtforms.StringField(_('Group description')) | |
|
965 | wtforms.validators.DataRequired(), | |
|
966 | ], | |
|
967 | ) | |
|
968 | description = wtforms.StringField(_("Group description")) | |
|
886 | 969 | |
|
887 | 970 | |
|
888 | time_choices = [(k, v['label']) for k, v in h.time_deltas.items()] | 
|
971 | time_choices = [(k, v["label"]) for k, v in h.time_deltas.items()] | |
|
889 | 972 | |
|
890 | 973 | |
|
891 | 974 | class AuthTokenCreateForm(ReactorForm): |
|
892 | description = wtforms.StringField(_('Token description')) | 

893 | expires = wtforms.SelectField('Expires', | 
|
894 | coerce=lambda x: x, | |
|
895 | choices=time_choices, | |
|
896 | validators=[wtforms.validators.Optional()]) | |
|
975 | description = wtforms.StringField(_("Token description")) | |
|
976 | expires = wtforms.SelectField( | |
|
977 | "Expires", | |
|
978 | coerce=lambda x: x, | |
|
979 | choices=time_choices, | |
|
980 | validators=[wtforms.validators.Optional()], | |
|
981 | ) |
@@ -1,50 +1,49 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | """Miscellaneous support packages for {{project}}. |
|
18 | 18 | """ |
|
19 | 19 | import random |
|
20 | 20 | import string |
|
21 | 21 | import importlib |
|
22 | 22 | |
|
23 | 23 | from appenlight_client.exceptions import get_current_traceback |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def generate_random_string(chars=10): |
|
27 | return ''.join(random.sample(string.ascii_letters * 2 + string.digits, | 
|
28 | chars)) | |
|
27 | return "".join(random.sample(string.ascii_letters * 2 + string.digits, chars)) | |
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | def to_integer_safe(input): |
|
32 | 31 | try: |
|
33 | 32 | return int(input) |
|
34 | except (TypeError, ValueError,): | 
|
33 | except (TypeError, ValueError): | |
|
35 | 34 | return None |
|
36 | 35 | |
|
37 | 36 | |
|
38 | 37 | def print_traceback(log): |
|
39 | traceback = get_current_traceback(skip=1, show_hidden_frames=True, | 
|
40 | ignore_system_exceptions=True) | |
|
38 | traceback = get_current_traceback( | |
|
39 | skip=1, show_hidden_frames=True, ignore_system_exceptions=True | |
|
40 | ) | |
|
41 | 41 | exception_text = traceback.exception |
|
42 | 42 | log.error(exception_text) |
|
43 | 43 | log.error(traceback.plaintext) |
|
44 | 44 | del traceback |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | def get_callable(import_string): |
|
48 | import_module, indexer_callable = import_string.split(':') | 
|
49 | return getattr(importlib.import_module(import_module), | |
|
50 | indexer_callable) | |
|
48 | import_module, indexer_callable = import_string.split(":") | |
|
49 | return getattr(importlib.import_module(import_module), indexer_callable) |
@@ -1,80 +1,80 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import datetime |
|
18 | 18 | import logging |
|
19 | 19 | |
|
20 | 20 | from pyramid.httpexceptions import HTTPForbidden, HTTPTooManyRequests |
|
21 | 21 | |
|
22 | 22 | from appenlight.models.services.config import ConfigService |
|
23 | 23 | from appenlight.lib.redis_keys import REDIS_KEYS |
|
24 | 24 | |
|
25 | 25 | log = logging.getLogger(__name__) |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | def rate_limiting(request, resource, section, to_increment=1): |
|
29 | 29 | tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0) |
|
30 | key = REDIS_KEYS['rate_limits'][section].format(tsample, | 
|
31 | resource.resource_id) | |
|
30 | key = REDIS_KEYS["rate_limits"][section].format(tsample, resource.resource_id) | |
|
32 | 31 | redis_pipeline = request.registry.redis_conn.pipeline() |
|
33 | 32 | redis_pipeline.incr(key, to_increment) |
|
34 | 33 | redis_pipeline.expire(key, 3600 * 24) |
|
35 | 34 | results = redis_pipeline.execute() |
|
36 | 35 | current_count = results[0] |
|
37 | config = ConfigService.by_key_and_section(section, 'global') | 
|
36 | config = ConfigService.by_key_and_section(section, "global") | |
|
38 | 37 | limit = config.value if config else 1000 |
|
39 | 38 | if current_count > int(limit): |
|
40 | log.info('RATE LIMITING: {}: {}, {}'.format( | 
|
41 | section, resource, current_count)) | |
|
42 | abort_msg = 'Rate limits are in effect for this application' | |
|
43 | raise HTTPTooManyRequests(abort_msg, | |
|
44 | headers={'X-AppEnlight': abort_msg}) | |
|
39 | log.info("RATE LIMITING: {}: {}, {}".format(section, resource, current_count)) | |
|
40 | abort_msg = "Rate limits are in effect for this application" | |
|
41 | raise HTTPTooManyRequests(abort_msg, headers={"X-AppEnlight": abort_msg}) | |
|
45 | 42 | |
|
46 | 43 | |
|
47 | 44 | def check_cors(request, application, should_return=True): |
|
48 | 45 | """ |
|
49 | 46 | Performs a check and validation if request comes from authorized domain for |
|
50 | 47 | application, otherwise return 403 |
|
51 | 48 | """ |
|
52 | 49 | origin_found = False |
|
53 | origin = request.headers.get('Origin') | 
|
50 | origin = request.headers.get("Origin") | |
|
54 | 51 | if should_return: |
|
55 | log.info('CORS for %s' % origin) | 
|
52 | log.info("CORS for %s" % origin) | |
|
56 | 53 | if not origin: |
|
57 | 54 | return False |
|
58 | for domain in application.domains.split('\n'): | 
|
55 | for domain in application.domains.split("\n"): | |
|
59 | 56 | if domain in origin: |
|
60 | 57 | origin_found = True |
|
61 | 58 | if origin_found: |
|
62 | request.response.headers.add('Access-Control-Allow-Origin', origin) | 

63 | request.response.headers.add('XDomainRequestAllowed', '1') | 

64 | request.response.headers.add('Access-Control-Allow-Methods', | 
|
65 | 'GET, POST, OPTIONS') | |
|
66 | request.response.headers.add('Access-Control-Allow-Headers', | |
|
67 | 'Accept-Encoding, Accept-Language, ' | |
|
68 | 'Content-Type, ' | |
|
69 | 'Depth, User-Agent, X-File-Size, ' | |
|
70 | 'X-Requested-With, If-Modified-Since, ' | |
|
71 | 'X-File-Name, ' | |
|
72 | 'Cache-Control, Host, Pragma, Accept, ' | |
|
73 | 'Origin, Connection, ' | |
|
74 | 'Referer, Cookie, ' | |
|
75 | 'X-appenlight-public-api-key, ' | |
|
76 | 'x-appenlight-public-api-key') | |
|
77 | request.response.headers.add('Access-Control-Max-Age', '86400') | |
|
59 | request.response.headers.add("Access-Control-Allow-Origin", origin) | |
|
60 | request.response.headers.add("XDomainRequestAllowed", "1") | |
|
61 | request.response.headers.add( | |
|
62 | "Access-Control-Allow-Methods", "GET, POST, OPTIONS" | |
|
63 | ) | |
|
64 | request.response.headers.add( | |
|
65 | "Access-Control-Allow-Headers", | |
|
66 | "Accept-Encoding, Accept-Language, " | |
|
67 | "Content-Type, " | |
|
68 | "Depth, User-Agent, X-File-Size, " | |
|
69 | "X-Requested-With, If-Modified-Since, " | |
|
70 | "X-File-Name, " | |
|
71 | "Cache-Control, Host, Pragma, Accept, " | |
|
72 | "Origin, Connection, " | |
|
73 | "Referer, Cookie, " | |
|
74 | "X-appenlight-public-api-key, " | |
|
75 | "x-appenlight-public-api-key", | |
|
76 | ) | |
|
77 | request.response.headers.add("Access-Control-Max-Age", "86400") | |
|
78 | 78 | return request.response |
|
79 | 79 | else: |
|
80 | 80 | return HTTPForbidden() |
@@ -1,184 +1,169 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import copy |
|
18 | 18 | import hashlib |
|
19 | 19 | import inspect |
|
20 | 20 | |
|
21 | 21 | from dogpile.cache import make_region |
|
22 | 22 | from dogpile.cache.util import compat |
|
23 | 23 | |
|
24 | 24 | regions = None |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | def key_mangler(key): |
|
28 | 28 | return "appenlight:dogpile:{}".format(key) |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def hashgen(namespace, fn, to_str=compat.string_type): |
|
32 | 32 | """Return a function that generates a string |
|
33 | 33 | key, based on a given function as well as |
|
34 | 34 | arguments to the returned function itself. |
|
35 | 35 | |
|
36 | 36 | This is used by :meth:`.CacheRegion.cache_on_arguments` |
|
37 | 37 | to generate a cache key from a decorated function. |
|
38 | 38 | |
|
39 | 39 | It can be replaced using the ``function_key_generator`` |
|
40 | 40 | argument passed to :func:`.make_region`. |
|
41 | 41 | |
|
42 | 42 | """ |
|
43 | 43 | |
|
44 | 44 | if namespace is None: |
|
45 | namespace = '%s:%s' % (fn.__module__, fn.__name__) | 
|
45 | namespace = "%s:%s" % (fn.__module__, fn.__name__) | |
|
46 | 46 | else: |
|
47 | namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace) | 
|
47 | namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace) | |
|
48 | 48 | |
|
49 | 49 | args = inspect.getargspec(fn) |
|
50 | has_self = args[0] and args[0][0] in ('self', 'cls') | 
|
50 | has_self = args[0] and args[0][0] in ("self", "cls") | |
|
51 | 51 | |
|
52 | 52 | def generate_key(*args, **kw): |
|
53 | 53 | if kw: |
|
54 | 54 | raise ValueError( |
|
55 | 55 | "dogpile.cache's default key creation " |
|
56 | "function does not accept keyword arguments.") | 
|
56 | "function does not accept keyword arguments." | |
|
57 | ) | |
|
57 | 58 | if has_self: |
|
58 | 59 | args = args[1:] |
|
59 | 60 | |
|
60 | return namespace + "|" + hashlib.sha1( | |
|
61 | " ".join(map(to_str, args)).encode('utf8')).hexdigest() | |
|
61 | return ( | |
|
62 | namespace | |
|
63 | + "|" | |
|
64 | + hashlib.sha1(" ".join(map(to_str, args)).encode("utf8")).hexdigest() | |
|
65 | ) | |
|
62 | 66 | |
|
63 | 67 | return generate_key |
|
64 | 68 | |
|
65 | 69 | |
|
66 | 70 | class CacheRegions(object): |
|
67 | 71 | def __init__(self, settings): |
|
68 | 72 | config_redis = {"arguments": settings} |
|
69 | 73 | |
|
70 | 74 | self.redis_min_1 = make_region( |
|
71 | function_key_generator=hashgen, | |
|
72 | key_mangler=key_mangler).configure( | |
|
73 | "dogpile.cache.redis", | |
|
74 | expiration_time=60, | |
|
75 | **copy.deepcopy(config_redis)) | |
|
75 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
76 | ).configure( | |
|
77 | "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis) | |
|
78 | ) | |
|
76 | 79 | self.redis_min_5 = make_region( |
|
77 | function_key_generator=hashgen, | |
|
78 | key_mangler=key_mangler).configure( | |
|
79 | "dogpile.cache.redis", | |
|
80 | expiration_time=300, | |
|
81 | **copy.deepcopy(config_redis)) | |
|
80 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
81 | ).configure( | |
|
82 | "dogpile.cache.redis", expiration_time=300, **copy.deepcopy(config_redis) | |
|
83 | ) | |
|
82 | 84 | |
|
83 | 85 | self.redis_min_10 = make_region( |
|
84 | function_key_generator=hashgen, | |
|
85 | key_mangler=key_mangler).configure( | |
|
86 | "dogpile.cache.redis", | |
|
87 | expiration_time=60, | |
|
88 | **copy.deepcopy(config_redis)) | |
|
86 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
87 | ).configure( | |
|
88 | "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis) | |
|
89 | ) | |
|
89 | 90 | |
|
90 | 91 | self.redis_min_60 = make_region( |
|
91 | function_key_generator=hashgen, | |
|
92 | key_mangler=key_mangler).configure( | |
|
93 | "dogpile.cache.redis", | |
|
94 | expiration_time=3600, | |
|
95 | **copy.deepcopy(config_redis)) | |
|
92 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
93 | ).configure( | |
|
94 | "dogpile.cache.redis", expiration_time=3600, **copy.deepcopy(config_redis) | |
|
95 | ) | |
|
96 | 96 | |
|
97 | 97 | self.redis_sec_1 = make_region( |
|
98 | function_key_generator=hashgen, | |
|
99 | key_mangler=key_mangler).configure( | |
|
100 | "dogpile.cache.redis", | |
|
101 | expiration_time=1, | |
|
102 | **copy.deepcopy(config_redis)) | |
|
98 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
99 | ).configure( | |
|
100 | "dogpile.cache.redis", expiration_time=1, **copy.deepcopy(config_redis) | |
|
101 | ) | |
|
103 | 102 | |
|
104 | 103 | self.redis_sec_5 = make_region( |
|
105 | function_key_generator=hashgen, | |
|
106 | key_mangler=key_mangler).configure( | |
|
107 | "dogpile.cache.redis", | |
|
108 | expiration_time=5, | |
|
109 | **copy.deepcopy(config_redis)) | |
|
104 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
105 | ).configure( | |
|
106 | "dogpile.cache.redis", expiration_time=5, **copy.deepcopy(config_redis) | |
|
107 | ) | |
|
110 | 108 | |
|
111 | 109 | self.redis_sec_30 = make_region( |
|
112 | function_key_generator=hashgen, | |
|
113 | key_mangler=key_mangler).configure( | |
|
114 | "dogpile.cache.redis", | |
|
115 | expiration_time=30, | |
|
116 | **copy.deepcopy(config_redis)) | |
|
110 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
111 | ).configure( | |
|
112 | "dogpile.cache.redis", expiration_time=30, **copy.deepcopy(config_redis) | |
|
113 | ) | |
|
117 | 114 | |
|
118 | 115 | self.redis_day_1 = make_region( |
|
119 | function_key_generator=hashgen, | |
|
120 | key_mangler=key_mangler).configure( | |
|
121 | "dogpile.cache.redis", | |
|
122 | expiration_time=86400, | |
|
123 | **copy.deepcopy(config_redis)) | |
|
116 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
117 | ).configure( | |
|
118 | "dogpile.cache.redis", expiration_time=86400, **copy.deepcopy(config_redis) | |
|
119 | ) | |
|
124 | 120 | |
|
125 | 121 | self.redis_day_7 = make_region( |
|
126 | function_key_generator=hashgen, | |
|
127 | key_mangler=key_mangler).configure( | |
|
122 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
123 | ).configure( | |
|
128 | 124 | "dogpile.cache.redis", |
|
129 | 125 | expiration_time=86400 * 7, |
|
130 | **copy.deepcopy(config_redis)) | 
|
126 | **copy.deepcopy(config_redis) | |
|
127 | ) | |
|
131 | 128 | |
|
132 | 129 | self.redis_day_30 = make_region( |
|
133 | function_key_generator=hashgen, | |
|
134 | key_mangler=key_mangler).configure( | |
|
130 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
131 | ).configure( | |
|
135 | 132 | "dogpile.cache.redis", |
|
136 | 133 | expiration_time=86400 * 30, |
|
137 | **copy.deepcopy(config_redis)) | 
|
134 | **copy.deepcopy(config_redis) | |
|
135 | ) | |
|
138 | 136 | |
|
139 | 137 | self.memory_day_1 = make_region( |
|
140 | function_key_generator=hashgen, | |
|
141 | key_mangler=key_mangler).configure( | |
|
142 | "dogpile.cache.memory", | |
|
143 | expiration_time=86400, | |
|
144 | **copy.deepcopy(config_redis)) | |
|
138 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
139 | ).configure( | |
|
140 | "dogpile.cache.memory", expiration_time=86400, **copy.deepcopy(config_redis) | |
|
141 | ) | |
|
145 | 142 | |
|
146 | 143 | self.memory_sec_1 = make_region( |
|
147 | function_key_generator=hashgen, | |
|
148 | key_mangler=key_mangler).configure( | |
|
149 | "dogpile.cache.memory", | |
|
150 | expiration_time=1) | |
|
144 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
145 | ).configure("dogpile.cache.memory", expiration_time=1) | |
|
151 | 146 | |
|
152 | 147 | self.memory_sec_5 = make_region( |
|
153 | function_key_generator=hashgen, | |
|
154 | key_mangler=key_mangler).configure( | |
|
155 | "dogpile.cache.memory", | |
|
156 | expiration_time=5) | |
|
148 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
149 | ).configure("dogpile.cache.memory", expiration_time=5) | |
|
157 | 150 | |
|
158 | 151 | self.memory_min_1 = make_region( |
|
159 | function_key_generator=hashgen, | |
|
160 | key_mangler=key_mangler).configure( | |
|
161 | "dogpile.cache.memory", | |
|
162 | expiration_time=60) | |
|
152 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
153 | ).configure("dogpile.cache.memory", expiration_time=60) | |
|
163 | 154 | |
|
164 | 155 | self.memory_min_5 = make_region( |
|
165 | function_key_generator=hashgen, | |
|
166 | key_mangler=key_mangler).configure( | |
|
167 | "dogpile.cache.memory", | |
|
168 | expiration_time=300) | |
|
156 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
157 | ).configure("dogpile.cache.memory", expiration_time=300) | |
|
169 | 158 | |
|
170 | 159 | self.memory_min_10 = make_region( |
|
171 | function_key_generator=hashgen, | |
|
172 | key_mangler=key_mangler).configure( | |
|
173 | "dogpile.cache.memory", | |
|
174 | expiration_time=600) | |
|
160 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
161 | ).configure("dogpile.cache.memory", expiration_time=600) | |
|
175 | 162 | |
|
176 | 163 | self.memory_min_60 = make_region( |
|
177 | function_key_generator=hashgen, | |
|
178 | key_mangler=key_mangler).configure( | |
|
179 | "dogpile.cache.memory", | |
|
180 | expiration_time=3600) | |
|
164 | function_key_generator=hashgen, key_mangler=key_mangler | |
|
165 | ).configure("dogpile.cache.memory", expiration_time=3600) | |
|
181 | 166 | |
|
182 | 167 | |
|
183 | 168 | def get_region(region): |
|
184 | 169 | return getattr(regions, region) |
@@ -1,92 +1,92 b'' | |||
|
1 | 1 | import inspect |
|
2 | 2 | import logging |
|
3 | 3 | |
|
4 | 4 | from pyramid.config import Configurator |
|
5 | 5 | |
|
6 | 6 | log = logging.getLogger(__name__) |
|
7 | 7 | |
|
8 | ||
|
8 | 9 | class InspectProxy(object): |
|
9 | 10 | """ |
|
10 | 11 | Proxy to the `inspect` module that allows us to use the pyramid include |
|
11 | 12 | mechanism for cythonized modules without source file. |
|
12 | 13 | """ |
|
13 | 14 | |
|
14 | 15 | def _get_cyfunction_func_code(self, cyfunction): |
|
15 | 16 | """ |
|
16 | 17 | Unpack the `func_code` attribute of a cython function. |
|
17 | 18 | """ |
|
18 | 19 | if inspect.ismethod(cyfunction): |
|
19 | 20 | cyfunction = cyfunction.im_func |
|
20 | return getattr(cyfunction, 'func_code') | 
|
21 | return getattr(cyfunction, "func_code") | |
|
21 | 22 | |
|
22 | 23 | def getmodule(self, *args, **kwds): |
|
23 | 24 | """ |
|
24 | 25 | Simple proxy to `inspect.getmodule`. |
|
25 | 26 | """ |
|
26 | 27 | return inspect.getmodule(*args, **kwds) |
|
27 | 28 | |
|
28 | 29 | def getsourcefile(self, obj): |
|
29 | 30 | """ |
|
30 | 31 | Proxy to `inspect.getsourcefile` or `inspect.getfile` depending on if |
|
31 | 32 | it's called to look up the source file that contains the magic pyramid |
|
32 | 33 | `includeme` callable. |
|
33 | 34 | |
|
34 | 35 | For cythonized modules the source file may be deleted. Therefore we |
|
35 | 36 | return the result of `inspect.getfile` instead. In the case of the |
|
36 | 37 | `configurator.include` method this is OK, because the result is passed |
|
37 | 38 | to `os.path.dirname` which strips the file name. So it doesn't matter |
|
38 | 39 | if we return the path to the source file or another file in the same |
|
39 | 40 | directory. |
|
40 | 41 | """ |
|
41 | 42 | # Check if it's called to look up the source file that contains the |
|
42 | 43 | # magic pyramid `includeme` callable. |
|
43 | if getattr(obj, '__name__') == 'includeme': | 
|
44 | if getattr(obj, "__name__") == "includeme": | |
|
44 | 45 | try: |
|
45 | 46 | return inspect.getfile(obj) |
|
46 | 47 | except TypeError as e: |
|
47 | 48 | # Cython functions are not recognized as functions by the |
|
48 | 49 | # inspect module. We have to unpack the func_code attribute |
|
49 | 50 | # ourself. |
|
50 | if 'cyfunction' in e.message: | 
|
51 | if "cyfunction" in e.message: | |
|
51 | 52 | obj = self._get_cyfunction_func_code(obj) |
|
52 | 53 | return inspect.getfile(obj) |
|
53 | 54 | raise |
|
54 | 55 | else: |
|
55 | 56 | return inspect.getsourcefile(obj) |
|
56 | 57 | |
|
57 | 58 | |
|
58 | 59 | class CythonCompatConfigurator(Configurator): |
|
59 | 60 | """ |
|
60 | 61 | Customized configurator to replace the inspect class attribute with |
|
61 | 62 | a custom one that is cython compatible. |
|
62 | 63 | """ |
|
64 | ||
|
63 | 65 | inspect = InspectProxy() |
|
64 | 66 | |
|
65 | 67 | |
|
66 | 68 | def register_appenlight_plugin(config, plugin_name, plugin_config): |
|
67 | 69 | def register(): |
|
68 | log.warning('Registering plugin: {}'.format(plugin_name)) | 
|
70 | log.warning("Registering plugin: {}".format(plugin_name)) | |
|
69 | 71 | if plugin_name not in config.registry.appenlight_plugins: |
|
70 | 72 | config.registry.appenlight_plugins[plugin_name] = { |
|
71 | 'javascript': None, | 

72 | 'static': None, | 

73 | 'css': None, | 

74 | 'celery_tasks': None, | 

75 | 'celery_beats': None, | 

76 | 'fulltext_indexer': None, | 

77 | 'sqlalchemy_migrations': None, | 

78 | 'default_values_setter': None, | 

79 | 'header_html': None, | 

80 | 'resource_types': [], | 

81 | 'url_gen': None, | 
|
73 | "javascript": None, | |
|
74 | "static": None, | |
|
75 | "css": None, | |
|
76 | "celery_tasks": None, | |
|
77 | "celery_beats": None, | |
|
78 | "fulltext_indexer": None, | |
|
79 | "sqlalchemy_migrations": None, | |
|
80 | "default_values_setter": None, | |
|
81 | "header_html": None, | |
|
82 | "resource_types": [], | |
|
83 | "url_gen": None, | |
|
82 | 84 | } |
|
83 | config.registry.appenlight_plugins[plugin_name].update( | |
|
84 | plugin_config) | |
|
85 | config.registry.appenlight_plugins[plugin_name].update(plugin_config) | |
|
85 | 86 | # inform AE what kind of resource types we have available |
|
86 | 87 | # so we can avoid failing when a plugin is removed but data |
|
87 | 88 | # is still present in the db |
|
88 | if plugin_config.get('resource_types'): | 
|
89 | config.registry.resource_types.extend( | |
|
90 | plugin_config['resource_types']) | |
|
89 | if plugin_config.get("resource_types"): | |
|
90 | config.registry.resource_types.extend(plugin_config["resource_types"]) | |
|
91 | 91 | |
|
92 | config.action('appenlight_plugin={}'.format(plugin_name), register) | 
|
92 | config.action("appenlight_plugin={}".format(plugin_name), register) |
@@ -1,58 +1,58 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | # this gets set on runtime |
|
18 | 18 | from cryptography.fernet import Fernet |
|
19 | 19 | |
|
20 | 20 | ENCRYPTION_SECRET = None |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def encrypt_fernet(value): |
|
24 | 24 | # avoid double encryption |
|
25 | 25 | # not sure if this is needed but it won't hurt too much to have this |
|
26 | if value.startswith('enc$fernet$'): | 
|
26 | if value.startswith("enc$fernet$"): | |
|
27 | 27 | return value |
|
28 | 28 | f = Fernet(ENCRYPTION_SECRET) |
|
29 | return 'enc$fernet${}'.format(f.encrypt(value.encode('utf8')).decode('utf8')) | 
|
29 | return "enc$fernet${}".format(f.encrypt(value.encode("utf8")).decode("utf8")) | |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def decrypt_fernet(value): |
|
33 | parts = value.split('$', 3) | 
|
33 | parts = value.split("$", 3) | |
|
34 | 34 | if not len(parts) == 3: |
|
35 | 35 | # not encrypted values |
|
36 | 36 | return value |
|
37 | 37 | else: |
|
38 | 38 | f = Fernet(ENCRYPTION_SECRET) |
|
39 | decrypted_data = f.decrypt(parts[2].encode('utf8')).decode('utf8') | 
|
39 | decrypted_data = f.decrypt(parts[2].encode("utf8")).decode("utf8") | |
|
40 | 40 | return decrypted_data |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | def encrypt_dictionary_keys(_dict, exclude_keys=None): |
|
44 | 44 | if not exclude_keys: |
|
45 | 45 | exclude_keys = [] |
|
46 | 46 | keys = [k for k in _dict.keys() if k not in exclude_keys] |
|
47 | 47 | for k in keys: |
|
48 | 48 | _dict[k] = encrypt_fernet(_dict[k]) |
|
49 | 49 | return _dict |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | def decrypt_dictionary_keys(_dict, exclude_keys=None): |
|
53 | 53 | if not exclude_keys: |
|
54 | 54 | exclude_keys = [] |
|
55 | 55 | keys = [k for k in _dict.keys() if k not in exclude_keys] |
|
56 | 56 | for k in keys: |
|
57 | 57 | _dict[k] = decrypt_fernet(_dict[k]) |
|
58 | 58 | return _dict |
@@ -1,88 +1,90 b'' | |||
|
1 | 1 | import collections |
|
2 | ||
|
2 | 3 | # -*- coding: utf-8 -*- |
|
3 | 4 | |
|
4 | 5 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
5 | 6 | # |
|
6 | 7 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
7 | 8 | # you may not use this file except in compliance with the License. |
|
8 | 9 | # You may obtain a copy of the License at |
|
9 | 10 | # |
|
10 | 11 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
11 | 12 | # |
|
12 | 13 | # Unless required by applicable law or agreed to in writing, software |
|
13 | 14 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
14 | 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
15 | 16 | # See the License for the specific language governing permissions and |
|
16 | 17 | # limitations under the License. |
|
17 | 18 | |
|
18 | 19 | |
|
19 | 20 | class StupidEnum(object): |
|
20 | 21 | @classmethod |
|
21 | 22 | def set_inverse(cls): |
|
22 | 23 | cls._inverse_values = dict( |
|
23 | (y, x) for x, y in vars(cls).items() if | |
|
24 | not x.startswith('_') and not callable(y) | |
|
24 | (y, x) | |
|
25 | for x, y in vars(cls).items() | |
|
26 | if not x.startswith("_") and not callable(y) | |
|
25 | 27 | ) |
|
26 | 28 | |
|
27 | 29 | @classmethod |
|
28 | 30 | def key_from_value(cls, value): |
|
29 | if not hasattr(cls, '_inverse_values'): | 
|
31 | if not hasattr(cls, "_inverse_values"): | |
|
30 | 32 | cls.set_inverse() |
|
31 | 33 | return cls._inverse_values.get(value) |
|
32 | 34 | |
|
33 | 35 | |
|
34 | 36 | class ReportType(StupidEnum): |
|
35 | 37 | unknown = 0 |
|
36 | 38 | error = 1 |
|
37 | 39 | not_found = 2 |
|
38 | 40 | slow = 3 |
|
39 | 41 | |
|
40 | 42 | |
|
41 | 43 | class Language(StupidEnum): |
|
42 | 44 | unknown = 0 |
|
43 | 45 | python = 1 |
|
44 | 46 | javascript = 2 |
|
45 | 47 | java = 3 |
|
46 | 48 | objectivec = 4 |
|
47 | 49 | swift = 5 |
|
48 | 50 | cpp = 6 |
|
49 | 51 | basic = 7 |
|
50 | 52 | csharp = 8 |
|
51 | 53 | php = 9 |
|
52 | 54 | perl = 10 |
|
53 | 55 | vb = 11 |
|
54 | 56 | vbnet = 12 |
|
55 | 57 | ruby = 13 |
|
56 | 58 | fsharp = 14 |
|
57 | 59 | actionscript = 15 |
|
58 | 60 | go = 16 |
|
59 | 61 | scala = 17 |
|
60 | 62 | haskell = 18 |
|
61 | 63 | erlang = 19 |
|
62 | 64 | haxe = 20 |
|
63 | 65 | scheme = 21 |
|
64 | 66 | |
|
65 | 67 | |
|
66 | 68 | class LogLevel(StupidEnum): |
|
67 | 69 | UNKNOWN = 0 |
|
68 | 70 | DEBUG = 2 |
|
69 | 71 | TRACE = 4 |
|
70 | 72 | INFO = 6 |
|
71 | 73 | WARNING = 8 |
|
72 | 74 | ERROR = 10 |
|
73 | 75 | CRITICAL = 12 |
|
74 | 76 | FATAL = 14 |
|
75 | 77 | |
|
76 | 78 | |
|
77 | 79 | class LogLevelPython(StupidEnum): |
|
78 | 80 | CRITICAL = 50 |
|
79 | 81 | ERROR = 40 |
|
80 | 82 | WARNING = 30 |
|
81 | 83 | INFO = 20 |
|
82 | 84 | DEBUG = 10 |
|
83 | 85 | NOTSET = 0 |
|
84 | 86 | |
|
85 | 87 | |
|
86 | 88 | class ParsedSentryEventType(StupidEnum): |
|
87 | 89 | ERROR_REPORT = 1 |
|
88 | 90 | LOG = 2 |
@@ -1,148 +1,143 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | """ |
|
18 | 18 | ex-json borrowed from Marcin Kuzminski |
|
19 | 19 | |
|
20 | 20 | source: https://secure.rhodecode.org/ext-json |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | import datetime |
|
24 | 24 | import functools |
|
25 | 25 | import decimal |
|
26 | 26 | import imp |
|
27 | 27 | |
|
28 | __all__ = ['json', 'simplejson', 'stdlibjson'] | 
|
28 | __all__ = ["json", "simplejson", "stdlibjson"] | |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def _is_aware(value): |
|
32 | 32 | """ |
|
33 | 33 | Determines if a given datetime.time is aware. |
|
34 | 34 | |
|
35 | 35 | The logic is described in Python's docs: |
|
36 | 36 | http://docs.python.org/library/datetime.html#datetime.tzinfo |
|
37 | 37 | """ |
|
38 | return (value.tzinfo is not None | |
|
39 | and value.tzinfo.utcoffset(value) is not None) | |
|
38 | return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None | |
|
40 | 39 | |
|
41 | 40 | |
|
42 | 41 | def _obj_dump(obj): |
|
43 | 42 | """ |
|
44 | 43 | Custom function for dumping objects to JSON, if obj has __json__ attribute |
|
45 | 44 | or method defined it will be used for serialization |
|
46 | 45 | |
|
47 | 46 | :param obj: |
|
48 | 47 | """ |
|
49 | 48 | |
|
50 | 49 | if isinstance(obj, complex): |
|
51 | 50 | return [obj.real, obj.imag] |
|
52 | 51 | # See "Date Time String Format" in the ECMA-262 specification. |
|
53 | 52 | # some code borrowed from django 1.4 |
|
54 | 53 | elif isinstance(obj, datetime.datetime): |
|
55 | 54 | r = obj.isoformat() |
|
56 | 55 | # if obj.microsecond: |
|
57 | 56 | # r = r[:23] + r[26:] |
|
58 | if r.endswith('+00:00'): | 

59 | r = r[:-6] + 'Z' | 
|
57 | if r.endswith("+00:00"): | |
|
58 | r = r[:-6] + "Z" | |
|
60 | 59 | return r |
|
61 | 60 | elif isinstance(obj, datetime.date): |
|
62 | 61 | return obj.isoformat() |
|
63 | 62 | elif isinstance(obj, decimal.Decimal): |
|
64 | 63 | return str(obj) |
|
65 | 64 | elif isinstance(obj, datetime.time): |
|
66 | 65 | if _is_aware(obj): |
|
67 | 66 | raise ValueError("JSON can't represent timezone-aware times.") |
|
68 | 67 | r = obj.isoformat() |
|
69 | 68 | if obj.microsecond: |
|
70 | 69 | r = r[:12] |
|
71 | 70 | return r |
|
72 | 71 | elif isinstance(obj, set): |
|
73 | 72 | return list(obj) |
|
74 | elif hasattr(obj, '__json__'): | 
|
73 | elif hasattr(obj, "__json__"): | |
|
75 | 74 | if callable(obj.__json__): |
|
76 | 75 | return obj.__json__() |
|
77 | 76 | else: |
|
78 | 77 | return obj.__json__ |
|
79 | 78 | else: |
|
80 | 79 | raise NotImplementedError |
|
81 | 80 | |
|
82 | 81 | |
|
83 | 82 | # Import simplejson |
|
84 | 83 | try: |
|
85 | 84 | # import simplejson initially |
|
86 | _sj = imp.load_module('_sj', *imp.find_module('simplejson')) | 
|
87 | ||
|
85 | _sj = imp.load_module("_sj", *imp.find_module("simplejson")) | |
|
88 | 86 | |
|
89 | 87 | def extended_encode(obj): |
|
90 | 88 | try: |
|
91 | 89 | return _obj_dump(obj) |
|
92 | 90 | except NotImplementedError: |
|
93 | 91 | pass |
|
94 | 92 | raise TypeError("%r is not JSON serializable" % (obj,)) |
|
95 | 93 | |
|
96 | ||
|
97 | 94 | # we handle decimals our own it makes unified behavior of json vs |
|
98 | 95 | # simplejson |
|
99 | sj_version = [int(x) for x in _sj.__version__.split('.')] | 
|
96 | sj_version = [int(x) for x in _sj.__version__.split(".")] | |
|
100 | 97 | major, minor = sj_version[0], sj_version[1] |
|
101 | 98 | if major < 2 or (major == 2 and minor < 1): |
|
102 | 99 | # simplejson < 2.1 doesnt support use_decimal |
|
103 | _sj.dumps = functools.partial( | |
|
104 | _sj.dumps, default=extended_encode) | 
|
105 | _sj.dump = functools.partial( | |
|
106 | _sj.dump, default=extended_encode) | |
|
100 | _sj.dumps = functools.partial(_sj.dumps, default=extended_encode) | |
|
101 | _sj.dump = functools.partial(_sj.dump, default=extended_encode) | |
|
107 | 102 | else: |
|
108 | 103 | _sj.dumps = functools.partial( |
|
109 | _sj.dumps, default=extended_encode, use_decimal=False) | 
|
104 | _sj.dumps, default=extended_encode, use_decimal=False | |
|
105 | ) | |
|
110 | 106 | _sj.dump = functools.partial( |
|
111 | _sj.dump, default=extended_encode, use_decimal=False) | 
|
107 | _sj.dump, default=extended_encode, use_decimal=False | |
|
108 | ) | |
|
112 | 109 | simplejson = _sj |
|
113 | 110 | |
|
114 | 111 | except ImportError: |
|
115 | 112 | # no simplejson set it to None |
|
116 | 113 | simplejson = None |
|
117 | 114 | |
|
118 | 115 | try: |
|
119 | 116 | # simplejson not found try out regular json module |
|
120 | _json = imp.load_module('_json', *imp.find_module('json')) | 
|
121 | ||
|
117 | _json = imp.load_module("_json", *imp.find_module("json")) | |
|
122 | 118 | |
|
123 | 119 | # extended JSON encoder for json |
|
124 | 120 | class ExtendedEncoder(_json.JSONEncoder): |
|
125 | 121 | def default(self, obj): |
|
126 | 122 | try: |
|
127 | 123 | return _obj_dump(obj) |
|
128 | 124 | except NotImplementedError: |
|
129 | 125 | pass |
|
130 | 126 | raise TypeError("%r is not JSON serializable" % (obj,)) |
|
131 | 127 | |
|
132 | ||
|
133 | 128 | # monkey-patch JSON encoder to use extended version |
|
134 | 129 | _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder) |
|
135 | 130 | _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder) |
|
136 | 131 | |
|
137 | 132 | except ImportError: |
|
138 | 133 | json = None |
|
139 | 134 | |
|
140 | 135 | stdlibjson = _json |
|
141 | 136 | |
|
142 | 137 | # set all available json modules |
|
143 | 138 | if simplejson: |
|
144 | 139 | json = _sj |
|
145 | 140 | elif _json: |
|
146 | 141 | json = _json |
|
147 | 142 | else: |
|
148 | raise ImportError('Could not find any json modules') | 
|
143 | raise ImportError("Could not find any json modules") |
@@ -1,119 +1,160 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | """ |
|
18 | 18 | Helper functions |
|
19 | 19 | """ |
|
20 | 20 | import copy |
|
21 | 21 | import datetime |
|
22 | 22 | |
|
23 | 23 | from collections import namedtuple, OrderedDict |
|
24 | 24 | |
|
25 | 25 | _ = lambda x: x |
|
26 | 26 | |
|
27 | 27 | time_deltas = OrderedDict() |
|
28 | 28 | |
|
29 | time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1), | |
|
30 | 'label': '1 minute', 'minutes': 1} | |
|
31 | ||
|
32 | time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5), | |
|
33 | 'label': '5 minutes', 'minutes': 5} | |
|
34 | time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30), | |
|
35 | 'label': '30 minutes', 'minutes': 30} | |
|
36 | time_deltas['1h'] = {'delta': datetime.timedelta(hours=1), | 
|
37 | 'label': '60 minutes', 'minutes': 60} | |
|
38 | time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours', | |
|
39 | 'minutes': 60 * 4} | |
|
40 | time_deltas['12h'] = {'delta': datetime.timedelta(hours=12), | |
|
41 | 'label': '12 hours', 'minutes': 60 * 12} | |
|
42 | time_deltas['24h'] = {'delta': datetime.timedelta(hours=24), | |
|
43 | 'label': '24 hours', 'minutes': 60 * 24} | |
|
44 | time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days', | |
|
45 | 'minutes': 60 * 24 * 3} | |
|
46 | time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days', | |
|
47 | 'minutes': 60 * 24 * 7} | |
|
48 | time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days', | |
|
49 | 'minutes': 60 * 24 * 14} | |
|
50 | time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days', | |
|
51 | 'minutes': 60 * 24 * 31} | |
|
52 | time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3), | |
|
53 | 'label': '3 months', | |
|
54 | 'minutes': 60 * 24 * 31 * 3} | |
|
55 | time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6), | |
|
56 | 'label': '6 months', | |
|
57 | 'minutes': 60 * 24 * 31 * 6} | |
|
58 | time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12), | |
|
59 | 'label': '12 months', | |
|
60 | 'minutes': 60 * 24 * 31 * 12} | |
|
29 | time_deltas["1m"] = { | |
|
30 | "delta": datetime.timedelta(minutes=1), | |
|
31 | "label": "1 minute", | |
|
32 | "minutes": 1, | |
|
33 | } | |
|
34 | ||
|
35 | time_deltas["5m"] = { | |
|
36 | "delta": datetime.timedelta(minutes=5), | |
|
37 | "label": "5 minutes", | |
|
38 | "minutes": 5, | |
|
39 | } | |
|
40 | time_deltas["30m"] = { | |
|
41 | "delta": datetime.timedelta(minutes=30), | |
|
42 | "label": "30 minutes", | |
|
43 | "minutes": 30, | |
|
44 | } | |
|
45 | time_deltas["1h"] = { | |
|
46 | "delta": datetime.timedelta(hours=1), | |
|
47 | "label": "60 minutes", | |
|
48 | "minutes": 60, | |
|
49 | } | |
|
50 | time_deltas["4h"] = { | |
|
51 | "delta": datetime.timedelta(hours=4), | |
|
52 | "label": "4 hours", | |
|
53 | "minutes": 60 * 4, | |
|
54 | } | |
|
55 | time_deltas["12h"] = { | |
|
56 | "delta": datetime.timedelta(hours=12), | |
|
57 | "label": "12 hours", | |
|
58 | "minutes": 60 * 12, | |
|
59 | } | |
|
60 | time_deltas["24h"] = { | |
|
61 | "delta": datetime.timedelta(hours=24), | |
|
62 | "label": "24 hours", | |
|
63 | "minutes": 60 * 24, | |
|
64 | } | |
|
65 | time_deltas["3d"] = { | |
|
66 | "delta": datetime.timedelta(days=3), | |
|
67 | "label": "3 days", | |
|
68 | "minutes": 60 * 24 * 3, | |
|
69 | } | |
|
70 | time_deltas["1w"] = { | |
|
71 | "delta": datetime.timedelta(days=7), | |
|
72 | "label": "7 days", | |
|
73 | "minutes": 60 * 24 * 7, | |
|
74 | } | |
|
75 | time_deltas["2w"] = { | |
|
76 | "delta": datetime.timedelta(days=14), | |
|
77 | "label": "14 days", | |
|
78 | "minutes": 60 * 24 * 14, | |
|
79 | } | |
|
80 | time_deltas["1M"] = { | |
|
81 | "delta": datetime.timedelta(days=31), | |
|
82 | "label": "31 days", | |
|
83 | "minutes": 60 * 24 * 31, | |
|
84 | } | |
|
85 | time_deltas["3M"] = { | |
|
86 | "delta": datetime.timedelta(days=31 * 3), | |
|
87 | "label": "3 months", | |
|
88 | "minutes": 60 * 24 * 31 * 3, | |
|
89 | } | |
|
90 | time_deltas["6M"] = { | |
|
91 | "delta": datetime.timedelta(days=31 * 6), | |
|
92 | "label": "6 months", | |
|
93 | "minutes": 60 * 24 * 31 * 6, | |
|
94 | } | |
|
95 | time_deltas["12M"] = { | |
|
96 | "delta": datetime.timedelta(days=31 * 12), | |
|
97 | "label": "12 months", | |
|
98 | "minutes": 60 * 24 * 31 * 12, | |
|
99 | } | |
|
61 | 100 | |
|
62 | 101 | # used in json representation |
|
63 | time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']}) | |
|
64 | for k, v in time_deltas.items()]) | |
|
65 | FlashMsg = namedtuple('FlashMsg', ['msg', 'level']) | |
|
102 | time_options = dict( | |
|
103 | [ | |
|
104 | (k, {"label": v["label"], "minutes": v["minutes"]}) | |
|
105 | for k, v in time_deltas.items() | |
|
106 | ] | |
|
107 | ) | |
|
108 | FlashMsg = namedtuple("FlashMsg", ["msg", "level"]) | |
|
66 | 109 | |
|
67 | 110 | |
|
68 | 111 | def get_flash(request): |
|
69 | 112 | messages = [] |
|
70 | 113 | messages.extend( |
|
71 | [FlashMsg(msg, 'error') | |
|
72 | for msg in request.session.peek_flash('error')]) | |
|
73 | messages.extend([FlashMsg(msg, 'warning') | |
|
74 | for msg in request.session.peek_flash('warning')]) | |
|
114 | [FlashMsg(msg, "error") for msg in request.session.peek_flash("error")] | |
|
115 | ) | |
|
75 | 116 | messages.extend( |
|
76 | [FlashMsg(msg, 'notice') for msg in request.session.peek_flash()]) | 
|
117 | [FlashMsg(msg, "warning") for msg in request.session.peek_flash("warning")] | |
|
118 | ) | |
|
119 | messages.extend([FlashMsg(msg, "notice") for msg in request.session.peek_flash()]) | |
|
77 | 120 | return messages |
|
78 | 121 | |
|
79 | 122 | |
|
80 | 123 | def clear_flash(request): |
|
81 | request.session.pop_flash('error') | 

82 | request.session.pop_flash('warning') | 
|
124 | request.session.pop_flash("error") | |
|
125 | request.session.pop_flash("warning") | |
|
83 | 126 | request.session.pop_flash() |
|
84 | 127 | |
|
85 | 128 | |
|
86 | 129 | def get_type_formatted_flash(request): |
|
87 | return [{'msg': message.msg, 'type': message.level} | |
|
88 | for message in get_flash(request)] | 
|
130 | return [ | |
|
131 | {"msg": message.msg, "type": message.level} for message in get_flash(request) | |
|
132 | ] | |
|
89 | 133 | |
|
90 | 134 | |
|
91 | 135 | def gen_pagination_headers(request, paginator): |
|
92 | 136 | headers = { |
|
93 | 'x-total-count': str(paginator.item_count), | 

94 | 'x-current-page': str(paginator.page), | 

95 | 'x-items-per-page': str(paginator.items_per_page), | 
|
137 | "x-total-count": str(paginator.item_count), | |
|
138 | "x-current-page": str(paginator.page), | |
|
139 | "x-items-per-page": str(paginator.items_per_page), | |
|
96 | 140 | } |
|
97 | 141 | params_dict = request.GET.dict_of_lists() |
|
98 | 142 | last_page_params = copy.deepcopy(params_dict) |
|
99 | last_page_params['page'] = paginator.last_page or 1 | 
|
143 | last_page_params["page"] = paginator.last_page or 1 | |
|
100 | 144 | first_page_params = copy.deepcopy(params_dict) |
|
101 | first_page_params.pop('page', None) | 
|
145 | first_page_params.pop("page", None) | |
|
102 | 146 | next_page_params = copy.deepcopy(params_dict) |
|
103 | next_page_params['page'] = paginator.next_page or paginator.last_page or 1 | 
|
147 | next_page_params["page"] = paginator.next_page or paginator.last_page or 1 | |
|
104 | 148 | prev_page_params = copy.deepcopy(params_dict) |
|
105 | prev_page_params['page'] = paginator.previous_page or 1 | 
|
149 | prev_page_params["page"] = paginator.previous_page or 1 | |
|
106 | 150 | lp_url = request.current_route_url(_query=last_page_params) |
|
107 | 151 | fp_url = request.current_route_url(_query=first_page_params) |
|
108 | links = [ | |
|
109 | 'rel="last", <{}>'.format(lp_url), | |
|
110 | 'rel="first", <{}>'.format(fp_url), | |
|
111 | ] | |
|
152 | links = ['rel="last", <{}>'.format(lp_url), 'rel="first", <{}>'.format(fp_url)] | |
|
112 | 153 | if first_page_params != prev_page_params: |
|
113 | 154 | prev_url = request.current_route_url(_query=prev_page_params) |
|
114 | 155 | links.append('rel="prev", <{}>'.format(prev_url)) |
|
115 | 156 | if last_page_params != next_page_params: |
|
116 | 157 | next_url = request.current_route_url(_query=next_page_params) |
|
117 | 158 | links.append('rel="next", <{}>'.format(next_url)) |
|
118 | headers['link'] = '; '.join(links) | |
|
159 | headers["link"] = "; ".join(links) | |
|
119 | 160 | return headers |
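
For orientation, a minimal sketch of what gen_pagination_headers() emits for page 2 of 5 with 50 items per page; the /logs URL and the counts are invented for illustration, and the link entries are joined with "; " exactly as in the code above:

    headers = {
        "x-total-count": "250",
        "x-current-page": "2",
        "x-items-per-page": "50",
        # "first" has no page param because first_page_params.pop('page', None)
        "link": 'rel="last", </logs?page=5>; rel="first", </logs>; '
                'rel="prev", </logs?page=1>; rel="next", </logs?page=3>',
    }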
@@ -1,46 +1,53 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import re |
|
18 | 18 | from appenlight.lib.ext_json import json |
|
19 | 19 | from jinja2 import Markup, escape, evalcontextfilter |
|
20 | 20 | |
|
21 | _paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}') | |
|
21 | _paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}") | |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | @evalcontextfilter |
|
25 | 25 | def nl2br(eval_ctx, value): |
|
26 | 26 | if eval_ctx.autoescape: |
|
27 | result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br>\n')) | |
|
28 | for p in _paragraph_re.split(escape(value))) | |
|
27 | result = "\n\n".join( | |
|
28 | "<p>%s</p>" % p.replace("\n", Markup("<br>\n")) | |
|
29 | for p in _paragraph_re.split(escape(value)) | |
|
30 | ) | |
|
29 | 31 | else: |
|
30 | result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n') | |
|
31 | for p in _paragraph_re.split(escape(value))) | |
|
32 | result = "\n\n".join( | |
|
33 | "<p>%s</p>" % p.replace("\n", "<br>\n") | |
|
34 | for p in _paragraph_re.split(escape(value)) | |
|
35 | ) | |
|
32 | 36 | if eval_ctx.autoescape: |
|
33 | 37 | result = Markup(result) |
|
34 | 38 | return result |
|
35 | 39 | |
|
36 | 40 | |
|
37 | 41 | @evalcontextfilter |
|
38 | 42 | def toJSONUnsafe(eval_ctx, value): |
|
39 | encoded = json.dumps(value).replace('&', '\\u0026') \ | |
|
40 | .replace('<', '\\u003c') \ | |
|
41 | .replace('>', '\\u003e') \ | |

42 | .replace('>', '\\u003e') \ | |

43 | .replace('"', '\\u0022') \ | |

44 | .replace("'", '\\u0027') \ | |

45 | .replace(r'\n', '/\\\n') | |
|
43 | encoded = ( | |
|
44 | json.dumps(value) | |
|
45 | .replace("&", "\\u0026") | |
|
46 | .replace("<", "\\u003c") | |
|
47 | .replace(">", "\\u003e") | |
|
48 | .replace(">", "\\u003e") | |
|
49 | .replace('"', "\\u0022") | |
|
50 | .replace("'", "\\u0027") | |
|
51 | .replace(r"\n", "/\\\n") | |
|
52 | ) | |
|
46 | 53 | return Markup("'%s'" % encoded) |
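
The substitutions above let a JSON payload be inlined inside a single-quoted JavaScript string without terminating the surrounding script tag; the duplicated ">" replacement appears verbatim on both sides of the diff and is redundant but harmless. A rough sketch of the effect (the input is made up):

    payload = {"msg": "</script><script>alert(1)</script>"}
    # after json.dumps() plus the replacements, the markup is roughly:
    # '{"msg": "\u003c/script\u003e\u003cscript\u003ealert(1)\u003c/script\u003e"}'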
@@ -1,64 +1,83 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import json |
|
18 | 18 | import logging |
|
19 | 19 | |
|
20 | ignored_keys = ['args', 'asctime', 'created', 'exc_info', 'exc_text', | |
|
21 | 'filename', 'funcName', 'levelname', 'levelno', 'lineno', | |
|
22 | 'message', 'module', 'msecs', 'msg', 'name', 'pathname', | |
|
23 | 'process', 'processName', 'relativeCreated', 'stack_info', | |
|
24 | 'thread', 'threadName'] | |
|
20 | ignored_keys = [ | |
|
21 | "args", | |
|
22 | "asctime", | |
|
23 | "created", | |
|
24 | "exc_info", | |
|
25 | "exc_text", | |
|
26 | "filename", | |
|
27 | "funcName", | |
|
28 | "levelname", | |
|
29 | "levelno", | |
|
30 | "lineno", | |
|
31 | "message", | |
|
32 | "module", | |
|
33 | "msecs", | |
|
34 | "msg", | |
|
35 | "name", | |
|
36 | "pathname", | |
|
37 | "process", | |
|
38 | "processName", | |
|
39 | "relativeCreated", | |
|
40 | "stack_info", | |
|
41 | "thread", | |
|
42 | "threadName", | |
|
43 | ] | |
|
25 | 44 | |
|
26 | 45 | |
|
27 | 46 | class JSONFormatter(logging.Formatter): |
|
28 | 47 | def format(self, record): |
|
29 | 48 | """ |
|
30 | 49 | Format the specified record as text. |
|
31 | 50 | |
|
32 | 51 | The record's attribute dictionary is used as the operand to a |
|
33 | 52 | string formatting operation which yields the returned string. |
|
34 | 53 | Before formatting the dictionary, a couple of preparatory steps |
|
35 | 54 | are carried out. The message attribute of the record is computed |
|
36 | 55 | using LogRecord.getMessage(). If the formatting string uses the |
|
37 | 56 | time (as determined by a call to usesTime(), formatTime() is |
|
38 | 57 | called to format the event time. If there is exception information, |
|
39 | 58 | it is formatted using formatException() and appended to the message. |
|
40 | 59 | """ |
|
41 | 60 | record.message = record.getMessage() |
|
42 | 61 | log_dict = vars(record) |
|
43 | 62 | keys = [k for k in log_dict.keys() if k not in ignored_keys] |
|
44 | payload = {'message': record.message} | |
|
63 | payload = {"message": record.message} | |
|
45 | 64 | payload.update({k: log_dict[k] for k in keys}) |
|
46 | 65 | record.message = json.dumps(payload, default=lambda x: str(x)) |
|
47 | 66 | |
|
48 | 67 | if self.usesTime(): |
|
49 | 68 | record.asctime = self.formatTime(record, self.datefmt) |
|
50 | 69 | s = self.formatMessage(record) |
|
51 | 70 | if record.exc_info: |
|
52 | 71 | # Cache the traceback text to avoid converting it multiple times |
|
53 | 72 | # (it's constant anyway) |
|
54 | 73 | if not record.exc_text: |
|
55 | 74 | record.exc_text = self.formatException(record.exc_info) |
|
56 | 75 | if record.exc_text: |
|
57 | 76 | if s[-1:] != "\n": |
|
58 | 77 | s = s + "\n" |
|
59 | 78 | s = s + record.exc_text |
|
60 | 79 | if record.stack_info: |
|
61 | 80 | if s[-1:] != "\n": |
|
62 | 81 | s = s + "\n" |
|
63 | 82 | s = s + self.formatStack(record.stack_info) |
|
64 | 83 | return s |
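
A minimal wiring sketch for the formatter above, assuming it is importable from this module (the logger name and extra attribute are illustrative):

    import logging

    handler = logging.StreamHandler()
    # record.message is rewritten to a JSON payload of message + extra attrs
    handler.setFormatter(JSONFormatter("%(asctime)s %(message)s"))
    log = logging.getLogger("demo")
    log.addHandler(handler)
    log.warning("user logged in", extra={"user_id": 5})
    # -> 2017-01-01 10:00:00,000 {"message": "user logged in", "user_id": 5}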
@@ -1,65 +1,69 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | BASE = 'appenlight:data:{}' | |
|
17 | BASE = "appenlight:data:{}" | |
|
18 | 18 | |
|
19 | 19 | REDIS_KEYS = { |
|
20 | 'tasks': { | |

21 | 'add_reports_lock': BASE.format('add_reports_lock:{}'), | |

22 | 'add_logs_lock': BASE.format('add_logs_lock:{}'), | |
|
20 | "tasks": { | |
|
21 | "add_reports_lock": BASE.format("add_reports_lock:{}"), | |
|
22 | "add_logs_lock": BASE.format("add_logs_lock:{}"), | |
|
23 | 23 | }, |
|
24 | 'counters': { | |

25 | 'events_per_minute_per_user': BASE.format( | |
|
26 | 'events_per_minute_per_user:{}:{}'), | |
|
27 | 'reports_per_minute': BASE.format('reports_per_minute:{}'), | |

28 | 'reports_per_hour_per_app': BASE.format( | |
|
29 | 'reports_per_hour_per_app:{}:{}'), | |
|
30 | 'reports_per_type': BASE.format('reports_per_type:{}'), | |

31 | 'logs_per_minute': BASE.format('logs_per_minute:{}'), | |

32 | 'logs_per_hour_per_app': BASE.format( | |
|
33 | 'logs_per_hour_per_app:{}:{}'), | |
|
34 | 'metrics_per_minute': BASE.format('metrics_per_minute:{}'), | |
|
35 | 'metrics_per_hour_per_app': BASE.format( | |
|
36 | 'metrics_per_hour_per_app:{}:{}'), | |
|
37 | 'report_group_occurences': BASE.format('report_group_occurences:{}'), | |

38 | 'report_group_occurences_alerting': BASE.format( | |

39 | 'report_group_occurences_alerting:{}'), | |
|
40 | 'report_group_occurences_10th': BASE.format( | |
|
41 | 'report_group_occurences_10th:{}'), | |
|
42 | 'report_group_occurences_100th': BASE.format( | |
|
43 | 'report_group_occurences_100th:{}'), | |
|
24 | "counters": { | |
|
25 | "events_per_minute_per_user": BASE.format("events_per_minute_per_user:{}:{}"), | |
|
26 | "reports_per_minute": BASE.format("reports_per_minute:{}"), | |
|
27 | "reports_per_hour_per_app": BASE.format("reports_per_hour_per_app:{}:{}"), | |
|
28 | "reports_per_type": BASE.format("reports_per_type:{}"), | |
|
29 | "logs_per_minute": BASE.format("logs_per_minute:{}"), | |
|
30 | "logs_per_hour_per_app": BASE.format("logs_per_hour_per_app:{}:{}"), | |
|
31 | "metrics_per_minute": BASE.format("metrics_per_minute:{}"), | |
|
32 | "metrics_per_hour_per_app": BASE.format("metrics_per_hour_per_app:{}:{}"), | |
|
33 | "report_group_occurences": BASE.format("report_group_occurences:{}"), | |
|
34 | "report_group_occurences_alerting": BASE.format( | |
|
35 | "report_group_occurences_alerting:{}" | |
|
36 | ), | |
|
37 | "report_group_occurences_10th": BASE.format("report_group_occurences_10th:{}"), | |
|
38 | "report_group_occurences_100th": BASE.format( | |
|
39 | "report_group_occurences_100th:{}" | |
|
40 | ), | |
|
44 | 41 | }, |
|
45 | 'rate_limits': { | |

46 | 'per_application_reports_rate_limit': BASE.format( | |

47 | 'per_application_reports_limit:{}:{}'), | |
|
48 | 'per_application_logs_rate_limit': BASE.format( | |
|
49 | 'per_application_logs_rate_limit:{}:{}'), | |

50 | 'per_application_metrics_rate_limit': BASE.format( | |
|
51 | 'per_application_metrics_rate_limit:{}:{}'), | |
|
42 | "rate_limits": { | |
|
43 | "per_application_reports_rate_limit": BASE.format( | |
|
44 | "per_application_reports_limit:{}:{}" | |
|
45 | ), | |
|
46 | "per_application_logs_rate_limit": BASE.format( | |
|
47 | "per_application_logs_rate_limit:{}:{}" | |
|
48 | ), | |
|
49 | "per_application_metrics_rate_limit": BASE.format( | |
|
50 | "per_application_metrics_rate_limit:{}:{}" | |
|
51 | ), | |
|
52 | 52 | }, |
|
53 | 'apps_that_got_new_data_per_hour': BASE.format('apps_that_got_new_data_per_hour:{}'), | |
|
54 | 'apps_that_had_reports': BASE.format('apps_that_had_reports'), | |
|
55 | 'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'), | |
|
56 | 'apps_that_had_reports_alerting': BASE.format( | |
|
57 | 'apps_that_had_reports_alerting'), | |
|
58 | 'apps_that_had_error_reports_alerting': BASE.format( | |

59 | 'apps_that_had_error_reports_alerting'), | |
|
60 | 'reports_to_notify_per_type_per_app': BASE.format( | |
|
61 | 'reports_to_notify_per_type_per_app:{}:{}'), | |
|
62 | 'reports_to_notify_per_type_per_app_alerting': BASE.format( | |

63 | 'reports_to_notify_per_type_per_app_alerting:{}:{}'), | |
|
64 | 'seen_tag_list': BASE.format('seen_tag_list') | |
|
53 | "apps_that_got_new_data_per_hour": BASE.format( | |
|
54 | "apps_that_got_new_data_per_hour:{}" | |
|
55 | ), | |
|
56 | "apps_that_had_reports": BASE.format("apps_that_had_reports"), | |
|
57 | "apps_that_had_error_reports": BASE.format("apps_that_had_error_reports"), | |
|
58 | "apps_that_had_reports_alerting": BASE.format("apps_that_had_reports_alerting"), | |
|
59 | "apps_that_had_error_reports_alerting": BASE.format( | |
|
60 | "apps_that_had_error_reports_alerting" | |
|
61 | ), | |
|
62 | "reports_to_notify_per_type_per_app": BASE.format( | |
|
63 | "reports_to_notify_per_type_per_app:{}:{}" | |
|
64 | ), | |
|
65 | "reports_to_notify_per_type_per_app_alerting": BASE.format( | |
|
66 | "reports_to_notify_per_type_per_app_alerting:{}:{}" | |
|
67 | ), | |
|
68 | "seen_tag_list": BASE.format("seen_tag_list"), | |
|
65 | 69 | } |
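
How the key templates expand in practice, as a sketch (the id and timestamp arguments are invented):

    REDIS_KEYS["tasks"]["add_reports_lock"].format(1234)
    # -> 'appenlight:data:add_reports_lock:1234'
    REDIS_KEYS["counters"]["reports_per_hour_per_app"].format("2017-01-01T10", 5)
    # -> 'appenlight:data:reports_per_hour_per_app:2017-01-01T10:5'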
@@ -1,135 +1,131 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import json |
|
18 | 18 | |
|
19 | 19 | from pyramid.security import unauthenticated_userid |
|
20 | 20 | |
|
21 | 21 | import appenlight.lib.helpers as helpers |
|
22 | 22 | |
|
23 | 23 | from authomatic.providers import oauth2, oauth1 |
|
24 | 24 | from authomatic import Authomatic |
|
25 | 25 | from ziggurat_foundations.models.services.user import UserService |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class CSRFException(Exception): |
|
29 | 29 | pass |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | class JSONException(Exception): |
|
33 | 33 | pass |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def get_csrf_token(request): |
|
37 | 37 | return request.session.get_csrf_token() |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | def safe_json_body(request): |
|
41 | 41 | """ |
|
42 | 42 | Returns None if json body is missing or erroneous |
|
43 | 43 | """ |
|
44 | 44 | try: |
|
45 | 45 | return request.json_body |
|
46 | 46 | except ValueError: |
|
47 | 47 | return None |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def unsafe_json_body(request): |
|
51 | 51 | """ |
|
52 | 52 | Throws JSONException if json can't deserialize |
|
53 | 53 | """ |
|
54 | 54 | try: |
|
55 | 55 | return request.json_body |
|
56 | 56 | except ValueError: |
|
57 | raise JSONException('Incorrect JSON') | |
|
57 | raise JSONException("Incorrect JSON") | |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | def get_user(request): |
|
61 | if not request.path_info.startswith('/static'): | |
|
61 | if not request.path_info.startswith("/static"): | |
|
62 | 62 | user_id = unauthenticated_userid(request) |
|
63 | 63 | try: |
|
64 | 64 | user_id = int(user_id) |
|
65 | 65 | except Exception: |
|
66 | 66 | return None |
|
67 | 67 | |
|
68 | 68 | if user_id: |
|
69 | 69 | user = UserService.by_id(user_id) |
|
70 | 70 | if user: |
|
71 | request.environ['appenlight.username'] = '%d:%s' % ( | |

72 | user_id, user.user_name) | |
|
71 | request.environ["appenlight.username"] = "%d:%s" % ( | |
|
72 | user_id, | |
|
73 | user.user_name, | |
|
74 | ) | |
|
73 | 75 | return user |
|
74 | 76 | else: |
|
75 | 77 | return None |
|
76 | 78 | |
|
77 | 79 | |
|
78 | 80 | def es_conn(request): |
|
79 | 81 | return request.registry.es_conn |
|
80 | 82 | |
|
81 | 83 | |
|
82 | 84 | def add_flash_to_headers(request, clear=True): |
|
83 | 85 | """ |
|
84 | 86 | Adds pending flash messages to response, if clear is true clears out the |
|
85 | 87 | flash queue |
|
86 | 88 | """ |
|
87 | 89 | flash_msgs = helpers.get_type_formatted_flash(request) |
|
88 | request.response.headers['x-flash-messages'] = json.dumps(flash_msgs) | |
|
90 | request.response.headers["x-flash-messages"] = json.dumps(flash_msgs) | |
|
89 | 91 | helpers.clear_flash(request) |
|
90 | 92 | |
|
91 | 93 | |
|
92 | 94 | def get_authomatic(request): |
|
93 | 95 | settings = request.registry.settings |
|
94 | 96 | # authomatic social auth |
|
95 | 97 | authomatic_conf = { |
|
96 | 98 | # callback http://yourapp.com/social_auth/twitter |
|
97 | 'twitter': { | |

98 | 'class_': oauth1.Twitter, | |

99 | 'consumer_key': settings.get('authomatic.pr.twitter.key', ''), | |

100 | 'consumer_secret': settings.get('authomatic.pr.twitter.secret', | |
|
101 | ''), | |
|
99 | "twitter": { | |
|
100 | "class_": oauth1.Twitter, | |
|
101 | "consumer_key": settings.get("authomatic.pr.twitter.key", ""), | |
|
102 | "consumer_secret": settings.get("authomatic.pr.twitter.secret", ""), | |
|
102 | 103 | }, |
|
103 | 104 | # callback http://yourapp.com/social_auth/facebook |
|
104 |
|
|
|
105 |
|
|
|
106 |
|
|
|
107 |
|
|
|
108 | ''), | |
|
109 | 'scope': ['email'], | |
|
105 | "facebook": { | |
|
106 | "class_": oauth2.Facebook, | |
|
107 | "consumer_key": settings.get("authomatic.pr.facebook.app_id", ""), | |
|
108 | "consumer_secret": settings.get("authomatic.pr.facebook.secret", ""), | |
|
109 | "scope": ["email"], | |
|
110 | 110 | }, |
|
111 | 111 | # callback http://yourapp.com/social_auth/google |
|
112 | 'google': { | |

113 | 'class_': oauth2.Google, | |

114 | 'consumer_key': settings.get('authomatic.pr.google.key', ''), | |

115 | 'consumer_secret': settings.get( | |
|
116 | 'authomatic.pr.google.secret', ''), | |
|
117 | 'scope': ['profile', 'email'], | |
|
112 | "google": { | |
|
113 | "class_": oauth2.Google, | |
|
114 | "consumer_key": settings.get("authomatic.pr.google.key", ""), | |
|
115 | "consumer_secret": settings.get("authomatic.pr.google.secret", ""), | |
|
116 | "scope": ["profile", "email"], | |
|
118 | 117 | }, |
|
119 | 'github': { | |

120 | 'class_': oauth2.GitHub, | |

121 | 'consumer_key': settings.get('authomatic.pr.github.key', ''), | |

122 | 'consumer_secret': settings.get( | |
|
123 | 'authomatic.pr.github.secret', ''), | |
|
124 | 'scope': ['repo', 'public_repo', 'user:email'], | |
|
125 | 'access_headers': {'User-Agent': 'AppEnlight'}, | |
|
118 | "github": { | |
|
119 | "class_": oauth2.GitHub, | |
|
120 | "consumer_key": settings.get("authomatic.pr.github.key", ""), | |
|
121 | "consumer_secret": settings.get("authomatic.pr.github.secret", ""), | |
|
122 | "scope": ["repo", "public_repo", "user:email"], | |
|
123 | "access_headers": {"User-Agent": "AppEnlight"}, | |
|
124 | }, | |
|
125 | "bitbucket": { | |
|
126 | "class_": oauth1.Bitbucket, | |
|
127 | "consumer_key": settings.get("authomatic.pr.bitbucket.key", ""), | |
|
128 | "consumer_secret": settings.get("authomatic.pr.bitbucket.secret", ""), | |
|
126 | 129 | }, |
|
127 | 'bitbucket': { | |
|
128 | 'class_': oauth1.Bitbucket, | |
|
129 | 'consumer_key': settings.get('authomatic.pr.bitbucket.key', ''), | |
|
130 | 'consumer_secret': settings.get( | |
|
131 | 'authomatic.pr.bitbucket.secret', '') | |
|
132 | } | |
|
133 | 130 | } |
|
134 | return Authomatic( | |
|
135 | config=authomatic_conf, secret=settings['authomatic.secret']) | |
|
131 | return Authomatic(config=authomatic_conf, secret=settings["authomatic.secret"]) |
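
These helpers are written to be attached as Pyramid request attributes; a hedged sketch of how such a registration typically looks (the include function and attribute names are assumptions, not taken from this changeset):

    def includeme(config):
        # functions defined in this module become lazy request attributes
        config.add_request_method(get_user, "user", reify=True)
        config.add_request_method(es_conn, "es_conn", property=True)
        config.add_request_method(safe_json_body, "safe_json_body", reify=True)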
@@ -1,298 +1,312 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import operator |
|
19 | 19 | |
|
20 | 20 | log = logging.getLogger(__name__) |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | class RuleException(Exception): |
|
24 | 24 | pass |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | class KeyNotFoundException(RuleException): |
|
28 | 28 | pass |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | class UnknownTypeException(RuleException): |
|
32 | 32 | pass |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | class BadConfigException(RuleException): |
|
36 | 36 | pass |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class InvalidValueException(RuleException): |
|
40 | 40 | pass |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class RuleBase(object): |
|
44 | 44 | @classmethod |
|
45 | 45 | def default_dict_struct_getter(cls, struct, field_name): |
|
46 | 46 | """ |
|
47 | 47 | returns a key from dictionary based on field_name, if the name contains |
|
48 | 48 | `:` then it means additional nesting levels should be checked for the |
|
49 | 49 | key so `a:b:c` means return struct['a']['b']['c'] |
|
50 | 50 | |
|
51 | 51 | :param struct: |
|
52 | 52 | :param field_name: |
|
53 | 53 | :return: |
|
54 | 54 | """ |
|
55 | parts = field_name.split(':') if field_name else [] | |
|
55 | parts = field_name.split(":") if field_name else [] | |
|
56 | 56 | found = struct |
|
57 | 57 | while parts: |
|
58 | 58 | current_key = parts.pop(0) |
|
59 | 59 | found = found.get(current_key) |
|
60 | 60 | if not found and parts: |
|
61 | raise KeyNotFoundException('Key not found in structure') | |
|
61 | raise KeyNotFoundException("Key not found in structure") | |
|
62 | 62 | return found |
|
63 | 63 | |
|
64 | 64 | @classmethod |
|
65 | 65 | def default_obj_struct_getter(cls, struct, field_name): |
|
66 | 66 | """ |
|
67 | 67 | returns a key from instance based on field_name, if the name contains |
|
68 | 68 | `:` then it means additional nesting levels should be checked for the |
|
69 | 69 | key so `a:b:c` means return struct.a.b.c |
|
70 | 70 | |
|
71 | 71 | :param struct: |
|
72 | 72 | :param field_name: |
|
73 | 73 | :return: |
|
74 | 74 | """ |
|
75 | parts = field_name.split(':') | |
|
75 | parts = field_name.split(":") | |
|
76 | 76 | found = struct |
|
77 | 77 | while parts: |
|
78 | 78 | current_key = parts.pop(0) |
|
79 | 79 | found = getattr(found, current_key, None) |
|
80 | 80 | if not found and parts: |
|
81 | raise KeyNotFoundException('Key not found in structure') | |
|
81 | raise KeyNotFoundException("Key not found in structure") | |
|
82 | 82 | return found |
|
83 | 83 | |
|
84 | 84 | def normalized_type(self, field, value): |
|
85 | 85 | """ |
|
86 | 86 | Converts text values from self.conf_value based on type_matrix below |
|
87 | 87 | check_matrix defines what kind of checks we can perform on a field |
|
88 | 88 | value based on field name |
|
89 | 89 | """ |
|
90 | 90 | f_type = self.type_matrix.get(field) |
|
91 | 91 | if f_type: |
|
92 | cast_to = f_type['type'] | |
|
92 | cast_to = f_type["type"] | |
|
93 | 93 | else: |
|
94 | raise UnknownTypeException('Unknown type') | |
|
94 | raise UnknownTypeException("Unknown type") | |
|
95 | 95 | |
|
96 | 96 | if value is None: |
|
97 | 97 | return None |
|
98 | 98 | |
|
99 | 99 | try: |
|
100 | if cast_to == 'int': | |
|
100 | if cast_to == "int": | |
|
101 | 101 | return int(value) |
|
102 | elif cast_to == 'float': | |
|
102 | elif cast_to == "float": | |
|
103 | 103 | return float(value) |
|
104 | elif cast_to == 'unicode': | |
|
104 | elif cast_to == "unicode": | |
|
105 | 105 | return str(value) |
|
106 | 106 | except ValueError as exc: |
|
107 | 107 | raise InvalidValueException(exc) |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | class Rule(RuleBase): |
|
111 | def __init__(self, config, type_matrix, | |
|
112 | struct_getter=RuleBase.default_dict_struct_getter, | |
|
113 | config_manipulator=None): | |
|
111 | def __init__( | |
|
112 | self, | |
|
113 | config, | |
|
114 | type_matrix, | |
|
115 | struct_getter=RuleBase.default_dict_struct_getter, | |
|
116 | config_manipulator=None, | |
|
117 | ): | |
|
114 | 118 | """ |
|
115 | 119 | |
|
116 | 120 | :param config: dict - contains rule configuration |
|
117 | 121 | example:: |
|
118 | 122 | { |
|
119 | 123 | "field": "__OR__", |
|
120 | 124 | "rules": [ |
|
121 | 125 | { |
|
122 | 126 | "field": "__AND__", |
|
123 | 127 | "rules": [ |
|
124 | 128 | { |
|
125 | 129 | "op": "ge", |
|
126 | 130 | "field": "occurences", |
|
127 | 131 | "value": "10" |
|
128 | 132 | }, |
|
129 | 133 | { |
|
130 | 134 | "op": "ge", |
|
131 | 135 | "field": "priority", |
|
132 | 136 | "value": "4" |
|
133 | 137 | } |
|
134 | 138 | ] |
|
135 | 139 | }, |
|
136 | 140 | { |
|
137 | 141 | "op": "eq", |
|
138 | 142 | "field": "http_status", |
|
139 | 143 | "value": "500" |
|
140 | 144 | } |
|
141 | 145 | ] |
|
142 | 146 | } |
|
143 | 147 | :param type_matrix: dict - contains map of type casts |
|
144 | 148 | example:: |
|
145 | 149 | { |
|
146 | 150 | 'http_status': 'int', |
|
147 | 151 | 'priority': 'unicode', |
|
148 | 152 | } |
|
149 | 153 | :param struct_getter: callable - used to grab the value of field from |
|
150 | 154 | the structure passed to match() based |
|
151 | 155 | on key, default |
|
152 | 156 | |
|
153 | 157 | """ |
|
154 | 158 | self.type_matrix = type_matrix |
|
155 | 159 | self.config = config |
|
156 | 160 | self.struct_getter = struct_getter |
|
157 | 161 | self.config_manipulator = config_manipulator |
|
158 | 162 | if config_manipulator: |
|
159 | 163 | config_manipulator(self) |
|
160 | 164 | |
|
161 | 165 | def subrule_check(self, rule_config, struct): |
|
162 | rule = Rule(rule_config, self.type_matrix, | |
|
163 | config_manipulator=self.config_manipulator) | |
|
166 | rule = Rule( | |
|
167 | rule_config, self.type_matrix, config_manipulator=self.config_manipulator | |
|
168 | ) | |
|
164 | 169 | return rule.match(struct) |
|
165 | 170 | |
|
166 | 171 | def match(self, struct): |
|
167 | 172 | """ |
|
168 | 173 | Check if rule matched for this specific report |
|
169 | 174 | First tries report value, then tests tags in not found, then finally |
|
170 | 175 | report group |
|
171 | 176 | """ |
|
172 | field_name = self.config.get('field') | |

173 | test_value = self.config.get('value') | |
|
177 | field_name = self.config.get("field") | |
|
178 | test_value = self.config.get("value") | |
|
174 | 179 | |
|
175 | 180 | if not field_name: |
|
176 | 181 | return False |
|
177 | 182 | |
|
178 | if field_name == '__AND__': | |
|
179 | rule = AND(self.config['rules'], self.type_matrix, | |
|
180 | config_manipulator=self.config_manipulator) | |
|
183 | if field_name == "__AND__": | |
|
184 | rule = AND( | |
|
185 | self.config["rules"], | |
|
186 | self.type_matrix, | |
|
187 | config_manipulator=self.config_manipulator, | |
|
188 | ) | |
|
181 | 189 | return rule.match(struct) |
|
182 | elif field_name == '__OR__': | |
|
183 | rule = OR(self.config['rules'], self.type_matrix, | |
|
184 | config_manipulator=self.config_manipulator) | |
|
190 | elif field_name == "__OR__": | |
|
191 | rule = OR( | |
|
192 | self.config["rules"], | |
|
193 | self.type_matrix, | |
|
194 | config_manipulator=self.config_manipulator, | |
|
195 | ) | |
|
185 | 196 | return rule.match(struct) |
|
186 | elif field_name == '__NOT__': | |
|
187 | rule = NOT(self.config['rules'], self.type_matrix, | |
|
188 | config_manipulator=self.config_manipulator) | |
|
197 | elif field_name == "__NOT__": | |
|
198 | rule = NOT( | |
|
199 | self.config["rules"], | |
|
200 | self.type_matrix, | |
|
201 | config_manipulator=self.config_manipulator, | |
|
202 | ) | |
|
189 | 203 | return rule.match(struct) |
|
190 | 204 | |
|
191 | 205 | if test_value is None: |
|
192 | 206 | return False |
|
193 | 207 | |
|
194 | 208 | try: |
|
195 | struct_value = self.normalized_type(field_name, | |

196 | self.struct_getter(struct, | |
|
197 | field_name)) | |
|
209 | struct_value = self.normalized_type( | |
|
210 | field_name, self.struct_getter(struct, field_name) | |
|
211 | ) | |
|
198 | 212 | except (UnknownTypeException, InvalidValueException) as exc: |
|
199 | 213 | log.error(str(exc)) |
|
200 | 214 | return False |
|
201 | 215 | |
|
202 | 216 | try: |
|
203 | 217 | test_value = self.normalized_type(field_name, test_value) |
|
204 | 218 | except (UnknownTypeException, InvalidValueException) as exc: |
|
205 | 219 | log.error(str(exc)) |
|
206 | 220 | return False |
|
207 | 221 | |
|
208 | if self.config['op'] not in ('startswith', 'endswith', 'contains'): | |
|
222 | if self.config["op"] not in ("startswith", "endswith", "contains"): | |
|
209 | 223 | try: |
|
210 | return getattr(operator, | |
|
211 | self.config['op'])(struct_value, test_value) | |
|
224 | return getattr(operator, self.config["op"])(struct_value, test_value) | |
|
212 | 225 | except TypeError: |
|
213 | 226 | return False |
|
214 | elif self.config['op'] == 'startswith': | |
|
227 | elif self.config["op"] == "startswith": | |
|
215 | 228 | return struct_value.startswith(test_value) |
|
216 | elif self.config['op'] == 'endswith': | |
|
229 | elif self.config["op"] == "endswith": | |
|
217 | 230 | return struct_value.endswith(test_value) |
|
218 | elif self.config['op'] == 'contains': | |
|
231 | elif self.config["op"] == "contains": | |
|
219 | 232 | return test_value in struct_value |
|
220 | raise BadConfigException('Invalid configuration, ' | |

221 | 'unknown operator: {}'.format(self.config)) | |
|
233 | raise BadConfigException( | |
|
234 | "Invalid configuration, " "unknown operator: {}".format(self.config) | |
|
235 | ) | |
|
222 | 236 | |
|
223 | 237 | def __repr__(self): |
|
224 | return '<Rule {} {}>'.format(self.config.get('field'), | |
|
225 | self.config.get('value')) | |
|
238 | return "<Rule {} {}>".format(self.config.get("field"), self.config.get("value")) | |
|
226 | 239 | |
|
227 | 240 | |
|
228 | 241 | class AND(Rule): |
|
229 | 242 | def __init__(self, rules, *args, **kwargs): |
|
230 | 243 | super(AND, self).__init__({}, *args, **kwargs) |
|
231 | 244 | self.rules = rules |
|
232 | 245 | |
|
233 | 246 | def match(self, struct): |
|
234 | return all([self.subrule_check(r_conf, struct) for r_conf | |
|
235 | in self.rules]) | |
|
247 | return all([self.subrule_check(r_conf, struct) for r_conf in self.rules]) | |
|
236 | 248 | |
|
237 | 249 | |
|
238 | 250 | class NOT(Rule): |
|
239 | 251 | def __init__(self, rules, *args, **kwargs): |
|
240 | 252 | super(NOT, self).__init__({}, *args, **kwargs) |
|
241 | 253 | self.rules = rules |
|
242 | 254 | |
|
243 | 255 | def match(self, struct): |
|
244 | return all([not self.subrule_check(r_conf, struct) for r_conf | |
|
245 | in self.rules]) | |
|
256 | return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules]) | |
|
246 | 257 | |
|
247 | 258 | |
|
248 | 259 | class OR(Rule): |
|
249 | 260 | def __init__(self, rules, *args, **kwargs): |
|
250 | 261 | super(OR, self).__init__({}, *args, **kwargs) |
|
251 | 262 | self.rules = rules |
|
252 | 263 | |
|
253 | 264 | def match(self, struct): |
|
254 | return any([self.subrule_check(r_conf, struct) for r_conf | |
|
255 | in self.rules]) | |
|
265 | return any([self.subrule_check(r_conf, struct) for r_conf in self.rules]) | |
|
256 | 266 | |
|
257 | 267 | |
|
258 | 268 | class RuleService(object): |
|
259 | 269 | @staticmethod |
|
260 | def rule_from_config(config, field_mappings, labels_dict, | |
|
261 | manipulator_func=None): | |
|
270 | def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None): | |
|
262 | 271 | """ |
|
263 | 272 | Returns modified rule with manipulator function |
|
264 | 273 | By default manipulator function replaces field id from labels_dict |
|
265 | 274 | with current field id proper for the rule from fields_mappings |
|
266 | 275 | |
|
267 | 276 | because label X_X id might be pointing different value on next request |
|
268 | 277 | when new term is returned from elasticsearch - this ensures things |
|
269 | 278 | are kept 1:1 all the time |
|
270 | 279 | """ |
|
271 | 280 | rev_map = {} |
|
272 | 281 | for k, v in labels_dict.items(): |
|
273 | rev_map[(v['agg'], v['key'])] = k | |
|
282 | rev_map[(v["agg"], v["key"])] = k | |
|
274 | 283 | |
|
275 | 284 | if manipulator_func is None: |
|
285 | ||
|
276 | 286 | def label_rewriter_func(rule): |
|
277 | field = rule.config.get('field') | |

278 | if not field or rule.config['field'] in ['__OR__', | |
|
279 | '__AND__', '__NOT__']: | |
|
287 | field = rule.config.get("field") | |
|
288 | if not field or rule.config["field"] in [ | |
|
289 | "__OR__", | |
|
290 | "__AND__", | |
|
291 | "__NOT__", | |
|
292 | ]: | |
|
280 | 293 | return |
|
281 | 294 | |
|
282 | to_map = field_mappings.get(rule.config['field']) | |
|
295 | to_map = field_mappings.get(rule.config["field"]) | |
|
283 | 296 | |
|
284 | 297 | # we need to replace series field with _AE_NOT_FOUND_ to not match |
|
285 | 298 | # accidently some other field which happens to have the series that |
|
286 | 299 | # was used when the alert was created |
|
287 | 300 | if to_map: |
|
288 | to_replace = rev_map.get((to_map['agg'], to_map['key']), | |
|
289 | '_AE_NOT_FOUND_') | |
|
301 | to_replace = rev_map.get( | |
|
302 | (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_" | |
|
303 | ) | |
|
290 | 304 | else: |
|
291 | to_replace = '_AE_NOT_FOUND_' | |
|
305 | to_replace = "_AE_NOT_FOUND_" | |
|
292 | 306 | |
|
293 | rule.config['field'] = to_replace | |

294 | rule.type_matrix[to_replace] = {"type": 'float'} | |
|
307 | rule.config["field"] = to_replace | |
|
308 | rule.type_matrix[to_replace] = {"type": "float"} | |
|
295 | 309 | |
|
296 | 310 | manipulator_func = label_rewriter_func |
|
297 | 311 | |
|
298 | 312 | return Rule(config, {}, config_manipulator=manipulator_func) |
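
A usage sketch assembled from the docstring example above; note that normalized_type() reads f_type['type'], so type_matrix entries must be dicts like {'type': 'int'} rather than the flat 'http_status': 'int' form the docstring shows:

    type_matrix = {
        "occurences": {"type": "int"},
        "priority": {"type": "int"},
        "http_status": {"type": "int"},
    }
    config = {"field": "__OR__", "rules": [
        {"field": "__AND__", "rules": [
            {"op": "ge", "field": "occurences", "value": "10"},
            {"op": "ge", "field": "priority", "value": "4"}]},
        {"op": "eq", "field": "http_status", "value": "500"}]}
    rule = Rule(config, type_matrix)
    rule.match({"http_status": 500})               # True via the OR branch
    rule.match({"occurences": 12, "priority": 5})  # True via the AND branch
    rule.match({"occurences": 12, "priority": 1})  # False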
@@ -1,60 +1,62 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | from ziggurat_foundations.models.services.external_identity import \ | |
|
18 | ExternalIdentityService | |
|
17 | from ziggurat_foundations.models.services.external_identity import ( | |
|
18 | ExternalIdentityService, | |
|
19 | ) | |
|
19 | 20 | from appenlight.models.external_identity import ExternalIdentity |
|
20 | 21 | |
|
21 | 22 | |
|
22 | 23 | def handle_social_data(request, user, social_data): |
|
23 | 24 | social_data = social_data |
|
24 | 25 | update_identity = False |
|
25 | 26 | |
|
26 | 27 | extng_id = ExternalIdentityService.by_external_id_and_provider( |
|
27 | social_data['user']['id'], | |
|
28 | social_data['credentials'].provider_name | |
|
28 | social_data["user"]["id"], social_data["credentials"].provider_name | |
|
29 | 29 | ) |
|
30 | 30 | |
|
31 | 31 | # fix legacy accounts with wrong google ID |
|
32 |
if not extng_id and social_data[ |
|
|
32 | if not extng_id and social_data["credentials"].provider_name == "google": | |
|
33 | 33 | extng_id = ExternalIdentityService.by_external_id_and_provider( |
|
34 |
social_data[ |
|
|
35 | social_data['credentials'].provider_name | |
|
34 | social_data["user"]["email"], social_data["credentials"].provider_name | |
|
36 | 35 | ) |
|
37 | 36 | |
|
38 | 37 | if extng_id: |
|
39 | 38 | extng_id.delete() |
|
40 | 39 | update_identity = True |
|
41 | 40 | |
|
42 |
if not social_data[ |
|
|
41 | if not social_data["user"]["id"]: | |
|
43 | 42 | request.session.flash( |
|
44 |
|
|
|
45 |
|
|
|
43 | "No external user id found? Perhaps permissions for " | |
|
44 | "authentication are set incorrectly", | |
|
45 | "error", | |
|
46 | ) | |
|
46 | 47 | return False |
|
47 | 48 | |
|
48 | 49 | if not extng_id or update_identity: |
|
49 | 50 | if not update_identity: |
|
50 | request.session.flash('Your external identity is now ' | |

51 | 'connected with your account') | |
|
51 | request.session.flash( | |
|
52 | "Your external identity is now " "connected with your account" | |
|
53 | ) | |
|
52 | 54 | ex_identity = ExternalIdentity() |
|
53 |
ex_identity.external_id = social_data[ |
|
|
54 |
ex_identity.external_user_name = social_data[ |
|
|
55 |
ex_identity.provider_name = social_data[ |
|
|
56 |
ex_identity.access_token = social_data[ |
|
|
57 |
ex_identity.token_secret = social_data[ |
|
|
58 |
ex_identity.alt_token = social_data[ |
|
|
55 | ex_identity.external_id = social_data["user"]["id"] | |
|
56 | ex_identity.external_user_name = social_data["user"]["user_name"] | |
|
57 | ex_identity.provider_name = social_data["credentials"].provider_name | |
|
58 | ex_identity.access_token = social_data["credentials"].token | |
|
59 | ex_identity.token_secret = social_data["credentials"].token_secret | |
|
60 | ex_identity.alt_token = social_data["credentials"].refresh_token | |
|
59 | 61 | user.external_identities.append(ex_identity) |
|
60 | request.session.pop('zigg.social_auth', None) | |
|
62 | request.session.pop("zigg.social_auth", None) |
@@ -1,491 +1,548 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | """ |
|
18 | 18 | Utility functions. |
|
19 | 19 | """ |
|
20 | 20 | import logging |
|
21 | 21 | import requests |
|
22 | 22 | import hashlib |
|
23 | 23 | import json |
|
24 | 24 | import copy |
|
25 | 25 | import uuid |
|
26 | 26 | import appenlight.lib.helpers as h |
|
27 | 27 | from collections import namedtuple |
|
28 | 28 | from datetime import timedelta, datetime, date |
|
29 | 29 | from dogpile.cache.api import NO_VALUE |
|
30 | 30 | from appenlight.models import Datastores |
|
31 | from appenlight.validators import (LogSearchSchema, | |
|
32 | TagListSchema, | |
|
33 | accepted_search_params) | |
|
31 | from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params | |
|
34 | 32 | from itsdangerous import TimestampSigner |
|
35 | 33 | from ziggurat_foundations.permissions import ALL_PERMISSIONS |
|
36 | 34 | from ziggurat_foundations.models.services.user import UserService |
|
37 | 35 | from dateutil.relativedelta import relativedelta |
|
38 | 36 | from dateutil.rrule import rrule, MONTHLY, DAILY |
|
39 | 37 | |
|
40 | 38 | log = logging.getLogger(__name__) |
|
41 | 39 | |
|
42 | 40 | |
|
43 | Stat = namedtuple('Stat', 'start_interval value') | |
|
41 | Stat = namedtuple("Stat", "start_interval value") | |
|
44 | 42 | |
|
45 | 43 | |
|
46 | 44 | def default_extractor(item): |
|
47 | 45 | """ |
|
48 | 46 | :param item - item to extract date from |
|
49 | 47 | """ |
|
50 | if hasattr(item, 'start_interval'): | |
|
48 | if hasattr(item, "start_interval"): | |
|
51 | 49 | return item.start_interval |
|
52 | return item['start_interval'] | |
|
50 | return item["start_interval"] | |
|
53 | 51 | |
|
54 | 52 | |
|
55 | 53 | # fast gap generator |
|
56 | def gap_gen_default(start, step, itemiterator, end_time=None, | |
|
57 | iv_extractor=None): | |
|
54 | def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None): | |
|
58 | 55 | """ generates a list of time/value items based on step and itemiterator |
|
59 | 56 | if there are entries missing from iterator time/None will be returned |
|
60 | 57 | instead |
|
61 | 58 | :param start - datetime - what time should we start generating our values |
|
62 | 59 | :param step - timedelta - stepsize |
|
63 | 60 | :param itemiterator - iterable - we will check this iterable for values |
|
64 | 61 | corresponding to generated steps |
|
65 | 62 | :param end_time - datetime - when last step is >= end_time stop iterating |
|
66 | 63 | :param iv_extractor - extracts current step from iterable items |
|
67 | 64 | """ |
|
68 | 65 | |
|
69 | 66 | if not iv_extractor: |
|
70 | 67 | iv_extractor = default_extractor |
|
71 | 68 | |
|
72 | 69 | next_step = start |
|
73 | 70 | minutes = step.total_seconds() / 60.0 |
|
74 | 71 | while next_step.minute % minutes != 0: |
|
75 | 72 | next_step = next_step.replace(minute=next_step.minute - 1) |
|
76 | 73 | for item in itemiterator: |
|
77 | 74 | item_start_interval = iv_extractor(item) |
|
78 | 75 | # do we have a match for current time step in our data? |
|
79 | 76 | # no gen a new tuple with 0 values |
|
80 | 77 | while next_step < item_start_interval: |
|
81 | 78 | yield Stat(next_step, None) |
|
82 | 79 | next_step = next_step + step |
|
83 | 80 | if next_step == item_start_interval: |
|
84 | 81 | yield Stat(item_start_interval, item) |
|
85 | 82 | next_step = next_step + step |
|
86 | 83 | if end_time: |
|
87 | 84 | while next_step < end_time: |
|
88 | 85 | yield Stat(next_step, None) |
|
89 | 86 | next_step = next_step + step |
|
90 | 87 | |
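
A sketch of the gap filling for a 5-minute series missing its middle bucket (timestamps invented):

    from datetime import datetime, timedelta

    rows = [{"start_interval": datetime(2017, 1, 1, 10, 0)},
            {"start_interval": datetime(2017, 1, 1, 10, 10)}]
    list(gap_gen_default(datetime(2017, 1, 1, 10, 0), timedelta(minutes=5), rows))
    # -> [Stat(10:00, {...}), Stat(10:05, None), Stat(10:10, {...})]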
|
91 | 88 | |
|
92 | 89 | class DateTimeEncoder(json.JSONEncoder): |
|
93 | 90 | """ Simple datetime to ISO encoder for json serialization""" |
|
94 | 91 | |
|
95 | 92 | def default(self, obj): |
|
96 | 93 | if isinstance(obj, date): |
|
97 | 94 | return obj.isoformat() |
|
98 | 95 | if isinstance(obj, datetime): |
|
99 | 96 | return obj.isoformat() |
|
100 | 97 | return json.JSONEncoder.default(self, obj) |
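
Usage is the standard json.dumps hook:

    json.dumps({"ts": datetime(2017, 1, 1, 12, 30)}, cls=DateTimeEncoder)
    # -> '{"ts": "2017-01-01T12:30:00"}'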
|
101 | 98 | |
|
102 | 99 | |
|
103 | def channelstream_request(secret, endpoint, payload, throw_exceptions=False, | |
|
104 | servers=None): | |
|
100 | def channelstream_request( | |
|
101 | secret, endpoint, payload, throw_exceptions=False, servers=None | |
|
102 | ): | |
|
105 | 103 | responses = [] |
|
106 | 104 | if not servers: |
|
107 | 105 | servers = [] |
|
108 | 106 | |
|
109 | 107 | signer = TimestampSigner(secret) |
|
110 | 108 | sig_for_server = signer.sign(endpoint) |
|
111 | for secret, server in [(s['secret'], s['server']) for s in servers]: | |
|
109 | for secret, server in [(s["secret"], s["server"]) for s in servers]: | |
|
112 | 110 | response = {} |
|
113 | secret_headers = {'x-channelstream-secret': sig_for_server, | |
|
114 | 'x-channelstream-endpoint': endpoint, | |
|
115 | 'Content-Type': 'application/json'} | |
|
116 | url = '%s%s' % (server, endpoint) | |
|
111 | secret_headers = { | |
|
112 | "x-channelstream-secret": sig_for_server, | |
|
113 | "x-channelstream-endpoint": endpoint, | |
|
114 | "Content-Type": "application/json", | |
|
115 | } | |
|
116 | url = "%s%s" % (server, endpoint) | |
|
117 | 117 | try: |
|
118 | response = requests.post(url, | |
|
119 | data=json.dumps(payload, | |
|
120 | cls=DateTimeEncoder), | |
|
121 | headers=secret_headers, | |

122 | verify=False, | |

123 | timeout=2).json() | |
|
118 | response = requests.post( | |
|
119 | url, | |
|
120 | data=json.dumps(payload, cls=DateTimeEncoder), | |
|
121 | headers=secret_headers, | |
|
122 | verify=False, | |
|
123 | timeout=2, | |
|
124 | ).json() | |
|
124 | 125 | except requests.exceptions.RequestException as e: |
|
125 | 126 | if throw_exceptions: |
|
126 | 127 | raise |
|
127 | 128 | responses.append(response) |
|
128 | 129 | return responses |
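
A hedged call sketch for channelstream_request(); the server URL, secret, and payload below are placeholders, not values from this project:

    servers = [{"server": "http://127.0.0.1:8000", "secret": "secret"}]
    channelstream_request(
        "secret", "/message",
        {"channel": "alerts", "message": {"text": "hi"}},
        servers=servers)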
|
129 | 130 | |
|
130 | 131 | |
|
131 | 132 | def add_cors_headers(response): |
|
132 | 133 | # allow CORS |
|
133 | response.headers.add('Access-Control-Allow-Origin', '*') | |

134 | response.headers.add('XDomainRequestAllowed', '1') | |

135 | response.headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS') | |
|
134 | response.headers.add("Access-Control-Allow-Origin", "*") | |
|
135 | response.headers.add("XDomainRequestAllowed", "1") | |
|
136 | response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS") | |
|
136 | 137 | # response.headers.add('Access-Control-Allow-Credentials', 'true') |
|
137 | response.headers.add('Access-Control-Allow-Headers', | |
|
138 | 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie') | |
|
139 | response.headers.add('Access-Control-Max-Age', '86400') | |
|
138 | response.headers.add( | |
|
139 | "Access-Control-Allow-Headers", | |
|
140 | "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie", | |
|
141 | ) | |
|
142 | response.headers.add("Access-Control-Max-Age", "86400") | |
|
140 | 143 | |
|
141 | 144 | |
|
142 | 145 | from sqlalchemy.sql import compiler |
|
143 | 146 | from psycopg2.extensions import adapt as sqlescape |
|
144 | 147 | |
|
145 | 148 | |
|
146 | 149 | # or use the appropiate escape function from your db driver |
|
147 | 150 | |
|
151 | ||
|
148 | 152 | def compile_query(query): |
|
149 | 153 | dialect = query.session.bind.dialect |
|
150 | 154 | statement = query.statement |
|
151 | 155 | comp = compiler.SQLCompiler(dialect, statement) |
|
152 | 156 | comp.compile() |
|
153 | 157 | enc = dialect.encoding |
|
154 | 158 | params = {} |
|
155 | 159 | for k, v in comp.params.items(): |
|
156 | 160 | if isinstance(v, str): |
|
157 | 161 | v = v.encode(enc) |
|
158 | 162 | params[k] = sqlescape(v) |
|
159 | 163 | return (comp.string.encode(enc) % params).decode(enc) |
|
160 | 164 | |
|
161 | 165 | |
|
162 | 166 | def convert_es_type(input_data): |
|
163 | 167 | """ |
|
164 | 168 | This might need to convert some text or other types to corresponding ES types |
|
165 | 169 | """ |
|
166 | 170 | return str(input_data) |
|
167 | 171 | |
|
168 | 172 | |
|
169 | ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch']) | |
|
173 | ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"]) | |
|
170 | 174 | |
|
171 | 175 | |
|
172 | 176 | def parse_proto(input_data): |
|
173 | 177 | try: |
|
174 | parts = [int(x) for x in input_data.split('.')] | |
|
178 | parts = [int(x) for x in input_data.split(".")] | |
|
175 | 179 | while len(parts) < 3: |
|
176 | 180 | parts.append(0) |
|
177 | 181 | return ProtoVersion(*parts) |
|
178 | 182 | except Exception as e: |
|
179 | log.info('Unknown protocol version: %s' % e) | |
|
183 | log.info("Unknown protocol version: %s" % e) | |
|
180 | 184 | return ProtoVersion(99, 99, 99) |
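
Behaviour sketch: missing components are zero-padded and anything unparseable falls back to 99.99.99:

    parse_proto("0.5")    # ProtoVersion(major=0, minor=5, patch=0)
    parse_proto("1.2.3")  # ProtoVersion(major=1, minor=2, patch=3)
    parse_proto("junk")   # ProtoVersion(major=99, minor=99, patch=99)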
|
181 | 185 | |
|
182 | 186 | |
|
183 | def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6, | |
|
184 | ixtypes=None): | |
|
187 | def es_index_name_limiter( | |
|
188 | start_date=None, end_date=None, months_in_past=6, ixtypes=None | |
|
189 | ): | |
|
185 | 190 | """ |
|
186 | 191 | This function limits the search to 6 months by default so we don't have to |
|
187 | 192 | query 300 elasticsearch indices for 20 years of historical data for example |
|
188 | 193 | """ |
|
189 | 194 | |
|
190 | 195 | # should be cached later |
|
191 | 196 | def get_possible_names(): |
|
192 | return list(Datastores.es.indices.get_alias('*')) | |
|
197 | return list(Datastores.es.indices.get_alias("*")) | |
|
193 | 198 | |
|
194 | 199 | possible_names = get_possible_names() |
|
195 | 200 | es_index_types = [] |
|
196 | 201 | if not ixtypes: |
|
197 | ixtypes = ['reports', 'metrics', 'logs'] | |
|
202 | ixtypes = ["reports", "metrics", "logs"] | |
|
198 | 203 | for t in ixtypes: |
|
199 | if t == 'reports': | |

200 | es_index_types.append('rcae_r_%s') | |

201 | elif t == 'logs': | |

202 | es_index_types.append('rcae_l_%s') | |

203 | elif t == 'metrics': | |

204 | es_index_types.append('rcae_m_%s') | |

205 | elif t == 'uptime': | |

206 | es_index_types.append('rcae_u_%s') | |

207 | elif t == 'slow_calls': | |

208 | es_index_types.append('rcae_sc_%s') | |
|
204 | if t == "reports": | |
|
205 | es_index_types.append("rcae_r_%s") | |
|
206 | elif t == "logs": | |
|
207 | es_index_types.append("rcae_l_%s") | |
|
208 | elif t == "metrics": | |
|
209 | es_index_types.append("rcae_m_%s") | |
|
210 | elif t == "uptime": | |
|
211 | es_index_types.append("rcae_u_%s") | |
|
212 | elif t == "slow_calls": | |
|
213 | es_index_types.append("rcae_sc_%s") | |
|
209 | 214 | |
|
210 | 215 | if start_date: |
|
211 | 216 | start_date = copy.copy(start_date) |
|
212 | 217 | else: |
|
213 | 218 | if not end_date: |
|
214 | 219 | end_date = datetime.utcnow() |
|
215 | 220 | start_date = end_date + relativedelta(months=months_in_past * -1) |
|
216 | 221 | |
|
217 | 222 | if not end_date: |
|
218 | 223 | end_date = start_date + relativedelta(months=months_in_past) |
|
219 | 224 | |
|
220 | index_dates = list(rrule(MONTHLY, | |
|
221 | dtstart=start_date.date().replace(day=1), | |
|
222 | until=end_date.date(), | |
|
223 | count=36)) | |
|
225 | index_dates = list( | |
|
226 | rrule( | |
|
227 | MONTHLY, | |
|
228 | dtstart=start_date.date().replace(day=1), | |
|
229 | until=end_date.date(), | |
|
230 | count=36, | |
|
231 | ) | |
|
232 | ) | |
|
224 | 233 | index_names = [] |
|
225 | 234 | for ix_type in es_index_types: |
|
226 | to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates | |
|
227 | if ix_type % d.strftime('%Y_%m') in possible_names] | |
|
235 | to_extend = [ | |
|
236 | ix_type % d.strftime("%Y_%m") | |
|
237 | for d in index_dates | |
|
238 | if ix_type % d.strftime("%Y_%m") in possible_names | |
|
239 | ] | |
|
228 | 240 | index_names.extend(to_extend) |
|
229 | for day in list(rrule(DAILY, dtstart=start_date.date(), | |
|
230 | until=end_date.date(), count=366)): | |
|
231 | ix_name = ix_type % day.strftime('%Y_%m_%d') | |
|
241 | for day in list( | |
|
242 | rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366) | |
|
243 | ): | |
|
244 | ix_name = ix_type % day.strftime("%Y_%m_%d") | |
|
232 | 245 | if ix_name in possible_names: |
|
233 | 246 | index_names.append(ix_name) |
|
234 | 247 | return index_names |
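
The returned names follow the prefix templates above, restricted to aliases that actually exist in the cluster: monthly names like 'rcae_r_2017_01' plus daily names like 'rcae_l_2017_01_15'. A hedged call sketch:

    es_index_name_limiter(ixtypes=["reports"])
    # -> e.g. ['rcae_r_2016_08', ..., 'rcae_r_2017_01']  (existing indices only)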
|
235 | 248 | |
|
236 | 249 | |
|
237 | 250 | def build_filter_settings_from_query_dict( |
|
238 | request, params=None, override_app_ids=None, | |
|
239 | resource_permissions=None): | |
|
251 | request, params=None, override_app_ids=None, resource_permissions=None | |
|
252 | ): | |
|
240 | 253 | """ |
|
241 | 254 | Builds list of normalized search terms for ES from query params |
|
242 | 255 | ensuring application list is restricted to only applications user |
|
243 | 256 | has access to |
|
244 | 257 | |
|
245 | 258 | :param params (dictionary) |
|
246 | 259 | :param override_app_ids - list of application id's to use instead of |
|
247 | 260 | applications user normally has access to |
|
248 | 261 | """ |
|
249 | 262 | params = copy.deepcopy(params) |
|
250 | 263 | applications = [] |
|
251 | 264 | if not resource_permissions: |
|
252 | resource_permissions = ['view'] | |
|
265 | resource_permissions = ["view"] | |
|
253 | 266 | |
|
254 | 267 | if request.user: |
|
255 | 268 | applications = UserService.resources_with_perms( |
|
256 | request.user, resource_permissions, resource_types=['application']) | |
|
269 | request.user, resource_permissions, resource_types=["application"] | |
|
270 | ) | |
|
257 | 271 | |
|
258 | 272 | # CRITICAL - this ensures our resultset is limited to only the ones |
|
259 | 273 | # user has view permissions |
|
260 | 274 | all_possible_app_ids = set([app.resource_id for app in applications]) |
|
261 | 275 | |
|
262 | 276 | # if override is preset we force permission for app to be present |
|
263 | 277 | # this allows users to see dashboards and applications they would |
|
264 | 278 | # normally not be able to |
|
265 | 279 | |
|
266 | 280 | if override_app_ids: |
|
267 | 281 | all_possible_app_ids = set(override_app_ids) |
|
268 | 282 | |
|
269 | 283 | schema = LogSearchSchema().bind(resources=all_possible_app_ids) |
|
270 | 284 | tag_schema = TagListSchema() |
|
271 | 285 | filter_settings = schema.deserialize(params) |
|
272 | 286 | tag_list = [] |
|
273 | 287 | for k, v in list(filter_settings.items()): |
|
274 | 288 | if k in accepted_search_params: |
|
275 | 289 | continue |
|
276 | tag_list.append({"name": k, "value": v, "op": 'eq'}) | |
|
290 | tag_list.append({"name": k, "value": v, "op": "eq"}) | |
|
277 | 291 | # remove the key from filter_settings |
|
278 | 292 | filter_settings.pop(k, None) |
|
279 | 293 | tags = tag_schema.deserialize(tag_list) |
|
280 | filter_settings['tags'] = tags | |
|
294 | filter_settings["tags"] = tags | |
|
281 | 295 | return filter_settings |
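
A sketch of the tag extraction: any query parameter outside accepted_search_params is converted into an 'eq' tag filter (the parameter names, values, and exact deserialized shape below are illustrative):

    # GET /api/logs?start_date=2017-01-01T00:00:00&namespace=app.views
    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed())
    # filter_settings["tags"] now contains roughly:
    # [{"name": "namespace", "value": ["app.views"], "op": "eq"}]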
|
282 | 296 | |
|
283 | 297 | |
|
284 | 298 | def gen_uuid(): |
|
285 | 299 | return str(uuid.uuid4()) |
|
286 | 300 | |
|
287 | 301 | |
|
288 | 302 | def gen_uuid4_sha_hex(): |
|
289 | 303 | return hashlib.sha1(uuid.uuid4().bytes).hexdigest() |
|
290 | 304 | |
|
291 | 305 | |
|
292 | 306 | def permission_tuple_to_dict(data): |
|
293 | 307 | out = { |
|
294 | 308 | "user_name": None, |
|
295 | 309 | "perm_name": data.perm_name, |
|
296 | 310 | "owner": data.owner, |
|
297 | 311 | "type": data.type, |
|
298 | 312 | "resource_name": None, |
|
299 | 313 | "resource_type": None, |
|
300 | 314 | "resource_id": None, |
|
301 | 315 | "group_name": None, |
|
302 | "group_id": None | |
|
316 | "group_id": None, | |
|
303 | 317 | } |
|
304 | 318 | if data.user: |
|
305 | 319 | out["user_name"] = data.user.user_name |
|
306 | 320 | if data.perm_name == ALL_PERMISSIONS: |
|
307 | out['perm_name'] = '__all_permissions__' | |
|
321 | out["perm_name"] = "__all_permissions__" | |
|
308 | 322 | if data.resource: |
|
309 | out['resource_name'] = data.resource.resource_name | |

310 | out['resource_type'] = data.resource.resource_type | |

311 | out['resource_id'] = data.resource.resource_id | |
|
323 | out["resource_name"] = data.resource.resource_name | |
|
324 | out["resource_type"] = data.resource.resource_type | |
|
325 | out["resource_id"] = data.resource.resource_id | |
|
312 | 326 | if data.group: |
|
313 | out['group_name'] = data.group.group_name | |

314 | out['group_id'] = data.group.id | |
|
327 | out["group_name"] = data.group.group_name | |
|
328 | out["group_id"] = data.group.id | |
|
315 | 329 | return out |
|
316 | 330 | |
|
317 | 331 | |
|
318 | def get_cached_buckets(request, stats_since, end_time, fn, cache_key, | |
|
319 | gap_gen=None, db_session=None, step_interval=None, | |
|
320 | iv_extractor=None, | |
|
321 | rerange=False, *args, **kwargs): | |
|
332 | def get_cached_buckets( | |
|
333 | request, | |
|
334 | stats_since, | |
|
335 | end_time, | |
|
336 | fn, | |
|
337 | cache_key, | |
|
338 | gap_gen=None, | |
|
339 | db_session=None, | |
|
340 | step_interval=None, | |
|
341 | iv_extractor=None, | |
|
342 | rerange=False, | |
|
343 | *args, | |
|
344 | **kwargs | |
|
345 | ): | |
|
322 | 346 | """ Takes "fn" that should return some data and tries to load the data |
|
323 | 347 | dividing it into daily buckets - if the stats_since and end time give a |
|
324 | 348 | delta bigger than 24hours, then only "todays" data is computed on the fly |
|
325 | 349 | |
|
326 | 350 | :param request: (request) request object |
|
327 | 351 | :param stats_since: (datetime) start date of buckets range |
|
328 | 352 | :param end_time: (datetime) end date of buckets range - utcnow() if None |
|
329 | 353 | :param fn: (callable) callable to use to populate buckets should have |
|
330 | 354 | following signature: |
|
331 | 355 | def get_data(request, since_when, until, *args, **kwargs): |
|
332 | 356 | |
|
333 | 357 | :param cache_key: (string) cache key that will be used to build bucket |
|
334 | 358 | caches |
|
335 | 359 | :param gap_gen: (callable) gap generator - should return step intervals |
|
336 | 360 | to use with out `fn` callable |
|
337 | 361 | :param db_session: (Session) sqlalchemy session |
|
338 | 362 | :param step_interval: (timedelta) optional step interval if we want to |
|
339 | 363 | override the default determined from total start/end time delta |
|
340 | 364 | :param iv_extractor: (callable) used to get step intervals from data |
|
341 | 365 | returned by `fn` callable |
|
342 | 366 | :param rerange: (bool) handy if we want to change ranges from hours to |
|
343 | 367 | days when cached data is missing - will shorten execution time if `fn` |
|
344 | 368 | callable supports that and we are working with multiple rows - like metrics |
|
345 | 369 | :param args: |
|
346 | 370 | :param kwargs: |
|
347 | 371 | |
|
348 | 372 | :return: iterable |
|
349 | 373 | """ |
|
350 | 374 | if not end_time: |
|
351 | 375 | end_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
352 | 376 | delta = end_time - stats_since |
|
353 | 377 | # if smaller than 3 days we want to group by 5min else by 1h, |
|
354 | 378 | # for 60 min group by min |
|
355 | 379 | if not gap_gen: |
|
356 | 380 | gap_gen = gap_gen_default |
|
357 | 381 | if not iv_extractor: |
|
358 | 382 | iv_extractor = default_extractor |
|
359 | 383 | |
|
360 | 384 | # do not use custom interval if total time range with new iv would exceed |
|
361 | 385 | # end time |
|
362 | 386 | if not step_interval or stats_since + step_interval >= end_time: |
|
363 | if delta < h.time_deltas.get('12h')['delta']: | |
|
387 | if delta < h.time_deltas.get("12h")["delta"]: | |
|
364 | 388 | step_interval = timedelta(seconds=60) |
|
365 | elif delta < h.time_deltas.get('3d')['delta']: | |
|
389 | elif delta < h.time_deltas.get("3d")["delta"]: | |
|
366 | 390 | step_interval = timedelta(seconds=60 * 5) |
|
367 | elif delta > h.time_deltas.get('2w')['delta']: | |
|
391 | elif delta > h.time_deltas.get("2w")["delta"]: | |
|
368 | 392 | step_interval = timedelta(days=1) |
|
369 | 393 | else: |
|
370 | 394 | step_interval = timedelta(minutes=60) |
|
371 | 395 | |
|
372 | 396 | if step_interval >= timedelta(minutes=60): |
|
373 | log.info('cached_buckets:{}: adjusting start time ' | |
|
374 | 'for hourly or daily intervals'.format(cache_key)) | |
|
397 | log.info( | |
|
398 | "cached_buckets:{}: adjusting start time " | |
|
399 | "for hourly or daily intervals".format(cache_key) | |
|
400 | ) | |
|
375 | 401 | stats_since = stats_since.replace(hour=0, minute=0) |
|
376 | 402 | |
|
377 | ranges = [i.start_interval for i in list(gap_gen(stats_since, | |
|
378 | step_interval, [], | |
|
379 | end_time=end_time))] | |
|
403 | ranges = [ | |
|
404 | i.start_interval | |
|
405 | for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time)) | |
|
406 | ] | |
|
380 | 407 | buckets = {} |
|
381 | storage_key = 'buckets:' + cache_key + '{}|{}' | |
|
408 | storage_key = "buckets:" + cache_key + "{}|{}" | |
|
382 | 409 | # this means we basicly cache per hour in 3-14 day intervals but i think |
|
383 | 410 | # its fine at this point - will be faster than db access anyways |
|
384 | 411 | |
|
385 | 412 | if len(ranges) >= 1: |
|
386 | 413 | last_ranges = [ranges[-1]] |
|
387 | 414 | else: |
|
388 | 415 | last_ranges = [] |
|
389 | 416 | if step_interval >= timedelta(minutes=60): |
|
390 | 417 | for r in ranges: |
|
391 | 418 | k = storage_key.format(step_interval.total_seconds(), r) |
|
392 | 419 | value = request.registry.cache_regions.redis_day_30.get(k) |
|
393 | 420 | # last buckets are never loaded from cache |
|
394 | is_last_result = ( | |
|
395 | r >= end_time - timedelta(hours=6) or r in last_ranges) | |
|
421 | is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges | |
|
396 | 422 | if value is not NO_VALUE and not is_last_result: |
|
397 | log.info("cached_buckets:{}: " |
|
|
398 | "loading range {} from cache".format(cache_key, r)) | |
|
423 | log.info( | |
|
424 | "cached_buckets:{}: " | |
|
425 | "loading range {} from cache".format(cache_key, r) | |
|
426 | ) | |
|
399 | 427 | buckets[r] = value |
|
400 | 428 | else: |
|
401 | log.info("cached_buckets:{}: " |
|
|
402 | "loading range {} from storage".format(cache_key, r)) | |
|
429 | log.info( | |
|
430 | "cached_buckets:{}: " | |
|
431 | "loading range {} from storage".format(cache_key, r) | |
|
432 | ) | |
|
403 | 433 | range_size = step_interval |
|
404 | if (step_interval == timedelta(minutes=60) and | |
|
405 | not is_last_result and rerange): | |
|
434 | if ( | |
|
435 | step_interval == timedelta(minutes=60) | |
|
436 | and not is_last_result | |
|
437 | and rerange | |
|
438 | ): | |
|
406 | 439 | range_size = timedelta(days=1) |
|
407 | 440 | r = r.replace(hour=0, minute=0) |
|
408 | log.info("cached_buckets:{}: " |
|
|
409 | "loading collapsed " |
|
|
|
410 | "range {} {}".format(cache_key, r, | |
|
411 | r + range_size)) |
|
|
|
441 | log.info( | |
|
442 | "cached_buckets:{}: " | |
|
443 | "loading collapsed " | |
|
444 | "range {} {}".format(cache_key, r, r + range_size) | |
|
445 | ) | |
|
412 | 446 | bucket_data = fn( |
|
413 | request, r, r + range_size, step_interval, |
|
|
414 | gap_gen, bucket_count=len(ranges), *args, **kwargs) | |
|
447 | request, | |
|
448 | r, | |
|
449 | r + range_size, | |
|
450 | step_interval, | |
|
451 | gap_gen, | |
|
452 | bucket_count=len(ranges), | |
|
453 | *args, | |
|
454 | **kwargs | |
|
455 | ) | |
|
415 | 456 | for b in bucket_data: |
|
416 | 457 | b_iv = iv_extractor(b) |
|
417 | 458 | buckets[b_iv] = b |
|
418 | k2 = storage_key.format( | |
|
419 | step_interval.total_seconds(), b_iv) | |
|
459 | k2 = storage_key.format(step_interval.total_seconds(), b_iv) | |
|
420 | 460 | request.registry.cache_regions.redis_day_30.set(k2, b) |
|
421 | 461 | log.info("cached_buckets:{}: saving cache".format(cache_key)) |
|
422 | 462 | else: |
|
423 | 463 | # bucket count is 1 for short time ranges <= 24h from now |
|
424 | bucket_data = fn(request, stats_since, end_time, step_interval, | |
|
425 | gap_gen, bucket_count=1, *args, **kwargs) | |
|
464 | bucket_data = fn( | |
|
465 | request, | |
|
466 | stats_since, | |
|
467 | end_time, | |
|
468 | step_interval, | |
|
469 | gap_gen, | |
|
470 | bucket_count=1, | |
|
471 | *args, | |
|
472 | **kwargs | |
|
473 | ) | |
|
426 | 474 | for b in bucket_data: |
|
427 | 475 | buckets[iv_extractor(b)] = b |
|
428 | 476 | return buckets |
|
429 | 477 | |
|
430 | 478 | |
|
431 | def get_cached_split_data(request, stats_since, end_time, fn, cache_key, | |
|
432 | db_session=None, *args, **kwargs): | |
|
479 | def get_cached_split_data( | |
|
480 | request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs | |
|
481 | ): | |
|
433 | 482 | """ Takes "fn" that should return some data and tries to load the data |
|
434 | 483 | dividing it into 2 buckets - cached "since_from" bucket and "today" |
|
435 | 484 | bucket - then the data can be reduced into single value |
|
436 | 485 | |
|
437 | 486 | Data is cached if the stats_since and end time give a delta bigger |
|
438 | 487 | than 24hours - then only 24h is computed on the fly |
|
439 | 488 | """ |
|
440 | 489 | if not end_time: |
|
441 | 490 | end_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
442 | 491 | delta = end_time - stats_since |
|
443 | 492 | |
|
444 | 493 | if delta >= timedelta(minutes=60): |
|
445 | log.info('cached_split_data:{}: adjusting start time ' | |
|
446 | 'for hourly or daily intervals'.format(cache_key)) | |
|
494 | log.info( | |
|
495 | "cached_split_data:{}: adjusting start time " | |
|
496 | "for hourly or daily intervals".format(cache_key) | |
|
497 | ) | |
|
447 | 498 | stats_since = stats_since.replace(hour=0, minute=0) |
|
448 | 499 | |
|
449 | storage_key = 'buckets_split_data:' + cache_key + ':{}|{}' |
|
|
500 | storage_key = "buckets_split_data:" + cache_key + ":{}|{}" | |
|
450 | 501 | old_end_time = end_time.replace(hour=0, minute=0) |
|
451 | 502 | |
|
452 | final_storage_key = storage_key.format(delta.total_seconds(), | |
|
453 | old_end_time) | |
|
503 | final_storage_key = storage_key.format(delta.total_seconds(), old_end_time) | |
|
454 | 504 | older_data = None |
|
455 | 505 | |
|
456 | cdata = request.registry.cache_regions.redis_day_7.get( | |
|
457 | final_storage_key) | |
|
506 | cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key) | |
|
458 | 507 | |
|
459 | 508 | if cdata: |
|
460 | log.info("cached_split_data:{}: found old " | |
|
461 | "bucket data".format(cache_key)) | |
|
509 | log.info("cached_split_data:{}: found old " "bucket data".format(cache_key)) | |
|
462 | 510 | older_data = cdata |
|
463 | 511 | |
|
464 | if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and |
|
|
465 | not cdata): | |
|
466 | log.info("cached_split_data:{}: didn't find the " |
|
|
|
467 | "start bucket in cache so load older data".format(cache_key)) |
|
|
|
512 | if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata: | |
|
513 | log.info( | |
|
514 | "cached_split_data:{}: didn't find the " | |
|
515 | "start bucket in cache so load older data".format(cache_key) | |
|
516 | ) | |
|
468 | 517 | recent_stats_since = old_end_time |
|
469 | older_data = fn(request, stats_since, recent_stats_since, | |
|
470 | db_session=db_session, *args, **kwargs) | |
|
471 | request.registry.cache_regions.redis_day_7.set(final_storage_key, | |
|
472 | older_data) | |
|
473 | elif stats_since < end_time - h.time_deltas.get('24h')['delta']: | |
|
518 | older_data = fn( | |
|
519 | request, | |
|
520 | stats_since, | |
|
521 | recent_stats_since, | |
|
522 | db_session=db_session, | |
|
523 | *args, | |
|
524 | **kwargs | |
|
525 | ) | |
|
526 | request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data) | |
|
527 | elif stats_since < end_time - h.time_deltas.get("24h")["delta"]: | |
|
474 | 528 | recent_stats_since = old_end_time |
|
475 | 529 | else: |
|
476 | 530 | recent_stats_since = stats_since |
|
477 | 531 | |
|
478 | log.info("cached_split_data:{}: loading fresh " | |
|
479 | "data bucksts from last 24h ".format(cache_key)) | |
|
480 | todays_data = fn(request, recent_stats_since, end_time, | |
|
481 | db_session=db_session, *args, **kwargs) | |
|
532 | log.info( | |
|
533 | "cached_split_data:{}: loading fresh " | |
|
534 | "data bucksts from last 24h ".format(cache_key) | |
|
535 | ) | |
|
536 | todays_data = fn( | |
|
537 | request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs | |
|
538 | ) | |
|
482 | 539 | return older_data, todays_data |
|
483 | 540 | |
|
484 | 541 | |
|
485 | 542 | def in_batches(seq, size): |
|
486 | 543 | """ |
|
487 | 544 | Splits am iterable into batches of specified size |
|
488 | 545 | :param seq (iterable) |
|
489 | 546 | :param size integer |
|
490 | 547 | """ |
|
491 | return (seq[pos:pos + size] for pos in range(0, len(seq), size)) | |
|
548 | return (seq[pos : pos + size] for pos in range(0, len(seq), size)) |
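
A usage sketch (not part of the diff; demo values are hypothetical): the helpers above cache one "bucket" of data per time interval under a key built from the cache key, the step length in seconds, and the interval start, while in_batches slices a sequence into fixed-size chunks:

from datetime import datetime, timedelta

def in_batches(seq, size):
    # consecutive slices of `size` items, same as the generator above
    return (seq[pos:pos + size] for pos in range(0, len(seq), size))

# hourly interval starts for the last 6 hours, like the `ranges` list above
now = datetime.utcnow().replace(minute=0, second=0, microsecond=0)
ranges = [now - timedelta(hours=h) for h in range(6, 0, -1)]

step = timedelta(minutes=60)
storage_key = 'buckets:' + 'report_stats' + '{}|{}'
for batch in in_batches(ranges, 2):
    for r in batch:
        # one cache entry per (interval length, interval start) pair
        print(storage_key.format(step.total_seconds(), r))
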
@@ -1,142 +1,161 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import uuid |
|
19 | 19 | |
|
20 | 20 | from datetime import datetime |
|
21 | 21 | |
|
22 | 22 | log = logging.getLogger(__name__) |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def parse_airbrake_xml(request): |
|
26 | 26 | root = request.context.airbrake_xml_etree |
|
27 |
error = root.find( |
|
|
28 |
notifier = root.find( |
|
|
29 |
server_env = root.find( |
|
|
30 |
request_data = root.find( |
|
|
31 |
user = root.find( |
|
|
27 | error = root.find("error") | |
|
28 | notifier = root.find("notifier") | |
|
29 | server_env = root.find("server-environment") | |
|
30 | request_data = root.find("request") | |
|
31 | user = root.find("current-user") | |
|
32 | 32 | if request_data is not None: |
|
33 |
cgi_data = request_data.find( |
|
|
33 | cgi_data = request_data.find("cgi-data") | |
|
34 | 34 | if cgi_data is None: |
|
35 | 35 | cgi_data = [] |
|
36 | 36 | |
|
37 | 37 | error_dict = { |
|
38 |
|
|
|
39 |
|
|
|
38 | "class_name": error.findtext("class") or "", | |
|
39 | "error": error.findtext("message") or "", | |
|
40 | 40 | "occurences": 1, |
|
41 | 41 | "http_status": 500, |
|
42 | 42 | "priority": 5, |
|
43 |
"server": |
|
|
44 |
|
|
|
43 | "server": "unknown", | |
|
44 | "url": "unknown", | |
|
45 | "request": {}, | |
|
45 | 46 | } |
|
46 | 47 | if user is not None: |
|
47 |
error_dict[ |
|
|
48 | user.findtext('id') | |
|
48 | error_dict["username"] = user.findtext("username") or user.findtext("id") | |
|
49 | 49 | if notifier is not None: |
|
50 |
error_dict[ |
|
|
50 | error_dict["client"] = notifier.findtext("name") | |
|
51 | 51 | |
|
52 | 52 | if server_env is not None: |
|
53 |
error_dict["server"] = server_env.findtext( |
|
|
53 | error_dict["server"] = server_env.findtext("hostname", "unknown") | |
|
54 | 54 | |
|
55 | whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', | |
|
56 | 'CONTENT_TYPE', 'HTTP_REFERER'] | |
|
55 | whitelist_environ = [ | |
|
56 | "REMOTE_USER", | |
|
57 | "REMOTE_ADDR", | |
|
58 | "SERVER_NAME", | |
|
59 | "CONTENT_TYPE", | |
|
60 | "HTTP_REFERER", | |
|
61 | ] | |
|
57 | 62 | |
|
58 | 63 | if request_data is not None: |
|
59 |
error_dict[ |
|
|
60 |
component = request_data.findtext( |
|
|
61 |
action = request_data.findtext( |
|
|
64 | error_dict["url"] = request_data.findtext("url", "unknown") | |
|
65 | component = request_data.findtext("component") | |
|
66 | action = request_data.findtext("action") | |
|
62 | 67 | if component and action: |
|
63 |
error_dict[ |
|
|
68 | error_dict["view_name"] = "%s:%s" % (component, action) | |
|
64 | 69 | for node in cgi_data: |
|
65 |
key = node.get( |
|
|
66 |
if key.startswith( |
|
|
67 |
error_dict[ |
|
|
68 |
elif |
|
|
69 |
error_dict[ |
|
|
70 | key = node.get("key") | |
|
71 | if key.startswith("HTTP") or key in whitelist_environ: | |
|
72 | error_dict["request"][key] = node.text | |
|
73 | elif "query_parameters" in key: | |
|
74 | error_dict["request"]["GET"] = {} | |
|
70 | 75 | for x in node: |
|
71 | error_dict['request']['GET'][x.get('key')] = x.text |
|
|
72 | elif 'request_parameters' in key: |
|
|
73 | error_dict['request']['POST'] = {} |
|
|
76 | error_dict["request"]["GET"][x.get("key")] = x.text | |
|
77 | elif "request_parameters" in key: | |
|
78 | error_dict["request"]["POST"] = {} | |
|
74 | 79 | for x in node: |
|
75 | error_dict['request']['POST'][x.get('key')] = x.text |
|
|
76 | elif key.endswith('cookie'): |
|
|
77 | error_dict['request']['COOKIE'] = {} |
|
|
80 | error_dict["request"]["POST"][x.get("key")] = x.text | |
|
81 | elif key.endswith("cookie"): | |
|
82 | error_dict["request"]["COOKIE"] = {} | |
|
78 | 83 | for x in node: |
|
79 | error_dict['request']['COOKIE'][x.get('key')] = x.text |
|
|
80 | elif key.endswith('request_id'): |
|
|
81 | error_dict['request_id'] = node.text |
|
|
82 | elif key.endswith('session'): |
|
|
83 | error_dict['request']['SESSION'] = {} |
|
|
84 | error_dict["request"]["COOKIE"][x.get("key")] = x.text | |
|
85 | elif key.endswith("request_id"): | |
|
86 | error_dict["request_id"] = node.text | |
|
87 | elif key.endswith("session"): | |
|
88 | error_dict["request"]["SESSION"] = {} | |
|
84 | 89 | for x in node: |
|
85 | error_dict['request']['SESSION'][x.get('key')] = x.text |
|
|
90 | error_dict["request"]["SESSION"][x.get("key")] = x.text | |
|
86 | 91 | else: |
|
87 | if key in ['rack.session.options']: |
|
|
92 | if key in ["rack.session.options"]: | |
|
88 | 93 | # skip secret configs |
|
89 | 94 | continue |
|
90 | 95 | try: |
|
91 | 96 | if len(node): |
|
92 | error_dict['request'][key] = dict( |
|
|
93 | [(x.get('key'), x.text) for x in node]) |
|
|
97 | error_dict["request"][key] = dict( | |
|
98 | [(x.get("key"), x.text) for x in node] | |
|
99 | ) | |
|
94 | 100 | else: |
|
95 | error_dict['request'][key] = node.text |
|
|
101 | error_dict["request"][key] = node.text | |
|
96 | 102 | except Exception as e: |
|
97 | log.warning('Airbrake integration exception: %s' % e) |
|
|
103 | log.warning("Airbrake integration exception: %s" % e) | |
|
98 | 104 | |
|
99 | error_dict['request'].pop('HTTP_COOKIE', '') |
|
|
105 | error_dict["request"].pop("HTTP_COOKIE", "") | |
|
100 | 106 | |
|
101 | error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '') |
|
|
102 | error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '') |
|
|
103 | if 'request_id' not in error_dict: |
|
|
104 | error_dict['request_id'] = str(uuid.uuid4()) |
|
|
107 | error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "") | |
|
108 | error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "") | |
|
109 | if "request_id" not in error_dict: | |
|
110 | error_dict["request_id"] = str(uuid.uuid4()) | |
|
105 | 111 | if request.context.possibly_public: |
|
106 | 112 | # set ip for reports that come from airbrake js client |
|
107 | 113 | error_dict["timestamp"] = datetime.utcnow() |
|
108 | 114 | if request.environ.get("HTTP_X_FORWARDED_FOR"): |
|
109 | ip = request.environ.get("HTTP_X_FORWARDED_FOR", '') |
|
|
110 | first_ip = ip.split(',')[0] |
|
|
115 | ip = request.environ.get("HTTP_X_FORWARDED_FOR", "") | |
|
116 | first_ip = ip.split(",")[0] | |
|
111 | 117 | remote_addr = first_ip.strip() |
|
112 | 118 | else: |
|
113 | remote_addr = (request.environ.get("HTTP_X_REAL_IP") or |
|
|
114 | request.environ.get('REMOTE_ADDR')) | |
|
119 | remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get( | |
|
120 | "REMOTE_ADDR" | |
|
121 | ) | |
|
115 | 122 | error_dict["ip"] = remote_addr |
|
116 | 123 | |
|
117 | blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf', | |
|
118 | 'session', 'test'] | |
|
124 | blacklist = [ | |
|
125 | "password", | |
|
126 | "passwd", | |
|
127 | "pwd", | |
|
128 | "auth_tkt", | |
|
129 | "secret", | |
|
130 | "csrf", | |
|
131 | "session", | |
|
132 | "test", | |
|
133 | ] | |
|
119 | 134 | |
|
120 | 135 | lines = [] |
|
121 | for l in error.find('backtrace'): |
|
|
122 | lines.append({'file': l.get("file", ""), |
|
|
123 | 'line': l.get("number", ""), | |
|
124 | 'fn': l.get("method", ""), |
|
|
|
125 | 'module': l.get("module", ""), |
|
|
|
126 | 'cline': l.get("method", ""), |
|
|
|
127 | 'vars': {}}) | |
|
128 | error_dict['traceback'] = list(reversed(lines)) | |
|
136 | for l in error.find("backtrace"): | |
|
137 | lines.append( | |
|
138 | { | |
|
139 | "file": l.get("file", ""), | |
|
140 | "line": l.get("number", ""), | |
|
141 | "fn": l.get("method", ""), | |
|
142 | "module": l.get("module", ""), | |
|
143 | "cline": l.get("method", ""), | |
|
144 | "vars": {}, | |
|
145 | } | |
|
146 | ) | |
|
147 | error_dict["traceback"] = list(reversed(lines)) | |
|
129 | 148 | # filtering is not provided by airbrake |
|
130 | 149 | keys_to_check = ( |
|
131 |
error_dict[ |
|
|
132 |
error_dict[ |
|
|
133 |
error_dict[ |
|
|
134 |
error_dict[ |
|
|
150 | error_dict["request"].get("COOKIE"), | |
|
151 | error_dict["request"].get("COOKIES"), | |
|
152 | error_dict["request"].get("POST"), | |
|
153 | error_dict["request"].get("SESSION"), | |
|
135 | 154 | ) |
|
136 | 155 | for source in [_f for _f in keys_to_check if _f]: |
|
137 | 156 | for k in source.keys(): |
|
138 | 157 | for bad_key in blacklist: |
|
139 | 158 | if bad_key in k.lower(): |
|
140 | source[k] = '***' |
|
|
159 | source[k] = "***" | |
|
141 | 160 | |
|
142 | 161 | return error_dict |
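
For reference, a standalone sketch of the scrubbing step this parser ends with (the sample dict is hypothetical): any key containing a blacklisted word is masked before the report is stored.

blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
             'session', 'test']

form = {'login': 'bob', 'user_password': 'hunter2', 'csrf_token': 'abc'}
for k in list(form.keys()):
    if any(bad_key in k.lower() for bad_key in blacklist):
        form[k] = '***'
print(form)  # {'login': 'bob', 'user_password': '***', 'csrf_token': '***'}
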
@@ -1,56 +1,56 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from datetime import tzinfo, timedelta, datetime |
|
18 | 18 | from dateutil.relativedelta import relativedelta |
|
19 | 19 | import logging |
|
20 | 20 | |
|
21 | 21 | log = logging.getLogger(__name__) |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | def to_relativedelta(time_delta): |
|
25 | return relativedelta(seconds=int(time_delta.total_seconds()), | |
|
26 | microseconds=time_delta.microseconds) |
|
|
|
25 | return relativedelta( | |
|
26 | seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds | |
|
27 | ) | |
|
27 | 28 | |
|
28 | 29 | |
|
29 | def convert_date(date_str, return_utcnow_if_wrong=True, | |
|
30 | normalize_future=False): | |
|
30 | def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False): | |
|
31 | 31 | utcnow = datetime.utcnow() |
|
32 | 32 | if isinstance(date_str, datetime): |
|
33 | 33 | # get rid of tzinfo |
|
34 | 34 | return date_str.replace(tzinfo=None) |
|
35 | 35 | if not date_str and return_utcnow_if_wrong: |
|
36 | 36 | return utcnow |
|
37 | 37 | try: |
|
38 | 38 | try: |
|
39 | if 'Z' in date_str: |
|
|
40 | date_str = date_str[:date_str.index('Z')] |
|
|
41 | if '.' in date_str: |
|
|
42 | date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%f') |
|
|
39 | if "Z" in date_str: | |
|
40 | date_str = date_str[: date_str.index("Z")] | |
|
41 | if "." in date_str: | |
|
42 | date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f") | |
|
43 | 43 | else: |
|
44 | date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S') |
|
|
44 | date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S") | |
|
45 | 45 | except Exception: |
|
46 | 46 | # bw compat with old client |
|
47 | date = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S,%f') |
|
|
47 | date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f") | |
|
48 | 48 | except Exception: |
|
49 | 49 | if return_utcnow_if_wrong: |
|
50 | 50 | date = utcnow |
|
51 | 51 | else: |
|
52 | 52 | date = None |
|
53 | 53 | if normalize_future and date and date > (utcnow + timedelta(minutes=3)): |
|
54 | log.warning('time %s in future + 3 min, normalizing' % date) |
|
|
54 | log.warning("time %s in future + 3 min, normalizing" % date) | |
|
55 | 55 | return utcnow |
|
56 | 56 | return date |
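
For reference, a sketch of the timestamp formats convert_date accepts (the inputs are hypothetical): ISO 8601 with or without fractional seconds, an optional trailing "Z", and the legacy comma-separated client format.

from datetime import datetime

for raw in ('2017-05-04T14:37:00.123456Z',
            '2017-05-04T14:37:00',
            '2017-05-04 14:37:00,123456'):
    s = raw[:raw.index('Z')] if 'Z' in raw else raw
    if 'T' in s:
        fmt = '%Y-%m-%dT%H:%M:%S.%f' if '.' in s else '%Y-%m-%dT%H:%M:%S'
    else:
        fmt = '%Y-%m-%d %H:%M:%S,%f'
    print(datetime.strptime(s, fmt))
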
@@ -1,296 +1,317 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from datetime import timedelta |
|
18 | 18 | |
|
19 | 19 | from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType |
|
20 | 20 | |
|
21 | 21 | EXCLUDED_LOG_VARS = [ |
|
22 | 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename', | |
|
23 | 'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs', | |
|
24 | 'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated', | |
|
25 | 'thread', 'threadName'] | |
|
22 | "args", | |
|
23 | "asctime", | |
|
24 | "created", | |
|
25 | "exc_info", | |
|
26 | "exc_text", | |
|
27 | "filename", | |
|
28 | "funcName", | |
|
29 | "levelname", | |
|
30 | "levelno", | |
|
31 | "lineno", | |
|
32 | "message", | |
|
33 | "module", | |
|
34 | "msecs", | |
|
35 | "msg", | |
|
36 | "name", | |
|
37 | "pathname", | |
|
38 | "process", | |
|
39 | "processName", | |
|
40 | "relativeCreated", | |
|
41 | "thread", | |
|
42 | "threadName", | |
|
43 | ] | |
|
26 | 44 | |
|
27 | 45 | EXCLUDE_SENTRY_KEYS = [ |
|
28 | 'csp', |

29 | 'culprit', |

30 | 'event_id', |

31 | 'exception', |

32 | 'extra', |

33 | 'level', |

34 | 'logentry', |

35 | 'logger', |

36 | 'message', |

37 | 'modules', |

38 | 'platform', |

39 | 'query', |

40 | 'release', |

41 | 'request', |
|
|
|
42 | 'sentry.interfaces.Csp', 'sentry.interfaces.Exception', | |
|
43 | 'sentry.interfaces.Http', 'sentry.interfaces.Message', | |
|
44 | 'sentry.interfaces.Query', |

45 | 'sentry.interfaces.Stacktrace', |
|
|
|
46 | 'sentry.interfaces.Template', 'sentry.interfaces.User', | |
|
47 | 'sentry.interfaces.csp.Csp', |

48 | 'sentry.interfaces.exception.Exception', |

49 | 'sentry.interfaces.http.Http', |

50 | 'sentry.interfaces.message.Message', |

51 | 'sentry.interfaces.query.Query', |

52 | 'sentry.interfaces.stacktrace.Stacktrace', |

53 | 'sentry.interfaces.template.Template', |

54 | 'sentry.interfaces.user.User', 'server_name', |
|
|
|
55 | 'stacktrace', | |
|
56 | 'tags', | |
|
57 | 'template', | |
|
58 | 'time_spent', | |
|
59 | 'timestamp', | |
|
60 | 'user'] | |
|
46 | "csp", | |
|
47 | "culprit", | |
|
48 | "event_id", | |
|
49 | "exception", | |
|
50 | "extra", | |
|
51 | "level", | |
|
52 | "logentry", | |
|
53 | "logger", | |
|
54 | "message", | |
|
55 | "modules", | |
|
56 | "platform", | |
|
57 | "query", | |
|
58 | "release", | |
|
59 | "request", | |
|
60 | "sentry.interfaces.Csp", | |
|
61 | "sentry.interfaces.Exception", | |
|
62 | "sentry.interfaces.Http", | |
|
63 | "sentry.interfaces.Message", | |
|
64 | "sentry.interfaces.Query", | |
|
65 | "sentry.interfaces.Stacktrace", | |
|
66 | "sentry.interfaces.Template", | |
|
67 | "sentry.interfaces.User", | |
|
68 | "sentry.interfaces.csp.Csp", | |
|
69 | "sentry.interfaces.exception.Exception", | |
|
70 | "sentry.interfaces.http.Http", | |
|
71 | "sentry.interfaces.message.Message", | |
|
72 | "sentry.interfaces.query.Query", | |
|
73 | "sentry.interfaces.stacktrace.Stacktrace", | |
|
74 | "sentry.interfaces.template.Template", | |
|
75 | "sentry.interfaces.user.User", | |
|
76 | "server_name", | |
|
77 | "stacktrace", | |
|
78 | "tags", | |
|
79 | "template", | |
|
80 | "time_spent", | |
|
81 | "timestamp", | |
|
82 | "user", | |
|
83 | ] | |
|
61 | 84 | |
|
62 | 85 | |
|
63 | 86 | def get_keys(list_of_keys, json_body): |
|
64 | 87 | for k in list_of_keys: |
|
65 | 88 | if k in json_body: |
|
66 | 89 | return json_body[k] |
|
67 | 90 | |
|
68 | 91 | |
|
69 | 92 | def get_logentry(json_body): |
|
70 | key_names = ['logentry', |
|
|
71 | 'sentry.interfaces.message.Message', | |
|
72 | 'sentry.interfaces.Message', |
|
|
|
73 | ] | |
|
93 | key_names = [ | |
|
94 | "logentry", | |
|
95 | "sentry.interfaces.message.Message", | |
|
96 | "sentry.interfaces.Message", | |
|
97 | ] | |
|
74 | 98 | logentry = get_keys(key_names, json_body) |
|
75 | 99 | return logentry |
|
76 | 100 | |
|
77 | 101 | |
|
78 | 102 | def get_exception(json_body): |
|
79 | 103 | parsed_exception = {} |
|
80 | key_names = ['exception', |
|
|
81 | 'sentry.interfaces.exception.Exception', | |
|
82 | 'sentry.interfaces.Exception', |
|
|
|
83 | ] | |
|
104 | key_names = [ | |
|
105 | "exception", | |
|
106 | "sentry.interfaces.exception.Exception", | |
|
107 | "sentry.interfaces.Exception", | |
|
108 | ] | |
|
84 | 109 | exception = get_keys(key_names, json_body) or {} |
|
85 | 110 | if exception: |
|
86 | 111 | if isinstance(exception, dict): |
|
87 | exception = exception['values'][0] |
|
|
112 | exception = exception["values"][0] | |
|
88 | 113 | else: |
|
89 | 114 | exception = exception[0] |
|
90 | 115 | |
|
91 | parsed_exception['type'] = exception.get('type') |
|
|
92 | parsed_exception['value'] = exception.get('value') |
|
|
93 | parsed_exception['module'] = exception.get('module') |
|
|
116 | parsed_exception["type"] = exception.get("type") | |
|
117 | parsed_exception["value"] = exception.get("value") | |
|
118 | parsed_exception["module"] = exception.get("module") | |
|
94 | 119 | parsed_stacktrace = get_stacktrace(exception) or {} |
|
95 | 120 | parsed_exception = exception or {} |
|
96 | 121 | return parsed_exception, parsed_stacktrace |
|
97 | 122 | |
|
98 | 123 | |
|
99 | 124 | def get_stacktrace(json_body): |
|
100 | 125 | parsed_stacktrace = [] |
|
101 | key_names = ['stacktrace', |
|
|
102 | 'sentry.interfaces.stacktrace.Stacktrace', | |
|
103 | 'sentry.interfaces.Stacktrace', |
|
|
|
104 | ] | |
|
126 | key_names = [ | |
|
127 | "stacktrace", | |
|
128 | "sentry.interfaces.stacktrace.Stacktrace", | |
|
129 | "sentry.interfaces.Stacktrace", | |
|
130 | ] | |
|
105 | 131 | stacktrace = get_keys(key_names, json_body) |
|
106 | 132 | if stacktrace: |
|
107 | for frame in stacktrace['frames']: |
|
|
133 | for frame in stacktrace["frames"]: | |
|
108 | 134 | parsed_stacktrace.append( |
|
109 | {"cline": frame.get('context_line', ''), | |
|
110 |
" |
|
|
111 |
" |
|
|
112 |
" |
|
|
113 |
" |
|
|
114 |
" |
|
|
115 | } | |
|
135 | { | |
|
136 | "cline": frame.get("context_line", ""), | |
|
137 | "file": frame.get("filename", ""), | |
|
138 | "module": frame.get("module", ""), | |
|
139 | "fn": frame.get("function", ""), | |
|
140 | "line": frame.get("lineno", ""), | |
|
141 | "vars": list(frame.get("vars", {}).items()), | |
|
142 | } | |
|
116 | 143 | ) |
|
117 | 144 | return parsed_stacktrace |
|
118 | 145 | |
|
119 | 146 | |
|
120 | 147 | def get_template(json_body): |
|
121 | 148 | parsed_template = {} |
|
122 |
key_names = [ |
|
|
123 | 'sentry.interfaces.template.Template', | |
|
124 |
|
|
|
125 | ] | |
|
149 | key_names = [ | |
|
150 | "template", | |
|
151 | "sentry.interfaces.template.Template", | |
|
152 | "sentry.interfaces.Template", | |
|
153 | ] | |
|
126 | 154 | template = get_keys(key_names, json_body) |
|
127 | 155 | if template: |
|
128 |
for frame in template[ |
|
|
156 | for frame in template["frames"]: | |
|
129 | 157 | parsed_template.append( |
|
130 | {"cline": frame.get('context_line', ''), | |
|
131 |
" |
|
|
132 | "fn": '', | |
|
133 | "line": frame.get('lineno', ''), | |
|
134 | "vars": [] | |
|
135 |
|
|
|
158 | { | |
|
159 | "cline": frame.get("context_line", ""), | |
|
160 | "file": frame.get("filename", ""), | |
|
161 | "fn": "", | |
|
162 | "line": frame.get("lineno", ""), | |
|
163 | "vars": [], | |
|
164 | } | |
|
136 | 165 | ) |
|
137 | 166 | |
|
138 | 167 | return parsed_template |
|
139 | 168 | |
|
140 | 169 | |
|
141 | 170 | def get_request(json_body): |
|
142 | 171 | parsed_http = {} |
|
143 | key_names = ['request', | |
|
144 | 'sentry.interfaces.http.Http', | |
|
145 | 'sentry.interfaces.Http' | |
|
146 | ] | |
|
172 | key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"] | |
|
147 | 173 | http = get_keys(key_names, json_body) or {} |
|
148 | 174 | for k, v in http.items(): |
|
149 |
if k == |
|
|
150 |
parsed_http[ |
|
|
151 |
for sk, sv in http[ |
|
|
152 |
parsed_http[ |
|
|
175 | if k == "headers": | |
|
176 | parsed_http["headers"] = {} | |
|
177 | for sk, sv in http["headers"].items(): | |
|
178 | parsed_http["headers"][sk.title()] = sv | |
|
153 | 179 | else: |
|
154 | 180 | parsed_http[k.lower()] = v |
|
155 | 181 | return parsed_http |
|
156 | 182 | |
|
157 | 183 | |
|
158 | 184 | def get_user(json_body): |
|
159 | 185 | parsed_user = {} |
|
160 | key_names = ['user', | |
|
161 | 'sentry.interfaces.user.User', | |
|
162 | 'sentry.interfaces.User' | |
|
163 | ] | |
|
186 | key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"] | |
|
164 | 187 | user = get_keys(key_names, json_body) |
|
165 | 188 | if user: |
|
166 |
parsed_user[ |
|
|
167 |
parsed_user[ |
|
|
168 |
parsed_user[ |
|
|
169 |
parsed_user[ |
|
|
189 | parsed_user["id"] = user.get("id") | |
|
190 | parsed_user["username"] = user.get("username") | |
|
191 | parsed_user["email"] = user.get("email") | |
|
192 | parsed_user["ip_address"] = user.get("ip_address") | |
|
170 | 193 | |
|
171 | 194 | return parsed_user |
|
172 | 195 | |
|
173 | 196 | |
|
174 | 197 | def get_query(json_body): |
|
175 | 198 | query = None |
|
176 | key_name = ['query', | |
|
177 | 'sentry.interfaces.query.Query', | |
|
178 | 'sentry.interfaces.Query' | |
|
179 | ] | |
|
199 | key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"] | |
|
180 | 200 | query = get_keys(key_name, json_body) |
|
181 | 201 | return query |
|
182 | 202 | |
|
183 | 203 | |
|
184 | 204 | def parse_sentry_event(json_body): |
|
185 |
request_id = json_body.get( |
|
|
205 | request_id = json_body.get("event_id") | |
|
186 | 206 | |
|
187 | 207 | # required |
|
188 |
message = json_body.get( |
|
|
189 |
log_timestamp = json_body.get( |
|
|
190 |
level = json_body.get( |
|
|
208 | message = json_body.get("message") | |
|
209 | log_timestamp = json_body.get("timestamp") | |
|
210 | level = json_body.get("level") | |
|
191 | 211 | if isinstance(level, int): |
|
192 | 212 | level = LogLevelPython.key_from_value(level) |
|
193 | 213 | |
|
194 |
namespace = json_body.get( |
|
|
195 |
language = json_body.get( |
|
|
214 | namespace = json_body.get("logger") | |
|
215 | language = json_body.get("platform") | |
|
196 | 216 | |
|
197 | 217 | # optional |
|
198 |
server_name = json_body.get( |
|
|
199 |
culprit = json_body.get( |
|
|
200 |
release = json_body.get( |
|
|
218 | server_name = json_body.get("server_name") | |
|
219 | culprit = json_body.get("culprit") | |
|
220 | release = json_body.get("release") | |
|
201 | 221 | |
|
202 |
tags = json_body.get( |
|
|
203 |
if hasattr(tags, |
|
|
222 | tags = json_body.get("tags", {}) | |
|
223 | if hasattr(tags, "items"): | |
|
204 | 224 | tags = list(tags.items()) |
|
205 |
extra = json_body.get( |
|
|
206 |
if hasattr(extra, |
|
|
225 | extra = json_body.get("extra", {}) | |
|
226 | if hasattr(extra, "items"): | |
|
207 | 227 | extra = list(extra.items()) |
|
208 | 228 | |
|
209 | 229 | parsed_req = get_request(json_body) |
|
210 | 230 | user = get_user(json_body) |
|
211 | 231 | template = get_template(json_body) |
|
212 | 232 | query = get_query(json_body) |
|
213 | 233 | |
|
214 | 234 | # other unidentified keys found |
|
215 | other_keys = [(k, json_body[k]) for k in json_body.keys() | |
|
216 |
|
|
|
235 | other_keys = [ | |
|
236 | (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS | |
|
237 | ] | |
|
217 | 238 | |
|
218 | 239 | logentry = get_logentry(json_body) |
|
219 | 240 | if logentry: |
|
220 |
message = logentry[ |
|
|
241 | message = logentry["message"] | |
|
221 | 242 | |
|
222 | 243 | exception, stacktrace = get_exception(json_body) |
|
223 | 244 | |
|
224 | 245 | alt_stacktrace = get_stacktrace(json_body) |
|
225 | 246 | event_type = None |
|
226 | 247 | if not exception and not stacktrace and not alt_stacktrace and not template: |
|
227 | 248 | event_type = ParsedSentryEventType.LOG |
|
228 | 249 | |
|
229 | 250 | event_dict = { |
|
230 |
|
|
|
231 |
|
|
|
232 |
|
|
|
233 |
|
|
|
234 |
|
|
|
235 |
|
|
|
236 |
|
|
|
251 | "log_level": level, | |
|
252 | "message": message, | |
|
253 | "namespace": namespace, | |
|
254 | "request_id": request_id, | |
|
255 | "server": server_name, | |
|
256 | "date": log_timestamp, | |
|
257 | "tags": tags, | |
|
237 | 258 | } |
|
238 |
event_dict[ |
|
|
239 |
[(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS] |
|
|
259 | event_dict["tags"].extend( | |
|
260 | [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS] | |
|
261 | ) | |
|
240 | 262 | |
|
241 | 263 | # other keys can be various object types |
|
242 |
event_dict[ |
|
|
243 | if isinstance(v, str)]) | |
|
264 | event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)]) | |
|
244 | 265 | if culprit: |
|
245 |
event_dict[ |
|
|
266 | event_dict["tags"].append(("sentry_culprit", culprit)) | |
|
246 | 267 | if language: |
|
247 |
event_dict[ |
|
|
268 | event_dict["tags"].append(("sentry_language", language)) | |
|
248 | 269 | if release: |
|
249 |
event_dict[ |
|
|
270 | event_dict["tags"].append(("sentry_release", release)) | |
|
250 | 271 | |
|
251 | 272 | if exception or stacktrace or alt_stacktrace or template: |
|
252 | 273 | event_type = ParsedSentryEventType.ERROR_REPORT |
|
253 | 274 | event_dict = { |
|
254 |
|
|
|
255 |
|
|
|
256 |
|
|
|
257 |
|
|
|
258 |
|
|
|
259 |
|
|
|
260 |
|
|
|
261 |
|
|
|
262 |
|
|
|
263 |
|
|
|
264 |
|
|
|
265 |
|
|
|
266 |
|
|
|
267 |
|
|
|
268 |
|
|
|
269 |
|
|
|
270 |
|
|
|
271 |
|
|
|
272 |
|
|
|
273 |
|
|
|
275 | "client": "sentry", | |
|
276 | "error": message, | |
|
277 | "namespace": namespace, | |
|
278 | "request_id": request_id, | |
|
279 | "server": server_name, | |
|
280 | "start_time": log_timestamp, | |
|
281 | "end_time": None, | |
|
282 | "tags": tags, | |
|
283 | "extra": extra, | |
|
284 | "language": language, | |
|
285 | "view_name": json_body.get("culprit"), | |
|
286 | "http_status": None, | |
|
287 | "username": None, | |
|
288 | "url": parsed_req.get("url"), | |
|
289 | "ip": None, | |
|
290 | "user_agent": None, | |
|
291 | "request": None, | |
|
292 | "slow_calls": None, | |
|
293 | "request_stats": None, | |
|
294 | "traceback": None, | |
|
274 | 295 | } |
|
275 | 296 | |
|
276 |
event_dict[ |
|
|
297 | event_dict["extra"].extend(other_keys) | |
|
277 | 298 | if release: |
|
278 |
event_dict[ |
|
|
279 |
event_dict[ |
|
|
280 |
if |
|
|
281 |
event_dict[ |
|
|
282 |
if |
|
|
283 |
event_dict[ |
|
|
284 |
ts_ms = int(json_body.get( |
|
|
299 | event_dict["tags"].append(("sentry_release", release)) | |
|
300 | event_dict["request"] = parsed_req | |
|
301 | if "headers" in parsed_req: | |
|
302 | event_dict["user_agent"] = parsed_req["headers"].get("User-Agent") | |
|
303 | if "env" in parsed_req: | |
|
304 | event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR") | |
|
305 | ts_ms = int(json_body.get("time_spent") or 0) | |
|
285 | 306 | if ts_ms > 0: |
|
286 |
event_dict[ |
|
|
287 |
|
|
|
307 | event_dict["end_time"] = event_dict["start_time"] + timedelta( | |
|
308 | milliseconds=ts_ms | |
|
309 | ) | |
|
288 | 310 | if stacktrace or alt_stacktrace or template: |
|
289 |
event_dict[ |
|
|
311 | event_dict["traceback"] = stacktrace or alt_stacktrace or template | |
|
290 | 312 | for k in list(event_dict.keys()): |
|
291 | 313 | if event_dict[k] is None: |
|
292 | 314 | del event_dict[k] |
|
293 | 315 | if user: |
|
294 |
event_dict[ |
|
|
295 | or user['email'] | |
|
316 | event_dict["username"] = user["username"] or user["id"] or user["email"] | |
|
296 | 317 | return event_dict, event_type |
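
A minimal sketch of the key-fallback pattern this parser uses throughout (the sample payload is hypothetical): each section of a Sentry event may appear under a short name or one of the older "sentry.interfaces.*" aliases, and the first key present wins.

def get_keys(list_of_keys, json_body):
    for k in list_of_keys:
        if k in json_body:
            return json_body[k]

event = {'sentry.interfaces.Message': {'message': 'boom'}}
logentry = get_keys(['logentry',
                     'sentry.interfaces.message.Message',
                     'sentry.interfaces.Message'], event)
print(logentry)  # {'message': 'boom'}
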
@@ -1,17 +1,15 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | ||
|
17 |
@@ -1,98 +1,102 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from alembic import context |
|
18 | 18 | from sqlalchemy import engine_from_config, pool, MetaData |
|
19 | 19 | from logging.config import fileConfig |
|
20 | 20 | from appenlight.models import NAMING_CONVENTION |
|
21 | 21 | |
|
22 | 22 | # this is the Alembic Config object, which provides |
|
23 | 23 | # access to the values within the .ini file in use. |
|
24 | 24 | config = context.config |
|
25 | 25 | |
|
26 | 26 | # Interpret the config file for Python logging. |
|
27 | 27 | # This line sets up loggers basically. |
|
28 | 28 | if config.config_file_name: |
|
29 | 29 | fileConfig(config.config_file_name) |
|
30 | 30 | |
|
31 | 31 | # add your model's MetaData object here |
|
32 | 32 | # for 'autogenerate' support |
|
33 | 33 | # from myapp import mymodel |
|
34 | 34 | # target_metadata = mymodel.Base.metadata |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | target_metadata = MetaData(naming_convention=NAMING_CONVENTION) |
|
38 | 38 | |
|
39 | 39 | # other values from the config, defined by the needs of env.py, |
|
40 | 40 | # can be acquired: |
|
41 | 41 | # my_important_option = config.get_main_option("my_important_option") |
|
42 | 42 | # ... etc. |
|
43 | 43 | |
|
44 | VERSION_TABLE_NAME = 'alembic_appenlight_version' |
|
|
44 | VERSION_TABLE_NAME = "alembic_appenlight_version" | |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | def run_migrations_offline(): |
|
48 | 48 | """Run migrations in 'offline' mode. |
|
49 | 49 | |
|
50 | 50 | This configures the context with just a URL |
|
51 | 51 | and not an Engine, though an Engine is acceptable |
|
52 | 52 | here as well. By skipping the Engine creation |
|
53 | 53 | we don't even need a DBAPI to be available. |
|
54 | 54 | |
|
55 | 55 | Calls to context.execute() here emit the given string to the |
|
56 | 56 | script output. |
|
57 | 57 | |
|
58 | 58 | """ |
|
59 | 59 | url = config.get_main_option("sqlalchemy.url") |
|
60 | context.configure(url=url, target_metadata=target_metadata, | |
|
61 | transaction_per_migration=True, | |
|
62 | version_table=VERSION_TABLE_NAME) | |
|
60 | context.configure( | |
|
61 | url=url, | |
|
62 | target_metadata=target_metadata, | |
|
63 | transaction_per_migration=True, | |
|
64 | version_table=VERSION_TABLE_NAME, | |
|
65 | ) | |
|
63 | 66 | |
|
64 | 67 | with context.begin_transaction(): |
|
65 | 68 | context.run_migrations() |
|
66 | 69 | |
|
67 | 70 | |
|
68 | 71 | def run_migrations_online(): |
|
69 | 72 | """Run migrations in 'online' mode. |
|
70 | 73 | |
|
71 | 74 | In this scenario we need to create an Engine |
|
72 | 75 | and associate a connection with the context. |
|
73 | 76 | |
|
74 | 77 | """ |
|
75 | 78 | engine = engine_from_config( |
|
76 | 79 | config.get_section(config.config_ini_section), |
|
77 | prefix='sqlalchemy.', |
|
|
78 | poolclass=pool.NullPool) |
|
|
80 | prefix="sqlalchemy.", | |
|
81 | poolclass=pool.NullPool, | |
|
82 | ) | |
|
79 | 83 | |
|
80 | 84 | connection = engine.connect() |
|
81 | 85 | context.configure( |
|
82 | 86 | connection=connection, |
|
83 | 87 | target_metadata=target_metadata, |
|
84 | 88 | transaction_per_migration=True, |
|
85 | version_table=VERSION_TABLE_NAME | |
|
89 | version_table=VERSION_TABLE_NAME, | |
|
86 | 90 | ) |
|
87 | 91 | |
|
88 | 92 | try: |
|
89 | 93 | with context.begin_transaction(): |
|
90 | 94 | context.run_migrations() |
|
91 | 95 | finally: |
|
92 | 96 | connection.close() |
|
93 | 97 | |
|
94 | 98 | |
|
95 | 99 | if context.is_offline_mode(): |
|
96 | 100 | run_migrations_offline() |
|
97 | 101 | else: |
|
98 | 102 | run_migrations_online() |
This diff has been collapsed as it changes many lines, (789 lines changed)
@@ -1,624 +1,813 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | """initial tables |
|
18 | 18 | |
|
19 | 19 | Revision ID: 55b6e612672f |
|
20 | 20 | Revises: None |
|
21 | 21 | Create Date: 2014-10-13 23:47:38.295159 |
|
22 | 22 | |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | # revision identifiers, used by Alembic. |
|
26 | revision = '55b6e612672f' |
|
|
26 | revision = "55b6e612672f" | |
|
27 | 27 | down_revision = None |
|
28 | 28 | |
|
29 | 29 | from alembic import op |
|
30 | 30 | import sqlalchemy as sa |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | def upgrade(): |
|
34 | op.add_column('users', sa.Column('first_name', sa.Unicode(25))) |
|
|
35 | op.add_column('users', sa.Column('last_name', sa.Unicode(50))) |
|
|
36 | op.add_column('users', sa.Column('company_name', sa.Unicode(255))) |
|
|
37 | op.add_column('users', sa.Column('company_address', sa.Unicode(255))) |
|
|
38 | op.add_column('users', sa.Column('phone1', sa.Unicode(25))) |
|
|
39 | op.add_column('users', sa.Column('phone2', sa.Unicode(25))) |
|
|
40 | op.add_column('users', sa.Column('zip_code', sa.Unicode(25))) |
|
|
41 | op.add_column('users', sa.Column('default_report_sort', sa.Unicode(20), nullable=False, server_default="newest")) | |
|
42 | op.add_column('users', sa.Column('city', sa.Unicode(128))) | |
|
43 | op.add_column('users', sa.Column('notes', sa.UnicodeText, server_default='')) | |
|
44 | op.add_column('users', sa.Column('notifications', sa.Boolean(), nullable=False, server_default='true')) | |
|
45 | op.add_column('users', sa.Column('registration_ip', sa.Unicode(40), nullable=False, server_default='')) | |
|
34 | op.add_column("users", sa.Column("first_name", sa.Unicode(25))) | |
|
35 | op.add_column("users", sa.Column("last_name", sa.Unicode(50))) | |
|
36 | op.add_column("users", sa.Column("company_name", sa.Unicode(255))) | |
|
37 | op.add_column("users", sa.Column("company_address", sa.Unicode(255))) | |
|
38 | op.add_column("users", sa.Column("phone1", sa.Unicode(25))) | |
|
39 | op.add_column("users", sa.Column("phone2", sa.Unicode(25))) | |
|
40 | op.add_column("users", sa.Column("zip_code", sa.Unicode(25))) | |
|
41 | op.add_column( | |
|
42 | "users", | |
|
43 | sa.Column( | |
|
44 | "default_report_sort", | |
|
45 | sa.Unicode(20), | |
|
46 | nullable=False, | |
|
47 | server_default="newest", | |
|
48 | ), | |
|
49 | ) | |
|
50 | op.add_column("users", sa.Column("city", sa.Unicode(128))) | |
|
51 | op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default="")) | |
|
52 | op.add_column( | |
|
53 | "users", | |
|
54 | sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"), | |
|
55 | ) | |
|
56 | op.add_column( | |
|
57 | "users", | |
|
58 | sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""), | |
|
59 | ) | |
|
46 | 60 | |
|
47 | 61 | op.create_table( |
|
48 |
|
|
|
49 |
sa.Column( |
|
|
50 | sa.Column('resource_id', sa.Integer(), | |
|
51 | sa.ForeignKey('resources.resource_id', onupdate='cascade', | |
|
52 | ondelete='cascade')), | |
|
53 | sa.Column('integration_name', sa.Unicode(64)), | |
|
54 | sa.Column('config', sa.dialects.postgresql.JSON, nullable=False), | |
|
55 | sa.Column('modified_date', sa.DateTime(), nullable=False, server_default=sa.func.now()), | |
|
56 | sa.Column('external_id', sa.Unicode(255)), | |
|
57 |
sa.Column( |
|
|
62 | "integrations", | |
|
63 | sa.Column("id", sa.Integer(), primary_key=True), | |
|
64 | sa.Column( | |
|
65 | "resource_id", | |
|
66 | sa.Integer(), | |
|
67 | sa.ForeignKey( | |
|
68 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
69 | ), | |
|
70 | ), | |
|
71 | sa.Column("integration_name", sa.Unicode(64)), | |
|
72 | sa.Column("config", sa.dialects.postgresql.JSON, nullable=False), | |
|
73 | sa.Column( | |
|
74 | "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now() | |
|
75 | ), | |
|
76 | sa.Column("external_id", sa.Unicode(255)), | |
|
77 | sa.Column("external_id2", sa.Unicode(255)), | |
|
58 | 78 | ) |
|
59 | 79 | |
|
60 | 80 | op.create_table( |
|
61 |
|
|
|
62 | sa.Column('owner_id', sa.Integer(), | |
|
63 | sa.ForeignKey('users.id', onupdate='cascade', | |
|
64 | ondelete='cascade'), nullable=False), | |
|
65 | sa.Column('channel_name', sa.Unicode(25), nullable=False), | |
|
66 | sa.Column('channel_value', sa.Unicode(80), nullable=False), | |
|
67 | sa.Column('channel_json_conf', sa.dialects.postgresql.JSON, nullable=False), | |
|
68 |
sa.Column( |
|
|
69 |
sa.Column( |
|
|
70 | sa.Column('notify_only_first', sa.Boolean, nullable=False, server_default='False'), | |
|
71 | sa.Column('daily_digest', sa.Boolean, nullable=False, server_default='True'), | |
|
72 | sa.Column('pkey', sa.Integer(), primary_key=True), | |
|
73 | sa.Column('integration_id', sa.Integer, | |
|
74 | sa.ForeignKey('integrations.id', onupdate='cascade', | |
|
75 | ondelete='cascade')), | |
|
76 | ) | |
|
77 | op.create_unique_constraint('uq_alert_channels', 'alert_channels', | |
|
78 | ["owner_id", "channel_name", "channel_value"]) | |
|
81 | "alert_channels", | |
|
82 | sa.Column( | |
|
83 | "owner_id", | |
|
84 | sa.Integer(), | |
|
85 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |
|
86 | nullable=False, | |
|
87 | ), | |
|
88 | sa.Column("channel_name", sa.Unicode(25), nullable=False), | |
|
89 | sa.Column("channel_value", sa.Unicode(80), nullable=False), | |
|
90 | sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False), | |
|
91 | sa.Column( | |
|
92 | "channel_validated", sa.Boolean, nullable=False, server_default="False" | |
|
93 | ), | |
|
94 | sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"), | |
|
95 | sa.Column( | |
|
96 | "notify_only_first", sa.Boolean, nullable=False, server_default="False" | |
|
97 | ), | |
|
98 | sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"), | |
|
99 | sa.Column("pkey", sa.Integer(), primary_key=True), | |
|
100 | sa.Column( | |
|
101 | "integration_id", | |
|
102 | sa.Integer, | |
|
103 | sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"), | |
|
104 | ), | |
|
105 | ) | |
|
106 | op.create_unique_constraint( | |
|
107 | "uq_alert_channels", | |
|
108 | "alert_channels", | |
|
109 | ["owner_id", "channel_name", "channel_value"], | |
|
110 | ) | |
|
79 | 111 | |
|
80 | 112 | op.create_table( |
|
81 |
|
|
|
82 |
sa.Column( |
|
|
83 | sa.Column('resource_id', sa.Integer(), | |
|
84 | sa.ForeignKey('resources.resource_id', onupdate='cascade', | |
|
85 | ondelete='cascade')), | |
|
86 | sa.Column('pkey', sa.Integer(), primary_key=True), | |
|
87 | sa.Column('action', sa.Unicode(10), nullable=False, server_default='always'), | |
|
88 | sa.Column('rule', sa.dialects.postgresql.JSON), | |
|
89 | sa.Column('type', sa.Unicode(10), index=True), | |
|
90 |
sa.Column( |
|
|
91 | sa.Column('config', sa.dialects.postgresql.JSON), | |
|
92 | sa.Column('name', sa.Unicode(255), server_default='') | |
|
113 | "alert_channels_actions", | |
|
114 | sa.Column("owner_id", sa.Integer(), nullable=False), | |
|
115 | sa.Column( | |
|
116 | "resource_id", | |
|
117 | sa.Integer(), | |
|
118 | sa.ForeignKey( | |
|
119 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
120 | ), | |
|
121 | ), | |
|
122 | sa.Column("pkey", sa.Integer(), primary_key=True), | |
|
123 | sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"), | |
|
124 | sa.Column("rule", sa.dialects.postgresql.JSON), | |
|
125 | sa.Column("type", sa.Unicode(10), index=True), | |
|
126 | sa.Column("other_id", sa.Unicode(40), index=True), | |
|
127 | sa.Column("config", sa.dialects.postgresql.JSON), | |
|
128 | sa.Column("name", sa.Unicode(255), server_default=""), | |
|
93 | 129 | ) |
|
94 | 130 | |
|
95 | ||
|
96 | 131 | op.create_table( |
|
97 |
|
|
|
98 |
sa.Column( |
|
|
99 |
sa.Column( |
|
|
100 |
sa.Column( |
|
|
101 | sa.Column('resource_id', sa.Integer(), | |
|
102 | sa.ForeignKey('resources.resource_id', | |
|
103 | onupdate='cascade', | |
|
104 | ondelete='cascade'), nullable=False), | |
|
105 | sa.Column('rule', sa.dialects.postgresql.JSON), | |
|
132 | "application_postprocess_conf", | |
|
133 | sa.Column("pkey", sa.Integer(), primary_key=True), | |
|
134 | sa.Column("do", sa.Unicode(25), nullable=False), | |
|
135 | sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""), | |
|
136 | sa.Column( | |
|
137 | "resource_id", | |
|
138 | sa.Integer(), | |
|
139 | sa.ForeignKey( | |
|
140 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
141 | ), | |
|
142 | nullable=False, | |
|
143 | ), | |
|
144 | sa.Column("rule", sa.dialects.postgresql.JSON), | |
|
106 | 145 | ) |
|
107 | 146 | |
|
108 | 147 | op.create_table( |
|
109 |
|
|
|
110 | sa.Column('resource_id', sa.Integer(), | |
|
111 | sa.ForeignKey('resources.resource_id', onupdate='cascade', | |
|
112 | ondelete='cascade'), nullable=False, | |
|
113 | primary_key=True, autoincrement=False), | |
|
114 | sa.Column('domains', sa.UnicodeText, nullable=False), | |
|
115 | sa.Column('api_key', sa.Unicode(32), nullable=False, index=True), | |
|
116 | sa.Column('default_grouping', sa.Unicode(20), nullable=False, server_default='url_type'), | |
|
117 | sa.Column('public_key', sa.Unicode(32), nullable=False, index=True), | |
|
118 | sa.Column('error_report_threshold', sa.Integer(), server_default='10', nullable=False), | |
|
119 | sa.Column('slow_report_threshold', sa.Integer(), server_default='10', nullable=False), | |
|
120 |
sa.Column( |
|
|
121 | sa.Column('allow_permanent_storage', sa.Boolean(), server_default="false", nullable=False), | |
|
122 | ) | |
|
123 | op.create_unique_constraint(None, 'applications', | |
|
124 | ["public_key"]) | |
|
125 | op.create_unique_constraint(None, 'applications', | |
|
126 | ["api_key"]) | |
|
148 | "applications", | |
|
149 | sa.Column( | |
|
150 | "resource_id", | |
|
151 | sa.Integer(), | |
|
152 | sa.ForeignKey( | |
|
153 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
154 | ), | |
|
155 | nullable=False, | |
|
156 | primary_key=True, | |
|
157 | autoincrement=False, | |
|
158 | ), | |
|
159 | sa.Column("domains", sa.UnicodeText, nullable=False), | |
|
160 | sa.Column("api_key", sa.Unicode(32), nullable=False, index=True), | |
|
161 | sa.Column( | |
|
162 | "default_grouping", | |
|
163 | sa.Unicode(20), | |
|
164 | nullable=False, | |
|
165 | server_default="url_type", | |
|
166 | ), | |
|
167 | sa.Column("public_key", sa.Unicode(32), nullable=False, index=True), | |
|
168 | sa.Column( | |
|
169 | "error_report_threshold", sa.Integer(), server_default="10", nullable=False | |
|
170 | ), | |
|
171 | sa.Column( | |
|
172 | "slow_report_threshold", sa.Integer(), server_default="10", nullable=False | |
|
173 | ), | |
|
174 | sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False), | |
|
175 | sa.Column( | |
|
176 | "allow_permanent_storage", | |
|
177 | sa.Boolean(), | |
|
178 | server_default="false", | |
|
179 | nullable=False, | |
|
180 | ), | |
|
181 | ) | |
|
182 | op.create_unique_constraint(None, "applications", ["public_key"]) | |
|
183 | op.create_unique_constraint(None, "applications", ["api_key"]) | |
|
127 | 184 | |
|
128 | 185 | op.create_table( |
|
129 |
|
|
|
130 |
sa.Column( |
|
|
131 | sa.Column('resource_id', sa.Integer(), | |
|
132 | sa.ForeignKey('resources.resource_id', | |
|
133 | onupdate='cascade', | |
|
134 | ondelete='cascade')), | |
|
135 | sa.Column('timestamp', sa.DateTime), | |
|
136 | sa.Column('namespace', sa.Unicode(255)), | |
|
137 | sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}") | |
|
186 | "metrics", | |
|
187 | sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True), | |
|
188 | sa.Column( | |
|
189 | "resource_id", | |
|
190 | sa.Integer(), | |
|
191 | sa.ForeignKey( | |
|
192 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
193 | ), | |
|
194 | ), | |
|
195 | sa.Column("timestamp", sa.DateTime), | |
|
196 | sa.Column("namespace", sa.Unicode(255)), | |
|
197 | sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"), | |
|
138 | 198 | ) |
|
139 | 199 | |
|
140 | 200 | op.create_table( |
|
141 |
|
|
|
142 |
sa.Column( |
|
|
143 |
sa.Column( |
|
|
144 |
sa.Column( |
|
|
145 |
sa.Column( |
|
|
146 |
sa.Column( |
|
|
147 |
sa.Column( |
|
|
148 |
sa.Column( |
|
|
149 |
sa.Column( |
|
|
150 |
sa.Column( |
|
|
151 |
sa.Column( |
|
|
152 |
sa.Column( |
|
|
153 |
sa.Column( |
|
|
201 | "events", | |
|
202 | sa.Column("id", sa.Integer, nullable=False, primary_key=True), | |
|
203 | sa.Column("start_date", sa.DateTime, nullable=False, index=True), | |
|
204 | sa.Column("end_date", sa.DateTime), | |
|
205 | sa.Column("status", sa.Integer(), nullable=False, index=True), | |
|
206 | sa.Column("event_type", sa.Integer(), nullable=False, index=True), | |
|
207 | sa.Column("origin_user_id", sa.Integer()), | |
|
208 | sa.Column("target_user_id", sa.Integer()), | |
|
209 | sa.Column("resource_id", sa.Integer(), index=True), | |
|
210 | sa.Column("text", sa.UnicodeText, server_default=""), | |
|
211 | sa.Column("values", sa.dialects.postgresql.JSON), | |
|
212 | sa.Column("target_id", sa.Integer()), | |
|
213 | sa.Column("target_uuid", sa.Unicode(40), index=True), | |
|
154 | 214 | ) |
|
155 | 215 | |
|
156 | 216 | op.create_table( |
|
157 |
|
|
|
158 |
sa.Column( |
|
|
159 | sa.Column('resource_id', sa.Integer(), | |
|
160 | sa.ForeignKey('resources.resource_id', | |
|
161 | onupdate='cascade', | |
|
162 | ondelete='cascade')), | |
|
163 | sa.Column('log_level', sa.SmallInteger(), nullable=False), | |
|
164 | sa.Column('primary_key', sa.Unicode(128), nullable=True), | |
|
165 | sa.Column('message', sa.UnicodeText, nullable=False, server_default=''), | |
|
166 | sa.Column('timestamp', sa.DateTime), | |
|
167 |
sa.Column( |
|
|
168 | sa.Column('request_id', sa.Unicode(40)), | |
|
169 | sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}"), | |
|
170 | sa.Column('permanent', sa.Boolean(), server_default="false", | |
|
171 | nullable=False) | |
|
217 | "logs", | |
|
218 | sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True), | |
|
219 | sa.Column( | |
|
220 | "resource_id", | |
|
221 | sa.Integer(), | |
|
222 | sa.ForeignKey( | |
|
223 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
224 | ), | |
|
225 | ), | |
|
226 | sa.Column("log_level", sa.SmallInteger(), nullable=False), | |
|
227 | sa.Column("primary_key", sa.Unicode(128), nullable=True), | |
|
228 | sa.Column("message", sa.UnicodeText, nullable=False, server_default=""), | |
|
229 | sa.Column("timestamp", sa.DateTime), | |
|
230 | sa.Column("namespace", sa.Unicode(255)), | |
|
231 | sa.Column("request_id", sa.Unicode(40)), | |
|
232 | sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"), | |
|
233 | sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False), | |
|
172 | 234 | ) |
|
173 | 235 | |
|
174 | 236 | op.create_table( |
|
175 |
|
|
|
176 |
sa.Column( |
|
|
177 | sa.Column('resource_id', sa.Integer, | |
|
178 | sa.ForeignKey('resources.resource_id', onupdate='cascade', | |
|
179 | ondelete='cascade'), nullable=False), | |
|
180 | sa.Column('priority', sa.Integer, nullable=False, server_default="5"), | |
|
181 | sa.Column('first_timestamp', sa.DateTime(), nullable=False, server_default=sa.func.now()), | |
|
182 | sa.Column('last_timestamp', sa.DateTime()), | |
|
183 | sa.Column('error', sa.UnicodeText, nullable=False, server_default=""), | |
|
184 | sa.Column('grouping_hash', sa.Unicode(40), nullable=False, server_default=""), | |
|
185 |
sa.Column( |
|
|
186 | sa.Column('report_type', sa.Integer, nullable=False, server_default="0"), | |
|
187 | sa.Column('total_reports', sa.Integer, nullable=False, server_default="0"), | |
|
188 | sa.Column('last_report', sa.Integer, nullable=False, server_default="0"), | |
|
189 | sa.Column('occurences', sa.Integer, nullable=False, server_default="1"), | |
|
190 | sa.Column('average_duration', sa.Float(), nullable=False, server_default="0"), | |
|
191 | sa.Column('summed_duration', sa.Float(), nullable=False, server_default="0"), | |
|
192 | sa.Column('notified', sa.Boolean, nullable=False, server_default="False"), | |
|
193 |
sa.Column( |
|
|
194 |
sa.Column( |
|
|
195 | sa.Column('read', sa.Boolean, nullable=False, server_default="False"), | |
|
237 | "reports_groups", | |
|
238 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
|
239 | sa.Column( | |
|
240 | "resource_id", | |
|
241 | sa.Integer, | |
|
242 | sa.ForeignKey( | |
|
243 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
244 | ), | |
|
245 | nullable=False, | |
|
246 | ), | |
|
247 | sa.Column("priority", sa.Integer, nullable=False, server_default="5"), | |
|
248 | sa.Column( | |
|
249 | "first_timestamp", | |
|
250 | sa.DateTime(), | |
|
251 | nullable=False, | |
|
252 | server_default=sa.func.now(), | |
|
253 | ), | |
|
254 | sa.Column("last_timestamp", sa.DateTime()), | |
|
255 | sa.Column("error", sa.UnicodeText, nullable=False, server_default=""), | |
|
256 | sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""), | |
|
257 | sa.Column( | |
|
258 | "triggered_postprocesses_ids", | |
|
259 | sa.dialects.postgresql.JSON, | |
|
260 | nullable=False, | |
|
261 | server_default="[]", | |
|
262 | ), | |
|
263 | sa.Column("report_type", sa.Integer, nullable=False, server_default="0"), | |
|
264 | sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"), | |
|
265 | sa.Column("last_report", sa.Integer, nullable=False, server_default="0"), | |
|
266 | sa.Column("occurences", sa.Integer, nullable=False, server_default="1"), | |
|
267 | sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"), | |
|
268 | sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"), | |
|
269 | sa.Column("notified", sa.Boolean, nullable=False, server_default="False"), | |
|
270 | sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"), | |
|
271 | sa.Column("public", sa.Boolean, nullable=False, server_default="False"), | |
|
272 | sa.Column("read", sa.Boolean, nullable=False, server_default="False"), | |
|
196 | 273 | ) |
|
197 | 274 | |
|
198 | 275 | op.create_table( |
|
199 | 'reports', | |

200 | sa.Column('id', sa.types.BigInteger, primary_key=True), | |

201 | sa.Column('group_id', sa.types.BigInteger, | |
|
202 | sa.ForeignKey('reports_groups.id', onupdate='cascade', | |
|
203 | ondelete='cascade'), nullable=False, index=True), | |
|
204 | sa.Column('resource_id', sa.Integer, nullable=False, index=True), | |
|
205 | sa.Column('report_type', sa.Integer, nullable=False, server_default="0"), | |
|
206 | sa.Column('error', sa.UnicodeText, nullable=False, server_default=""), | |
|
207 | sa.Column('extra', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"), | |
|
208 | sa.Column('request', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"), | |

209 | sa.Column('tags', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"), | |

210 | sa.Column('ip', sa.Unicode(39), nullable=False, server_default=""), | |

211 | sa.Column('username', sa.Unicode(255), nullable=False, server_default=""), | |
|
212 | sa.Column('user_agent', sa.Unicode(512), nullable=False, server_default=""), | |

213 | sa.Column('url', sa.UnicodeText, nullable=False, server_default=""), | |
|
214 | sa.Column('request_id', sa.Unicode(40), nullable=False, server_default=""), | |
|
215 | sa.Column('request_stats', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"), | |

216 | sa.Column('traceback', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"), | |
|
217 | sa.Column('traceback_hash', sa.Unicode(40), nullable=False, server_default=""), | |
|
218 | sa.Column('start_time', sa.DateTime(), nullable=False, server_default=sa.func.now()), | |
|
219 | sa.Column('end_time', sa.DateTime()), | |
|
220 | sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()), | |

221 | sa.Column('duration', sa.Float(), nullable=False, server_default="0"), | |

222 | sa.Column('http_status', sa.Integer, index=True), | |
|
223 | sa.Column('url_domain', sa.Unicode(128)), | |
|
224 | sa.Column('url_path', sa.UnicodeText), | |
|
225 | sa.Column('language', sa.Integer, server_default="0"), | |
|
226 | ) | |
|
227 | op.create_index(None, 'reports', | |
|
228 | [sa.text("(tags ->> 'server_name')")]) | |
|
229 | op.create_index(None, 'reports', | |
|
230 | [sa.text("(tags ->> 'view_name')")]) | |
|
276 | "reports", | |
|
277 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
|
278 | sa.Column( | |
|
279 | "group_id", | |
|
280 | sa.types.BigInteger, | |
|
281 | sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"), | |
|
282 | nullable=False, | |
|
283 | index=True, | |
|
284 | ), | |
|
285 | sa.Column("resource_id", sa.Integer, nullable=False, index=True), | |
|
286 | sa.Column("report_type", sa.Integer, nullable=False, server_default="0"), | |
|
287 | sa.Column("error", sa.UnicodeText, nullable=False, server_default=""), | |
|
288 | sa.Column( | |
|
289 | "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}" | |
|
290 | ), | |
|
291 | sa.Column( | |
|
292 | "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}" | |
|
293 | ), | |
|
294 | sa.Column( | |
|
295 | "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}" | |
|
296 | ), | |
|
297 | sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""), | |
|
298 | sa.Column("username", sa.Unicode(255), nullable=False, server_default=""), | |
|
299 | sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""), | |
|
300 | sa.Column("url", sa.UnicodeText, nullable=False, server_default=""), | |
|
301 | sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""), | |
|
302 | sa.Column( | |
|
303 | "request_stats", | |
|
304 | sa.dialects.postgresql.JSON, | |
|
305 | nullable=False, | |
|
306 | server_default="{}", | |
|
307 | ), | |
|
308 | sa.Column( | |
|
309 | "traceback", | |
|
310 | sa.dialects.postgresql.JSON, | |
|
311 | nullable=False, | |
|
312 | server_default="{}", | |
|
313 | ), | |
|
314 | sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""), | |
|
315 | sa.Column( | |
|
316 | "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now() | |
|
317 | ), | |
|
318 | sa.Column("end_time", sa.DateTime()), | |
|
319 | sa.Column( | |
|
320 | "report_group_time", | |
|
321 | sa.DateTime, | |
|
322 | index=True, | |
|
323 | nullable=False, | |
|
324 | server_default=sa.func.now(), | |
|
325 | ), | |
|
326 | sa.Column("duration", sa.Float(), nullable=False, server_default="0"), | |
|
327 | sa.Column("http_status", sa.Integer, index=True), | |
|
328 | sa.Column("url_domain", sa.Unicode(128)), | |
|
329 | sa.Column("url_path", sa.UnicodeText), | |
|
330 | sa.Column("language", sa.Integer, server_default="0"), | |
|
331 | ) | |
|
332 | op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")]) | |
|
333 | op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")]) | |
|
231 | 334 | |
|
232 | 335 | op.create_table( |
|
233 | 'reports_assignments', | |

234 | sa.Column('group_id', sa.types.BigInteger, nullable=False, primary_key=True), | |

235 | sa.Column('owner_id', sa.Integer, | |

236 | sa.ForeignKey('users.id', onupdate='cascade',ondelete='cascade'), | |
|
237 | nullable=False, primary_key=True), | |
|
238 | sa.Column('report_time', sa.DateTime, nullable=False) | |
|
239 | ) | |
|
336 | "reports_assignments", | |
|
337 | sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True), | |
|
338 | sa.Column( | |
|
339 | "owner_id", | |
|
340 | sa.Integer, | |
|
341 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |
|
342 | nullable=False, | |
|
343 | primary_key=True, | |
|
344 | ), | |
|
345 | sa.Column("report_time", sa.DateTime, nullable=False), | |
|
346 | ) | |
|
240 | 347 | |
|
241 | 348 | op.create_table( |
|
242 | 'reports_comments', | |

243 | sa.Column('comment_id', sa.Integer, primary_key=True), | |

244 | sa.Column('body', sa.UnicodeText, nullable=False, server_default=""), | |

245 | sa.Column('owner_id', sa.Integer, | |

246 | sa.ForeignKey('users.id', onupdate='cascade', | |
|
247 | ondelete='set null'), nullable=True), | |
|
248 | sa.Column('created_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()), | |
|
249 | sa.Column('report_time', sa.DateTime, nullable=False), | |
|
250 | sa.Column('group_id', sa.types.BigInteger, nullable=False) | |
|
349 | "reports_comments", | |
|
350 | sa.Column("comment_id", sa.Integer, primary_key=True), | |
|
351 | sa.Column("body", sa.UnicodeText, nullable=False, server_default=""), | |
|
352 | sa.Column( | |
|
353 | "owner_id", | |
|
354 | sa.Integer, | |
|
355 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"), | |
|
356 | nullable=True, | |
|
357 | ), | |
|
358 | sa.Column( | |
|
359 | "created_timestamp", | |
|
360 | sa.DateTime, | |
|
361 | nullable=False, | |
|
362 | server_default=sa.func.now(), | |
|
363 | ), | |
|
364 | sa.Column("report_time", sa.DateTime, nullable=False), | |
|
365 | sa.Column("group_id", sa.types.BigInteger, nullable=False), | |
|
251 | 366 | ) |
|
252 | 367 | |
|
253 | 368 | op.create_table( |
|
254 | 'reports_stats', | |

255 | sa.Column('resource_id', sa.Integer, nullable=False, index=True), | |

256 | sa.Column('start_interval', sa.DateTime, nullable=False, index=True), | |

257 | sa.Column('group_id', sa.types.BigInteger, index=True), | |

258 | sa.Column('occurences', sa.Integer, nullable=False, server_default='0', index=True), | |
|
259 | sa.Column('owner_user_id', sa.Integer), | |
|
260 | sa.Column('type', sa.Integer, index=True, nullable=False), | |
|
261 | sa.Column('duration', sa.Float(), server_default='0'), | |
|
262 | sa.Column('server_name', sa.Unicode(128), | |
|
263 | server_default=''), | |
|
264 | sa.Column('view_name', sa.Unicode(128), | |

265 | server_default=''), | |

266 | sa.Column('id', sa.BigInteger(), nullable=False, primary_key=True), | |

267 | ) | |
|
268 | op.create_index('ix_reports_stats_start_interval_group_id', 'reports_stats', | |
|
269 | ['start_interval', 'group_id']) | |

369 | "reports_stats", | |
|
370 | sa.Column("resource_id", sa.Integer, nullable=False, index=True), | |
|
371 | sa.Column("start_interval", sa.DateTime, nullable=False, index=True), | |
|
372 | sa.Column("group_id", sa.types.BigInteger, index=True), | |
|
373 | sa.Column( | |
|
374 | "occurences", sa.Integer, nullable=False, server_default="0", index=True | |
|
375 | ), | |
|
376 | sa.Column("owner_user_id", sa.Integer), | |
|
377 | sa.Column("type", sa.Integer, index=True, nullable=False), | |
|
378 | sa.Column("duration", sa.Float(), server_default="0"), | |
|
379 | sa.Column("server_name", sa.Unicode(128), server_default=""), | |
|
380 | sa.Column("view_name", sa.Unicode(128), server_default=""), | |
|
381 | sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True), | |
|
382 | ) | |
|
383 | op.create_index( | |
|
384 | "ix_reports_stats_start_interval_group_id", | |
|
385 | "reports_stats", | |
|
386 | ["start_interval", "group_id"], | |
|
387 | ) | |
|
270 | 388 | |
|
271 | 389 | op.create_table( |
|
272 | 'slow_calls', | |

273 | sa.Column('id', sa.types.BigInteger, primary_key=True), | |

274 | sa.Column('report_id', sa.types.BigInteger, sa.ForeignKey('reports.id', onupdate='cascade', ondelete='cascade'), | |
|
275 | nullable=False, index=True), | |
|
276 | sa.Column('duration', sa.Float(), nullable=False, server_default="0", index=True), | |
|
277 | sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now(), index=True), | |
|
278 | sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()), | |
|
279 | sa.Column('type', sa.Unicode(16), nullable=False, index=True), | |
|
280 | sa.Column('statement', sa.UnicodeText, nullable=False, server_default=''), | |
|
281 | sa.Column('parameters', sa.dialects.postgresql.JSON, nullable=False), | |
|
282 | sa.Column('location', sa.UnicodeText, server_default=''), | |
|
283 | sa.Column('subtype', sa.Unicode(16), nullable=False, index=True), | |
|
284 | sa.Column('resource_id', sa.Integer, nullable=False, index=True), | |
|
285 | sa.Column('statement_hash', sa.Unicode(60), index=True) | |
|
390 | "slow_calls", | |
|
391 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
|
392 | sa.Column( | |
|
393 | "report_id", | |
|
394 | sa.types.BigInteger, | |
|
395 | sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"), | |
|
396 | nullable=False, | |
|
397 | index=True, | |
|
398 | ), | |
|
399 | sa.Column( | |
|
400 | "duration", sa.Float(), nullable=False, server_default="0", index=True | |
|
401 | ), | |
|
402 | sa.Column( | |
|
403 | "timestamp", | |
|
404 | sa.DateTime, | |
|
405 | nullable=False, | |
|
406 | server_default=sa.func.now(), | |
|
407 | index=True, | |
|
408 | ), | |
|
409 | sa.Column( | |
|
410 | "report_group_time", | |
|
411 | sa.DateTime, | |
|
412 | index=True, | |
|
413 | nullable=False, | |
|
414 | server_default=sa.func.now(), | |
|
415 | ), | |
|
416 | sa.Column("type", sa.Unicode(16), nullable=False, index=True), | |
|
417 | sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""), | |
|
418 | sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False), | |
|
419 | sa.Column("location", sa.UnicodeText, server_default=""), | |
|
420 | sa.Column("subtype", sa.Unicode(16), nullable=False, index=True), | |
|
421 | sa.Column("resource_id", sa.Integer, nullable=False, index=True), | |
|
422 | sa.Column("statement_hash", sa.Unicode(60), index=True), | |
|
286 | 423 | ) |
|
287 | 424 | |
|
288 | 425 | op.create_table( |
|
289 | 'tags', | |

290 | sa.Column('id', sa.types.BigInteger, primary_key=True), | |

291 | sa.Column('resource_id', sa.Integer, | |
|
292 | sa.ForeignKey('resources.resource_id', onupdate='cascade', | |
|
293 | ondelete='cascade')), | |
|
294 | sa.Column('first_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()), | |
|
295 | sa.Column('last_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()), | |
|
296 | sa.Column('name', sa.Unicode(32), nullable=False), | |
|
297 | sa.Column('value', sa.dialects.postgresql.JSON, nullable=False), | |
|
298 | sa.Column('times_seen', sa.Integer, nullable=False, server_default='1') | |
|
426 | "tags", | |
|
427 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
|
428 | sa.Column( | |
|
429 | "resource_id", | |
|
430 | sa.Integer, | |
|
431 | sa.ForeignKey( | |
|
432 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
433 | ), | |
|
434 | ), | |
|
435 | sa.Column( | |
|
436 | "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now() | |
|
437 | ), | |
|
438 | sa.Column( | |
|
439 | "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now() | |
|
440 | ), | |
|
441 | sa.Column("name", sa.Unicode(32), nullable=False), | |
|
442 | sa.Column("value", sa.dialects.postgresql.JSON, nullable=False), | |
|
443 | sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"), | |
|
299 | 444 | ) |
|
300 | 445 | |
|
301 | 446 | op.create_table( |
|
302 | 'auth_tokens', | |

303 | sa.Column('id', sa.Integer, nullable=False, primary_key=True), | |

304 | sa.Column('token', sa.Unicode), | |

305 | sa.Column('creation_date', sa.DateTime, nullable=False, server_default=sa.func.now()), | |
|
306 | sa.Column('expires', sa.DateTime), | |
|
307 | sa.Column('owner_id', sa.Integer, | |
|
308 | sa.ForeignKey('users.id', onupdate='cascade', | |
|
309 | ondelete='cascade')), | |
|
310 | sa.Column('description', sa.Unicode), | |
|
447 | "auth_tokens", | |
|
448 | sa.Column("id", sa.Integer, nullable=False, primary_key=True), | |
|
449 | sa.Column("token", sa.Unicode), | |
|
450 | sa.Column( | |
|
451 | "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now() | |
|
452 | ), | |
|
453 | sa.Column("expires", sa.DateTime), | |
|
454 | sa.Column( | |
|
455 | "owner_id", | |
|
456 | sa.Integer, | |
|
457 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |
|
458 | ), | |
|
459 | sa.Column("description", sa.Unicode), | |
|
311 | 460 | ) |
|
312 | 461 | |
|
313 | 462 | op.create_table( |
|
314 | 'channels_actions', | |

315 | sa.Column('channel_pkey', sa.Integer, | |
|
316 | sa.ForeignKey('alert_channels.pkey', | |
|
317 | ondelete='CASCADE', onupdate='CASCADE')), | |
|
318 | sa.Column('action_pkey', sa.Integer, | |
|
319 | sa.ForeignKey('alert_channels_actions.pkey', | |
|
320 | ondelete='CASCADE', onupdate='CASCADE')) | |
|
463 | "channels_actions", | |
|
464 | sa.Column( | |
|
465 | "channel_pkey", | |
|
466 | sa.Integer, | |
|
467 | sa.ForeignKey( | |
|
468 | "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE" | |
|
469 | ), | |
|
470 | ), | |
|
471 | sa.Column( | |
|
472 | "action_pkey", | |
|
473 | sa.Integer, | |
|
474 | sa.ForeignKey( | |
|
475 | "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE" | |
|
476 | ), | |
|
477 | ), | |
|
321 | 478 | ) |
|
322 | 479 | |
|
323 | 480 | op.create_table( |
|
324 | 'config', | |

325 | sa.Column('key', sa.Unicode(128), primary_key=True), | |

326 | sa.Column('section', sa.Unicode(128), primary_key=True), | |

327 | sa.Column('value', sa.dialects.postgresql.JSON, | |

328 | server_default="{}") | |
|
481 | "config", | |
|
482 | sa.Column("key", sa.Unicode(128), primary_key=True), | |
|
483 | sa.Column("section", sa.Unicode(128), primary_key=True), | |
|
484 | sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"), | |
|
329 | 485 | ) |
|
330 | 486 | |
|
331 | 487 | op.create_table( |
|
332 | 'plugin_configs', | |

333 | sa.Column('id', sa.Integer, primary_key=True), | |

334 | sa.Column('plugin_name', sa.Unicode(128)), | |

335 | sa.Column('section', sa.Unicode(128)), | |

336 | sa.Column('config', sa.dialects.postgresql.JSON, | |

337 | server_default="{}"), | |
|
338 | sa.Column('resource_id', sa.Integer(), | |
|
339 | sa.ForeignKey('resources.resource_id', onupdate='cascade', | |
|
340 | ondelete='cascade')), | |
|
341 | sa.Column('owner_id', sa.Integer(), | |
|
342 | sa.ForeignKey('users.id', onupdate='cascade', | |
|
343 | ondelete='cascade'))) | |
|
488 | "plugin_configs", | |
|
489 | sa.Column("id", sa.Integer, primary_key=True), | |
|
490 | sa.Column("plugin_name", sa.Unicode(128)), | |
|
491 | sa.Column("section", sa.Unicode(128)), | |
|
492 | sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"), | |
|
493 | sa.Column( | |
|
494 | "resource_id", | |
|
495 | sa.Integer(), | |
|
496 | sa.ForeignKey( | |
|
497 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
498 | ), | |
|
499 | ), | |
|
500 | sa.Column( | |
|
501 | "owner_id", | |
|
502 | sa.Integer(), | |
|
503 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |
|
504 | ), | |
|
505 | ) | |
|
344 | 506 | |
|
345 | 507 | op.create_table( |
|
346 | 'rc_versions', | |

347 | sa.Column('name', sa.Unicode(40), primary_key=True), | |

348 | sa.Column('value', sa.Unicode(40)), | |

508 | "rc_versions", | |
|
509 | sa.Column("name", sa.Unicode(40), primary_key=True), | |
|
510 | sa.Column("value", sa.Unicode(40)), | |
|
511 | ) | |
|
512 | version_table = sa.table( | |
|
513 | "rc_versions", | |
|
514 | sa.Column("name", sa.Unicode(40)), | |
|
515 | sa.Column("value", sa.Unicode(40)), | |
|
349 | 516 | ) |
|
350 | version_table = sa.table('rc_versions', | |
|
351 | sa.Column('name', sa.Unicode(40)), | |
|
352 | sa.Column('value', sa.Unicode(40))) | |
|
353 | 517 | |
|
354 | insert = version_table.insert().values(name='es_reports') | |

518 | insert = version_table.insert().values(name="es_reports") | |
|
355 | 519 | op.execute(insert) |
|
356 | insert = version_table.insert().values(name='es_reports_groups') | |

520 | insert = version_table.insert().values(name="es_reports_groups") | |
|
357 | 521 | op.execute(insert) |
|
358 | insert = version_table.insert().values(name='es_reports_stats') | |

522 | insert = version_table.insert().values(name="es_reports_stats") | |
|
359 | 523 | op.execute(insert) |
|
360 | insert = version_table.insert().values(name='es_logs') | |

524 | insert = version_table.insert().values(name="es_logs") | |
|
361 | 525 | op.execute(insert) |
|
362 | insert = version_table.insert().values(name='es_metrics') | |

526 | insert = version_table.insert().values(name="es_metrics") | |
|
363 | 527 | op.execute(insert) |
|
364 | insert = version_table.insert().values(name='es_slow_calls') | |

528 | insert = version_table.insert().values(name="es_slow_calls") | |
|
365 | 529 | op.execute(insert) |
|
366 | 530 | |
|
367 | ||
|
368 | op.execute(''' | |
|
531 | op.execute( | |
|
532 | """ | |
|
369 | 533 |
|
|
370 | 534 |
|
|
371 | 535 | $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$ |
|
372 | 536 |
|
|
373 | ''') | |
|
537 | """ | |
|
538 | ) | |
|
374 | 539 | |
|
375 | op.execute(''' | |

540 | op.execute( | |
|
541 | """ | |
|
376 | 542 |
|
|
377 | 543 |
|
|
378 | 544 |
|
|
379 | 545 |
|
|
380 | 546 | main_table varchar := 'logs'; |
|
381 | 547 | partitioned_table varchar := ''; |
|
382 | 548 |
|
|
383 | 549 |
|
|
384 | 550 |
|
|
385 | 551 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp); |
|
386 | 552 |
|
|
387 | 553 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp); |
|
388 | 554 |
|
|
389 | 555 |
|
|
390 | 556 |
|
|
391 | 557 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
392 | 558 |
|
|
393 | 559 |
|
|
394 | 560 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
395 | 561 |
|
|
396 | 562 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', |
|
397 | 563 |
|
|
398 | 564 | quote_literal(date_trunc('month', NEW.timestamp)::date) , |
|
399 | 565 | quote_literal((date_trunc('month', NEW.timestamp)::date + interval '1 month')::text), |
|
400 | 566 |
|
|
401 | 567 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(log_id);', partitioned_table, partitioned_table); |
|
402 | 568 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
403 | 569 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); |
|
404 | 570 | EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table); |
|
405 | 571 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); |
|
406 | 572 | EXECUTE format('CREATE INDEX ix_%s_pkey_namespace ON %s (primary_key, namespace);', partitioned_table, partitioned_table); |
|
407 | 573 |
|
|
408 | 574 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', |
|
409 | 575 |
|
|
410 | 576 | quote_literal(date_trunc('day', NEW.timestamp)::date) , |
|
411 | 577 | quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text), |
|
412 | 578 |
|
|
413 | 579 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s_ PRIMARY KEY(log_id);', partitioned_table, partitioned_table); |
|
414 | 580 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
415 | 581 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); |
|
416 | 582 | EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table); |
|
417 | 583 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); |
|
418 | 584 | EXECUTE format('CREATE INDEX ix_%s_primary_key_namespace ON %s (primary_key,namespace);', partitioned_table, partitioned_table); |
|
419 | 585 |
|
|
420 | 586 |
|
|
421 | 587 |
|
|
422 | 588 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
423 | 589 |
|
|
424 | 590 |
|
|
425 | 591 |
|
|
426 | 592 |
|
|
427 | 593 |
|
|
428 | 594 |
|
|
429 | ''') | |
|
595 | """ | |
|
596 | ) | |
|
430 | 597 | |
|
431 | op.execute(''' | |

598 | op.execute( | |
|
599 | """ | |
|
432 | 600 |
|
|
433 | ''') | |
|
601 | """ | |
|
602 | ) | |
|
434 | 603 | |
|
435 | op.execute(''' | |

604 | op.execute( | |
|
605 | """ | |
|
436 | 606 |
|
|
437 | 607 |
|
|
438 | 608 |
|
|
439 | 609 |
|
|
440 | 610 | main_table varchar := 'metrics'; |
|
441 | 611 | partitioned_table varchar := ''; |
|
442 | 612 |
|
|
443 | 613 |
|
|
444 | 614 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp); |
|
445 | 615 |
|
|
446 | 616 |
|
|
447 | 617 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
448 | 618 |
|
|
449 | 619 |
|
|
450 | 620 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
451 | 621 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', |
|
452 | 622 |
|
|
453 | 623 | quote_literal(date_trunc('day', NEW.timestamp)::date) , |
|
454 | 624 | quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text), |
|
455 | 625 |
|
|
456 | 626 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(pkey);', partitioned_table, partitioned_table); |
|
457 | 627 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
458 | 628 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); |
|
459 | 629 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); |
|
460 | 630 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
461 | 631 |
|
|
462 | 632 |
|
|
463 | 633 |
|
|
464 | 634 |
|
|
465 | 635 |
|
|
466 | ''') | |
|
636 | """ | |
|
637 | ) | |
|
467 | 638 | |
|
468 | op.execute(''' | |

639 | op.execute( | |
|
640 | """ | |
|
469 | 641 |
|
|
470 | ''') | |
|
642 | """ | |
|
643 | ) | |
|
471 | 644 | |
|
472 | op.execute(''' | |

645 | op.execute( | |
|
646 | """ | |
|
473 | 647 |
|
|
474 | 648 |
|
|
475 | 649 |
|
|
476 | 650 |
|
|
477 | 651 | main_table varchar := 'reports_stats'; |
|
478 | 652 | partitioned_table varchar := ''; |
|
479 | 653 |
|
|
480 | 654 |
|
|
481 | 655 | partitioned_table := main_table || '_p_' || date_part('year', NEW.start_interval)::TEXT || '_' || DATE_part('month', NEW.start_interval); |
|
482 | 656 |
|
|
483 | 657 |
|
|
484 | 658 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
485 | 659 |
|
|
486 | 660 |
|
|
487 | 661 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
488 | 662 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( start_interval >= DATE %s AND start_interval < DATE %s )) INHERITS (%s)', |
|
489 | 663 |
|
|
490 | 664 | quote_literal(date_trunc('month', NEW.start_interval)::date) , |
|
491 | 665 | quote_literal((date_trunc('month', NEW.start_interval)::date + interval '1 month')::text), |
|
492 | 666 |
|
|
493 | 667 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
494 | 668 | EXECUTE format('CREATE INDEX ix_%s_start_interval ON %s USING btree (start_interval);', partitioned_table, partitioned_table); |
|
495 | 669 | EXECUTE format('CREATE INDEX ix_%s_type ON %s USING btree (type);', partitioned_table, partitioned_table); |
|
496 | 670 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); |
|
497 | 671 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
498 | 672 |
|
|
499 | 673 |
|
|
500 | 674 |
|
|
501 | 675 |
|
|
502 | ''') | |
|
676 | """ | |
|
677 | ) | |
|
503 | 678 | |
|
504 | op.execute(''' | |

679 | op.execute( | |
|
680 | """ | |
|
505 | 681 |
|
|
506 | ''') | |
|
682 | """ | |
|
683 | ) | |
|
507 | 684 | |
|
508 | op.execute(''' | |

685 | op.execute( | |
|
686 | """ | |
|
509 | 687 |
|
|
510 | 688 |
|
|
511 | 689 |
|
|
512 | 690 |
|
|
513 | 691 | main_table varchar := 'reports_groups'; |
|
514 | 692 | partitioned_table varchar := ''; |
|
515 | 693 |
|
|
516 | 694 |
|
|
517 | 695 | partitioned_table := main_table || '_p_' || date_part('year', NEW.first_timestamp)::TEXT || '_' || DATE_part('month', NEW.first_timestamp); |
|
518 | 696 |
|
|
519 | 697 |
|
|
520 | 698 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
521 | 699 |
|
|
522 | 700 |
|
|
523 | 701 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
524 | 702 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( first_timestamp >= DATE %s AND first_timestamp < DATE %s )) INHERITS (%s)', |
|
525 | 703 |
|
|
526 | 704 | quote_literal(date_trunc('month', NEW.first_timestamp)::date) , |
|
527 | 705 | quote_literal((date_trunc('month', NEW.first_timestamp)::date + interval '1 month')::text), |
|
528 | 706 |
|
|
529 | 707 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
530 | 708 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
531 | 709 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
532 | 710 |
|
|
533 | 711 |
|
|
534 | 712 |
|
|
535 | 713 |
|
|
536 | ''') | |
|
714 | """ | |
|
715 | ) | |
|
537 | 716 | |
|
538 | op.execute(''' | |

717 | op.execute( | |
|
718 | """ | |
|
539 | 719 |
|
|
540 | ''') | |
|
720 | """ | |
|
721 | ) | |
|
541 | 722 | |
|
542 | op.execute(''' | |

723 | op.execute( | |
|
724 | """ | |
|
543 | 725 |
|
|
544 | 726 |
|
|
545 | 727 |
|
|
546 | 728 |
|
|
547 | 729 | main_table varchar := 'reports'; |
|
548 | 730 | partitioned_table varchar := ''; |
|
549 | 731 | partitioned_parent_table varchar := ''; |
|
550 | 732 |
|
|
551 | 733 |
|
|
552 | 734 | partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
553 | 735 | partitioned_parent_table := 'reports_groups_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
554 | 736 |
|
|
555 | 737 |
|
|
556 | 738 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
557 | 739 |
|
|
558 | 740 |
|
|
559 | 741 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
560 | 742 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)', |
|
561 | 743 |
|
|
562 | 744 | quote_literal(date_trunc('month', NEW.report_group_time)::date) , |
|
563 | 745 | quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text), |
|
564 | 746 |
|
|
565 | 747 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
566 | 748 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
567 | 749 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_group_id FOREIGN KEY (group_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table); |
|
568 | 750 | EXECUTE format('CREATE INDEX ix_%s_report_group_time ON %s USING btree (report_group_time);', partitioned_table, partitioned_table); |
|
569 | 751 | EXECUTE format('CREATE INDEX ix_%s_group_id ON %s USING btree (group_id);', partitioned_table, partitioned_table); |
|
570 | 752 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); |
|
571 | 753 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
572 | 754 |
|
|
573 | 755 |
|
|
574 | 756 |
|
|
575 | 757 |
|
|
576 | ''') | |
|
758 | """ | |
|
759 | ) | |
|
577 | 760 | |
|
578 | op.execute(''' | |

761 | op.execute( | |
|
762 | """ | |
|
579 | 763 |
|
|
580 | ''') | |
|
581 | ||
|
764 | """ | |
|
765 | ) | |
|
582 | 766 | |
|
583 | op.execute(''' | |

767 | op.execute( | |
|
768 | """ | |
|
584 | 769 |
|
|
585 | 770 |
|
|
586 | 771 |
|
|
587 | 772 |
|
|
588 | 773 | main_table varchar := 'slow_calls'; |
|
589 | 774 | partitioned_table varchar := ''; |
|
590 | 775 | partitioned_parent_table varchar := ''; |
|
591 | 776 |
|
|
592 | 777 |
|
|
593 | 778 | partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
594 | 779 | partitioned_parent_table := 'reports_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
595 | 780 |
|
|
596 | 781 |
|
|
597 | 782 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
598 | 783 |
|
|
599 | 784 |
|
|
600 | 785 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
601 | 786 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)', |
|
602 | 787 |
|
|
603 | 788 | quote_literal(date_trunc('month', NEW.report_group_time)::date) , |
|
604 | 789 | quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text), |
|
605 | 790 |
|
|
606 | 791 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
607 | 792 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
608 | 793 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_report_id FOREIGN KEY (report_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table); |
|
609 | 794 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); |
|
610 | 795 | EXECUTE format('CREATE INDEX ix_%s_report_id ON %s USING btree (report_id);', partitioned_table, partitioned_table); |
|
611 | 796 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s USING btree (timestamp);', partitioned_table, partitioned_table); |
|
612 | 797 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
613 | 798 |
|
|
614 | 799 |
|
|
615 | 800 |
|
|
616 | 801 |
|
|
617 | ''') | |
|
802 | """ | |
|
803 | ) | |
|
618 | 804 | |
|
619 | op.execute(''' | |

805 | op.execute( | |
|
806 | """ | |
|
620 | 807 |
|
|
621 | ''') | |
|
808 | """ | |
|
809 | ) | |
|
810 | ||
|
622 | 811 | |
|
623 | 812 | def downgrade(): |
|
624 | 813 | pass |
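Editor's note on the PL/pgSQL blocks above: the truncated op.execute(''' ... ''') sections define partition-management trigger functions (for logs, metrics, reports_groups, reports_stats, reports and slow_calls) that compute a child table name from the inserted row's timestamp, create the monthly or daily child table with its constraints and indexes if it does not yet exist, and reroute the row into it via EXECUTE 'INSERT INTO ' || partitioned_table. For such a function to fire, each parent table still needs a row-level BEFORE INSERT trigger; a minimal sketch of that wiring, with the trigger and function names assumed rather than recovered from the truncated SQL:

    op.execute(
        """
        CREATE TRIGGER partition_logs  -- hypothetical name, for illustration only
        BEFORE INSERT ON logs
        FOR EACH ROW EXECUTE PROCEDURE partition_logs();
        """
    )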
@@ -1,32 +1,40 b'' | |||
|
1 | 1 | """connect resources to alert_channels |
|
2 | 2 | |
|
3 | 3 | Revision ID: e9fcfbdd9498 |
|
4 | 4 | Revises: 55b6e612672f |
|
5 | 5 | Create Date: 2018-02-28 13:52:50.717217 |
|
6 | 6 | |
|
7 | 7 | """ |
|
8 | 8 | |
|
9 | 9 | # revision identifiers, used by Alembic. |
|
10 | revision = 'e9fcfbdd9498' | |

11 | down_revision = '55b6e612672f' | |

10 | revision = "e9fcfbdd9498" | |
|
11 | down_revision = "55b6e612672f" | |
|
12 | 12 | |
|
13 | 13 | from alembic import op |
|
14 | 14 | import sqlalchemy as sa |
|
15 | 15 | |
|
16 | 16 | |
|
17 | 17 | def upgrade(): |
|
18 | 18 | op.create_table( |
|
19 | 'channels_resources', | |

20 | sa.Column('channel_pkey', sa.Integer, | |
|
21 | sa.ForeignKey('alert_channels.pkey', | |
|
22 | ondelete='CASCADE', onupdate='CASCADE'), | |
|
23 | primary_key=True), | |
|
24 | sa.Column('resource_id', sa.Integer, | |
|
25 | sa.ForeignKey('resources.resource_id', | |
|
26 | ondelete='CASCADE', onupdate='CASCADE'), | |
|
27 | primary_key=True) | |
|
19 | "channels_resources", | |
|
20 | sa.Column( | |
|
21 | "channel_pkey", | |
|
22 | sa.Integer, | |
|
23 | sa.ForeignKey( | |
|
24 | "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE" | |
|
25 | ), | |
|
26 | primary_key=True, | |
|
27 | ), | |
|
28 | sa.Column( | |
|
29 | "resource_id", | |
|
30 | sa.Integer, | |
|
31 | sa.ForeignKey( | |
|
32 | "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE" | |
|
33 | ), | |
|
34 | primary_key=True, | |
|
35 | ), | |
|
28 | 36 | ) |
|
29 | 37 | |
|
30 | 38 | |
|
31 | 39 | def downgrade(): |
|
32 | op.drop_table('channels_resources') | |

40 | op.drop_table("channels_resources") |
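This migration only adds the channels_resources association table, a composite primary key over two cascading foreign keys, so the downgrade is a single drop_table. The ORM side of the same table appears further down in alert_channel.py as channel_resources_m2m_table; a minimal sketch of the many-to-many mapping it enables (mirroring the relationship defined in that file, attribute placement illustrative):

    resources = sa.orm.relationship(
        "Resource",
        secondary="channels_resources",  # the association table created above
        backref="resources",
    )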
@@ -1,130 +1,139 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | from sqlalchemy.ext.declarative import declarative_base |
|
20 | 20 | from sqlalchemy import MetaData |
|
21 | 21 | from sqlalchemy.orm import scoped_session |
|
22 | 22 | from sqlalchemy.orm import sessionmaker |
|
23 | 23 | from zope.sqlalchemy import ZopeTransactionExtension |
|
24 | 24 | import ziggurat_foundations |
|
25 | 25 | from ziggurat_foundations.models.base import get_db_session |
|
26 | 26 | |
|
27 | 27 | log = logging.getLogger(__name__) |
|
28 | 28 | |
|
29 | 29 | DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) |
|
30 | 30 | |
|
31 | 31 | NAMING_CONVENTION = { |
|
32 | "ix": 'ix_%(column_0_label)s', | |

32 | "ix": "ix_%(column_0_label)s", | |
|
33 | 33 | "uq": "uq_%(table_name)s_%(column_0_name)s", |
|
34 | 34 | "ck": "ck_%(table_name)s_%(constraint_name)s", |
|
35 | 35 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", |
|
36 | "pk": "pk_%(table_name)s" | |
|
36 | "pk": "pk_%(table_name)s", | |
|
37 | 37 | } |
|
38 | 38 | |
|
39 | 39 | metadata = MetaData(naming_convention=NAMING_CONVENTION) |
|
40 | 40 | Base = declarative_base(metadata=metadata) |
|
41 | 41 | |
|
42 | 42 | # optional for request.db approach |
|
43 | 43 | ziggurat_foundations.models.DBSession = DBSession |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class Datastores(object): |
|
47 | 47 | redis = None |
|
48 | 48 | es = None |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | def register_datastores(es_conn, redis_conn, redis_lockmgr): |
|
52 | 52 | Datastores.es = es_conn |
|
53 | 53 | Datastores.redis = redis_conn |
|
54 | 54 | Datastores.lockmgr = redis_lockmgr |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | class SliceableESQuery(object): |
|
58 | 58 | def __init__(self, query, sort_query=None, aggregations=False, **kwconfig): |
|
59 | 59 | self.query = query |
|
60 | 60 | self.sort_query = sort_query |
|
61 | 61 | self.aggregations = aggregations |
|
62 | self.items_per_page = kwconfig.pop('items_per_page', 10) | |

63 | self.page = kwconfig.pop('page', 1) | |

62 | self.items_per_page = kwconfig.pop("items_per_page", 10) | |
|
63 | self.page = kwconfig.pop("page", 1) | |
|
64 | 64 | self.kwconfig = kwconfig |
|
65 | 65 | self.result = None |
|
66 | 66 | |
|
67 | 67 | def __getitem__(self, index): |
|
68 | 68 | config = self.kwconfig.copy() |
|
69 | config['from_'] = index.start | |

69 | config["from_"] = index.start | |
|
70 | 70 | query = self.query.copy() |
|
71 | 71 | if self.sort_query: |
|
72 | 72 | query.update(self.sort_query) |
|
73 | self.result = Datastores.es.search(body=query, size=self.items_per_page, | |

74 | **config) | |
|
73 | self.result = Datastores.es.search( | |
|
74 | body=query, size=self.items_per_page, **config | |
|
75 | ) | |
|
75 | 76 | if self.aggregations: |
|
76 | self.items = self.result.get('aggregations') | |

77 | self.items = self.result.get("aggregations") | |
|
77 | 78 | else: |
|
78 | self.items = self.result['hits']['hits'] | |

79 | self.items = self.result["hits"]["hits"] | |
|
79 | 80 | |
|
80 | 81 | return self.items |
|
81 | 82 | |
|
82 | 83 | def __iter__(self): |
|
83 | 84 | return self.result |
|
84 | 85 | |
|
85 | 86 | def __len__(self): |
|
86 | 87 | config = self.kwconfig.copy() |
|
87 | 88 | query = self.query.copy() |
|
88 | self.result = Datastores.es.search(body=query, size=self.items_per_page, | |

89 | **config) | |
|
89 | self.result = Datastores.es.search( | |
|
90 | body=query, size=self.items_per_page, **config | |
|
91 | ) | |
|
90 | 92 | if self.aggregations: |
|
91 | self.items = self.result.get('aggregations') | |

93 | self.items = self.result.get("aggregations") | |
|
92 | 94 | else: |
|
93 | self.items = self.result['hits']['hits'] | |

95 | self.items = self.result["hits"]["hits"] | |
|
94 | 96 | |
|
95 | count = int(self.result['hits']['total']) | |

97 | count = int(self.result["hits"]["total"]) | |
|
96 | 98 | return count if count < 5000 else 5000 |
|
97 | 99 | |
|
98 | 100 | |
|
99 | 101 | from appenlight.models.resource import Resource |
|
100 | 102 | from appenlight.models.application import Application |
|
101 | 103 | from appenlight.models.user import User |
|
102 | 104 | from appenlight.models.alert_channel import AlertChannel |
|
103 | 105 | from appenlight.models.alert_channel_action import AlertChannelAction |
|
104 | 106 | from appenlight.models.metric import Metric |
|
105 | from appenlight.models.application_postprocess_conf import \ | |

106 | ApplicationPostprocessConf | |
|
107 | from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf | |
|
107 | 108 | from appenlight.models.auth_token import AuthToken |
|
108 | 109 | from appenlight.models.event import Event |
|
109 | 110 | from appenlight.models.external_identity import ExternalIdentity |
|
110 | 111 | from appenlight.models.group import Group |
|
111 | 112 | from appenlight.models.group_permission import GroupPermission |
|
112 | 113 | from appenlight.models.group_resource_permission import GroupResourcePermission |
|
113 | 114 | from appenlight.models.log import Log |
|
114 | 115 | from appenlight.models.plugin_config import PluginConfig |
|
115 | 116 | from appenlight.models.report import Report |
|
116 | 117 | from appenlight.models.report_group import ReportGroup |
|
117 | 118 | from appenlight.models.report_comment import ReportComment |
|
118 | 119 | from appenlight.models.report_assignment import ReportAssignment |
|
119 | 120 | from appenlight.models.report_stat import ReportStat |
|
120 | 121 | from appenlight.models.slow_call import SlowCall |
|
121 | 122 | from appenlight.models.tag import Tag |
|
122 | 123 | from appenlight.models.user_group import UserGroup |
|
123 | 124 | from appenlight.models.user_permission import UserPermission |
|
124 | 125 | from appenlight.models.user_resource_permission import UserResourcePermission |
|
125 | 126 | from ziggurat_foundations import ziggurat_model_init |
|
126 | 127 | |
|
127 | ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission, | |
|
128 | UserResourcePermission, GroupResourcePermission, | |
|
129 | Resource, | |
|
130 | ExternalIdentity, passwordmanager=None) | |
|
128 | ziggurat_model_init( | |
|
129 | User, | |
|
130 | Group, | |
|
131 | UserGroup, | |
|
132 | GroupPermission, | |
|
133 | UserPermission, | |
|
134 | UserResourcePermission, | |
|
135 | GroupResourcePermission, | |
|
136 | Resource, | |
|
137 | ExternalIdentity, | |
|
138 | passwordmanager=None, | |
|
139 | ) |
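SliceableESQuery above lets paginators treat an Elasticsearch result set like a list: slicing maps to the from_/size parameters of Datastores.es.search(), and len() issues a second search whose hit total is capped at 5000. A hypothetical usage sketch (the query body and the index keyword are illustrative; any extra kwargs are simply forwarded to es.search() via **kwconfig):

    query = SliceableESQuery(
        {"query": {"match_all": {}}},  # any ES query body
        index="rc_logs",               # assumed index name, passed through to es.search()
        items_per_page=10,
    )
    first_page = query[0:10]  # triggers es.search(from_=0, size=10, ...)
    total = len(query)        # second search; result capped at 5000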
@@ -1,305 +1,298 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import sqlalchemy as sa |
|
19 | 19 | import urllib.request, urllib.parse, urllib.error |
|
20 | 20 | from datetime import timedelta |
|
21 | 21 | from appenlight.models import Base |
|
22 | 22 | from appenlight.lib.utils.date_utils import convert_date |
|
23 | 23 | from sqlalchemy.dialects.postgresql import JSON |
|
24 | 24 | from ziggurat_foundations.models.base import BaseModel |
|
25 | 25 | |
|
26 | 26 | log = logging.getLogger(__name__) |
|
27 | 27 | |
|
28 | 28 | # |
|
29 | 29 | channel_rules_m2m_table = sa.Table( |
|
30 | 'channels_actions', Base.metadata, | |

31 | sa.Column('channel_pkey', sa.Integer, | |
|
32 | sa.ForeignKey('alert_channels.pkey')), | |

33 | sa.Column('action_pkey', sa.Integer, | |
|
34 | sa.ForeignKey('alert_channels_actions.pkey')) | |
|
30 | "channels_actions", | |
|
31 | Base.metadata, | |
|
32 | sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")), | |
|
33 | sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")), | |
|
35 | 34 | ) |
|
36 | 35 | |
|
37 | 36 | channel_resources_m2m_table = sa.Table( |
|
38 | 'channels_resources', Base.metadata, | |

39 | sa.Column('channel_pkey', sa.Integer, | |
|
40 | sa.ForeignKey('alert_channels.pkey')), | |

41 | sa.Column('resource_id', sa.Integer, | |

42 | sa.ForeignKey('resources.resource_id')) | |
|
37 | "channels_resources", | |
|
38 | Base.metadata, | |
|
39 | sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")), | |
|
40 | sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")), | |
|
43 | 41 | ) |
|
44 | 42 | |
|
45 | DATE_FRMT = '%Y-%m-%dT%H:%M' | |

43 | DATE_FRMT = "%Y-%m-%dT%H:%M" | |
|
46 | 44 | |
|
47 | 45 | |
|
48 | 46 | class AlertChannel(Base, BaseModel): |
|
49 | 47 | """ |
|
50 | 48 | Stores information about possible alerting options |
|
51 | 49 | """ |
|
52 | __tablename__ = 'alert_channels' | |
|
53 | __possible_channel_names__ = ['email'] | |
|
50 | ||
|
51 | __tablename__ = "alert_channels" | |
|
52 | __possible_channel_names__ = ["email"] | |
|
54 | 53 | __mapper_args__ = { |
|
55 | 'polymorphic_on': 'channel_name', | |

56 | 'polymorphic_identity': 'integration', | |

54 | "polymorphic_on": "channel_name", | |
|
55 | "polymorphic_identity": "integration", | |
|
57 | 56 | } |
|
58 | 57 | |
|
59 | owner_id = sa.Column(sa.Unicode(30), | |

60 | sa.ForeignKey('users.id', onupdate='CASCADE', | |
|
61 | ondelete='CASCADE')) | |
|
58 | owner_id = sa.Column( | |
|
59 | sa.Unicode(30), | |
|
60 | sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), | |
|
61 | ) | |
|
62 | 62 | channel_name = sa.Column(sa.Unicode(25), nullable=False) |
|
63 | channel_value = sa.Column(sa.Unicode(80), nullable=False, default='') | |

64 | channel_json_conf = sa.Column(JSON(), nullable=False, default='') | |

65 | channel_validated = sa.Column(sa.Boolean, nullable=False, | |
|
66 | default=False) | |
|
67 | send_alerts = sa.Column(sa.Boolean, nullable=False, | |

68 | default=True) | |
|
69 | daily_digest = sa.Column(sa.Boolean, nullable=False, | |
|
70 | default=True) | |
|
71 | integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'), | |
|
72 | nullable=True) | |
|
63 | channel_value = sa.Column(sa.Unicode(80), nullable=False, default="") | |
|
64 | channel_json_conf = sa.Column(JSON(), nullable=False, default="") | |
|
65 | channel_validated = sa.Column(sa.Boolean, nullable=False, default=False) | |
|
66 | send_alerts = sa.Column(sa.Boolean, nullable=False, default=True) | |
|
67 | daily_digest = sa.Column(sa.Boolean, nullable=False, default=True) | |
|
68 | integration_id = sa.Column( | |
|
69 | sa.Integer, sa.ForeignKey("integrations.id"), nullable=True | |
|
70 | ) | |
|
73 | 71 | pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True) |
|
74 | 72 | |
|
75 | channel_actions = sa.orm.relationship('AlertChannelAction', | |

76 | cascade="all", | |
|
77 | passive_deletes=True, | |
|
78 | passive_updates=True, | |
|
79 | secondary=channel_rules_m2m_table, | |
|
80 | backref='channels') | |
|
81 | resources = sa.orm.relationship('Resource', | |
|
82 | cascade="all", | |
|
83 | passive_deletes=True, | |
|
84 | passive_updates=True, | |
|
85 | secondary=channel_resources_m2m_table, | |
|
86 | backref='resources') | |
|
73 | channel_actions = sa.orm.relationship( | |
|
74 | "AlertChannelAction", | |
|
75 | cascade="all", | |
|
76 | passive_deletes=True, | |
|
77 | passive_updates=True, | |
|
78 | secondary=channel_rules_m2m_table, | |
|
79 | backref="channels", | |
|
80 | ) | |
|
81 | resources = sa.orm.relationship( | |
|
82 | "Resource", | |
|
83 | cascade="all", | |
|
84 | passive_deletes=True, | |
|
85 | passive_updates=True, | |
|
86 | secondary=channel_resources_m2m_table, | |
|
87 | backref="resources", | |
|
88 | ) | |
|
87 | 89 | |
|
88 | 90 | @property |
|
89 | 91 | def channel_visible_value(self): |
|
90 | 92 | if self.integration: |
|
91 | return '{}: {}'.format( | |

92 | self.channel_name, | |
|
93 | self.integration.resource.resource_name | |
|
93 | return "{}: {}".format( | |
|
94 | self.channel_name, self.integration.resource.resource_name | |
|
94 | 95 | ) |
|
95 | 96 | |
|
96 | return '{}: {}'.format( | |
|
97 | self.channel_name, | |
|
98 | self.channel_value | |
|
99 | ) | |
|
97 | return "{}: {}".format(self.channel_name, self.channel_value) | |
|
100 | 98 | |
|
101 | def get_dict(self, exclude_keys=None, include_keys=None, | |
|
102 | extended_info=True): | |
|
99 | def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True): | |
|
103 | 100 | """ |
|
104 | 101 | Returns dictionary with required information that will be consumed by |
|
105 | 102 | angular |
|
106 | 103 | """ |
|
107 | instance_dict = super(AlertChannel, self).get_dict(exclude_keys, | |
|
108 | include_keys) | |
|
104 | instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys) | |
|
109 | 105 | exclude_keys_list = exclude_keys or [] |
|
110 | 106 | include_keys_list = include_keys or [] |
|
111 | 107 | |
|
112 | instance_dict['supports_report_alerting'] = True | |

113 | instance_dict['channel_visible_value'] = self.channel_visible_value | |

108 | instance_dict["supports_report_alerting"] = True | |
|
109 | instance_dict["channel_visible_value"] = self.channel_visible_value | |
|
114 | 110 | |
|
115 | 111 | if extended_info: |
|
116 | instance_dict['actions'] = [ | |

117 | rule.get_dict(extended_info=True) for | |
|
118 | rule in self.channel_actions] | |
|
112 | instance_dict["actions"] = [ | |
|
113 | rule.get_dict(extended_info=True) for rule in self.channel_actions | |
|
114 | ] | |
|
119 | 115 | |
|
120 | del instance_dict['channel_json_conf'] | |

116 | del instance_dict["channel_json_conf"] | |
|
121 | 117 | |
|
122 | 118 | if self.integration: |
|
123 | 119 | instance_dict[ |
|
124 | 'supports_report_alerting'] = \ | |

125 | self.integration.supports_report_alerting | |

120 | "supports_report_alerting" | |
|
121 | ] = self.integration.supports_report_alerting | |
|
126 | 122 | d = {} |
|
127 | 123 | for k in instance_dict.keys(): |
|
128 | if k not in exclude_keys_list and \ | |

129 | (k in include_keys_list or not include_keys): | |

124 | if k not in exclude_keys_list and ( | |
|
125 | k in include_keys_list or not include_keys | |
|
126 | ): | |
|
130 | 127 | d[k] = instance_dict[k] |
|
131 | 128 | return d |
|
132 | 129 | |
|
133 | 130 | def __repr__(self): |
|
134 | return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name, | |

135 | self.channel_value, | |
|
136 | self.user_name,) | |
|
131 | return "<AlertChannel: (%s,%s), user:%s>" % ( | |
|
132 | self.channel_name, | |
|
133 | self.channel_value, | |
|
134 | self.user_name, | |
|
135 | ) | |
|
137 | 136 | |
|
138 | 137 | def send_digest(self, **kwargs): |
|
139 | 138 | """ |
|
140 | 139 | This should implement daily top error report notifications |
|
141 | 140 | """ |
|
142 | log.warning('send_digest NOT IMPLEMENTED') | |

141 | log.warning("send_digest NOT IMPLEMENTED") | |
|
143 | 142 | |
|
144 | 143 | def notify_reports(self, **kwargs): |
|
145 | 144 | """ |
|
146 | 145 | This should implement notification of reports that occured in 1 min |
|
147 | 146 | interval |
|
148 | 147 | """ |
|
149 | log.warning('notify_reports NOT IMPLEMENTED') | |

148 | log.warning("notify_reports NOT IMPLEMENTED") | |
|
150 | 149 | |
|
151 | 150 | def notify_alert(self, **kwargs): |
|
152 | 151 | """ |
|
153 | 152 | Notify user of report/uptime/chart threshold events based on events alert |
|
154 | 153 | type |
|
155 | 154 | |
|
156 | 155 | Kwargs: |
|
157 | 156 | application: application that the event applies for, |
|
158 | 157 | event: event that is notified, |
|
159 | 158 | user: user that should be notified |
|
160 | 159 | request: request object |
|
161 | 160 | |
|
162 | 161 | """ |
|
163 | alert_name = kwargs['event'].unified_alert_name() | |

164 | if alert_name in ['slow_report_alert', 'error_report_alert']: | |

162 | alert_name = kwargs["event"].unified_alert_name() | |
|
163 | if alert_name in ["slow_report_alert", "error_report_alert"]: | |
|
165 | 164 | self.notify_report_alert(**kwargs) |
|
166 | elif alert_name == 'uptime_alert': | |

165 | elif alert_name == "uptime_alert": | |
|
167 | 166 | self.notify_uptime_alert(**kwargs) |
|
168 | elif alert_name == 'chart_alert': | |

167 | elif alert_name == "chart_alert": | |
|
169 | 168 | self.notify_chart_alert(**kwargs) |
|
170 | 169 | |
|
171 | 170 | def notify_chart_alert(self, **kwargs): |
|
172 | 171 | """ |
|
173 | 172 | This should implement report open/close alerts notifications |
|
174 | 173 | """ |
|
175 | log.warning('notify_chart_alert NOT IMPLEMENTED') | |

174 | log.warning("notify_chart_alert NOT IMPLEMENTED") | |
|
176 | 175 | |
|
177 | 176 | def notify_report_alert(self, **kwargs): |
|
178 | 177 | """ |
|
179 | 178 | This should implement report open/close alerts notifications |
|
180 | 179 | """ |
|
181 | log.warning('notify_report_alert NOT IMPLEMENTED') | |

180 | log.warning("notify_report_alert NOT IMPLEMENTED") | |
|
182 | 181 | |
|
183 | 182 | def notify_uptime_alert(self, **kwargs): |
|
184 | 183 | """ |
|
185 | 184 | This should implement uptime open/close alerts notifications |
|
186 | 185 | """ |
|
187 | log.warning('notify_uptime_alert NOT IMPLEMENTED') | |

186 | log.warning("notify_uptime_alert NOT IMPLEMENTED") | |
|
188 | 187 | |
|
189 | 188 | def get_notification_basic_vars(self, kwargs): |
|
190 | 189 | """ |
|
191 | 190 | Sets most common variables used later for rendering notifications for |
|
192 | 191 | channel |
|
193 | 192 | """ |
|
194 | if 'event' in kwargs: | |

195 | kwargs['since_when'] = kwargs['event'].start_date | |

193 | if "event" in kwargs: | |
|
194 | kwargs["since_when"] = kwargs["event"].start_date | |
|
196 | 195 | |
|
197 | url_start_date = kwargs.get('since_when') - timedelta(minutes=1) | |

198 | url_end_date = kwargs.get('since_when') + timedelta(minutes=4) | |

196 | url_start_date = kwargs.get("since_when") - timedelta(minutes=1) | |
|
197 | url_end_date = kwargs.get("since_when") + timedelta(minutes=4) | |
|
199 | 198 | tmpl_vars = { |
|
200 | "timestamp": kwargs['since_when'], | |

201 | "user": kwargs['user'], | |

202 | "since_when": kwargs.get('since_when'), | |

199 | "timestamp": kwargs["since_when"], | |
|
200 | "user": kwargs["user"], | |
|
201 | "since_when": kwargs.get("since_when"), | |
|
203 | 202 | "url_start_date": url_start_date, |
|
204 | "url_end_date": url_end_date | |
|
203 | "url_end_date": url_end_date, | |
|
205 | 204 | } |
|
206 | tmpl_vars["resource_name"] = kwargs['resource'].resource_name | |

207 | tmpl_vars["resource"] = kwargs['resource'] | |

205 | tmpl_vars["resource_name"] = kwargs["resource"].resource_name | |
|
206 | tmpl_vars["resource"] = kwargs["resource"] | |
|
208 | 207 | |
|
209 | if 'event' in kwargs: | |

210 | tmpl_vars['event_values'] = kwargs['event'].values | |

211 | tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name() | |

212 | tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action() | |

208 | if "event" in kwargs: | |
|
209 | tmpl_vars["event_values"] = kwargs["event"].values | |
|
210 | tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name() | |
|
211 | tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action() | |
|
213 | 212 | return tmpl_vars |
|
214 | 213 | |
|
215 | 214 | def report_alert_notification_vars(self, kwargs): |
|
216 | 215 | tmpl_vars = self.get_notification_basic_vars(kwargs) |
|
217 | reports = kwargs.get('reports', []) | |

216 | reports = kwargs.get("reports", []) | |
|
218 | 217 | tmpl_vars["reports"] = reports |
|
219 | 218 | tmpl_vars["confirmed_total"] = len(reports) |
|
220 | 219 | |
|
221 | 220 | tmpl_vars["report_type"] = "error reports" |
|
222 | tmpl_vars["url_report_type"] = 'report/list' | |

221 | tmpl_vars["url_report_type"] = "report/list" | |
|
223 | 222 | |
|
224 | alert_type = tmpl_vars.get('alert_type', '') | |

225 | if 'slow_report' in alert_type: | |

223 | alert_type = tmpl_vars.get("alert_type", "") | |
|
224 | if "slow_report" in alert_type: | |
|
226 | 225 | tmpl_vars["report_type"] = "slow reports" |
|
227 | tmpl_vars["url_report_type"] = 'report/list_slow' | |

226 | tmpl_vars["url_report_type"] = "report/list_slow" | |
|
228 | 227 | |
|
229 | app_url = kwargs['request'].registry.settings['_mail_url'] | |

228 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
|
230 | 229 | |
|
231 | destination_url = kwargs['request'].route_url('/', | |

232 | _app_url=app_url) | |
|
230 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
|
233 | 231 | if alert_type: |
|
234 | destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format( | |

232 | destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format( | |
|
235 | 233 | tmpl_vars["url_report_type"], |
|
236 | tmpl_vars['resource'].resource_id, | |

237 | tmpl_vars['url_start_date'].strftime(DATE_FRMT), | |

238 | tmpl_vars['url_end_date'].strftime(DATE_FRMT), | |

234 | tmpl_vars["resource"].resource_id, | |
|
235 | tmpl_vars["url_start_date"].strftime(DATE_FRMT), | |
|
236 | tmpl_vars["url_end_date"].strftime(DATE_FRMT), | |
|
239 | 237 | ) |
|
240 | 238 | else: |
|
241 | destination_url += 'ui/{}?resource={}'.format( | |

242 | tmpl_vars["url_report_type"], | |
|
243 | tmpl_vars['resource'].resource_id | |
|
239 | destination_url += "ui/{}?resource={}".format( | |
|
240 | tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id | |
|
244 | 241 | ) |
|
245 | 242 | tmpl_vars["destination_url"] = destination_url |
|
246 | 243 | |
|
247 | 244 | return tmpl_vars |
|
248 | 245 | |
|
249 | 246 | def uptime_alert_notification_vars(self, kwargs): |
|
250 | 247 | tmpl_vars = self.get_notification_basic_vars(kwargs) |
|
251 | app_url = kwargs['request'].registry.settings['_mail_url'] | |

252 | destination_url = kwargs['request'].route_url('/', _app_url=app_url) | |

253 | destination_url += 'ui/{}?resource={}'.format( | |

254 | 'uptime', | |
|
255 | tmpl_vars['resource'].resource_id) | |
|
256 | tmpl_vars['destination_url'] = destination_url | |

257 | ||
|
258 | reason = '' | |

259 | e_values = tmpl_vars.get('event_values') | |

260 | ||
|
261 | if e_values and e_values.get('response_time') == 0: | |

262 | reason += ' Response time was slower than 20 seconds.' | |

248 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
|
249 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
|
250 | destination_url += "ui/{}?resource={}".format( | |
|
251 | "uptime", tmpl_vars["resource"].resource_id | |
|
252 | ) | |
|
253 | tmpl_vars["destination_url"] = destination_url | |
|
254 | ||
|
255 | reason = "" | |
|
256 | e_values = tmpl_vars.get("event_values") | |
|
257 | ||
|
258 | if e_values and e_values.get("response_time") == 0: | |
|
259 | reason += " Response time was slower than 20 seconds." | |
|
263 | 260 | elif e_values: |
|
264 | code = e_values.get('status_code') | 

265 | reason += ' Response status code: %s.' % code | 

261 | code = e_values.get("status_code") | |
|
262 | reason += " Response status code: %s." % code | |
|
266 | 263 | |
|
267 | tmpl_vars['reason'] = reason | 

264 | tmpl_vars["reason"] = reason | |
|
268 | 265 | return tmpl_vars |
|
269 | 266 | |
|
270 | 267 | def chart_alert_notification_vars(self, kwargs): |
|
271 | 268 | tmpl_vars = self.get_notification_basic_vars(kwargs) |
|
272 | tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name'] | 

273 | tmpl_vars['action_name'] = tmpl_vars['event_values'].get( | 

274 | 'action_name') or '' | |
|
275 | matched_values = tmpl_vars['event_values']['matched_step_values'] | |
|
276 | tmpl_vars['readable_values'] = [] | |
|
277 | for key, value in list(matched_values['values'].items()): | |
|
278 | matched_label = matched_values['labels'].get(key) | |
|
269 | tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"] | |
|
270 | tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or "" | |
|
271 | matched_values = tmpl_vars["event_values"]["matched_step_values"] | |
|
272 | tmpl_vars["readable_values"] = [] | |
|
273 | for key, value in list(matched_values["values"].items()): | |
|
274 | matched_label = matched_values["labels"].get(key) | |
|
279 | 275 | if matched_label: |
|
280 | tmpl_vars['readable_values'].append({ | 

281 | 'label': matched_label['human_label'], | 

282 | 'value': value | 

283 | }) | |
|
284 | tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'], | |
|
285 | key=lambda x: x['label']) | |
|
286 | start_date = convert_date(tmpl_vars['event_values']['start_interval']) | 

276 | tmpl_vars["readable_values"].append( | |
|
277 | {"label": matched_label["human_label"], "value": value} | |
|
278 | ) | |
|
279 | tmpl_vars["readable_values"] = sorted( | |
|
280 | tmpl_vars["readable_values"], key=lambda x: x["label"] | |
|
281 | ) | |
|
282 | start_date = convert_date(tmpl_vars["event_values"]["start_interval"]) | |
|
287 | 283 | end_date = None |
|
288 | if tmpl_vars['event_values'].get('end_interval'): | 

289 | end_date = convert_date(tmpl_vars['event_values']['end_interval']) | 

284 | if tmpl_vars["event_values"].get("end_interval"): | |
|
285 | end_date = convert_date(tmpl_vars["event_values"]["end_interval"]) | |
|
290 | 286 | |
|
291 | app_url = kwargs['request'].registry.settings['_mail_url'] | 

292 | destination_url = kwargs['request'].route_url('/', _app_url=app_url) | 

287 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
|
288 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
|
293 | 289 | to_encode = { |
|
294 | 'resource': tmpl_vars['event_values']['resource'], | 

295 | 'start_date': start_date.strftime(DATE_FRMT), | 

290 | "resource": tmpl_vars["event_values"]["resource"], | |
|
291 | "start_date": start_date.strftime(DATE_FRMT), | |
|
296 | 292 | } |
|
297 | 293 | if end_date: |
|
298 | to_encode['end_date'] = end_date.strftime(DATE_FRMT) | 

294 | to_encode["end_date"] = end_date.strftime(DATE_FRMT) | |
|
299 | 295 | |
|
300 | destination_url += 'ui/{}?{}'.format( | 

301 | 'logs', | |
|
302 | urllib.parse.urlencode(to_encode) | |
|
303 | ) | |
|
304 | tmpl_vars['destination_url'] = destination_url | |
|
296 | destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode)) | |
|
297 | tmpl_vars["destination_url"] = destination_url | |
|
305 | 298 | return tmpl_vars |
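
The method above finishes by urlencoding the resource id and interval dates into a "ui/logs" link. A minimal runnable sketch of that step, with hypothetical values standing in for the real template variables and app URL:

    import urllib.parse
    from datetime import datetime

    DATE_FRMT = "%Y-%m-%dT%H:%M"  # assumed to match the module constant used above

    to_encode = {
        "resource": 42,  # hypothetical resource id
        "start_date": datetime(2017, 1, 1, 12, 0).strftime(DATE_FRMT),
    }
    destination_url = "https://appenlight.local/"  # placeholder for route_url("/")
    destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode))
    # https://appenlight.local/ui/logs?resource=42&start_date=2017-01-01T12%3A00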
@@ -1,79 +1,84 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | |
|
19 | 19 | from ziggurat_foundations.models.services.resource import ResourceService |
|
20 | 20 | from appenlight.models import Base, get_db_session |
|
21 | 21 | from sqlalchemy.orm import validates |
|
22 | 22 | from ziggurat_foundations.models.base import BaseModel |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class AlertChannelAction(Base, BaseModel): |
|
26 | 26 | """ |
|
27 | 27 | Stores notifications conditions for user's alert channels |
|
28 | 28 | This is later used for rule parsing like "alert if http_status == 500" |
|
29 | 29 | """ |
|
30 | __tablename__ = 'alert_channels_actions' | |
|
31 | 30 | |
|
32 | types = ['report', 'chart'] | |
|
31 | __tablename__ = "alert_channels_actions" | |
|
33 | 32 | |
|
34 | owner_id = sa.Column(sa.Integer, | |
|
35 | sa.ForeignKey('users.id', onupdate='CASCADE', | |
|
36 | ondelete='CASCADE')) | |
|
33 | types = ["report", "chart"] | |
|
34 | ||
|
35 | owner_id = sa.Column( | |
|
36 | sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE") | |
|
37 | ) | |
|
37 | 38 | resource_id = sa.Column(sa.Integer()) |
|
38 | action = sa.Column(sa.Unicode(10), nullable=False, default='always') | 

39 | action = sa.Column(sa.Unicode(10), nullable=False, default="always") | |
|
39 | 40 | type = sa.Column(sa.Unicode(10), nullable=False) |
|
40 | 41 | other_id = sa.Column(sa.Unicode(40)) |
|
41 | 42 | pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True) |
|
42 | rule = sa.Column(sa.dialects.postgresql.JSON, | 

43 | nullable=False, default={'field': 'http_status', | |
|
44 | "op": "ge", "value": "500"}) | |
|
43 | rule = sa.Column( | |
|
44 | sa.dialects.postgresql.JSON, | |
|
45 | nullable=False, | |
|
46 | default={"field": "http_status", "op": "ge", "value": "500"}, | |
|
47 | ) | |
|
45 | 48 | config = sa.Column(sa.dialects.postgresql.JSON) |
|
46 | 49 | name = sa.Column(sa.Unicode(255)) |
|
47 | 50 | |
|
48 | @validates('notify_type') | 

51 | @validates("notify_type") | |
|
49 | 52 | def validate_email(self, key, notify_type): |
|
50 | assert notify_type in ['always', 'only_first'] | 

53 | assert notify_type in ["always", "only_first"] | |
|
51 | 54 | return notify_type |
|
52 | 55 | |
|
53 | 56 | def resource_name(self, db_session=None): |
|
54 | 57 | db_session = get_db_session(db_session) |
|
55 | 58 | if self.resource_id: |
|
56 | 59 | return ResourceService.by_resource_id( |
|
57 | self.resource_id, db_session=db_session).resource_name | 

60 | self.resource_id, db_session=db_session | |
|
61 | ).resource_name | |
|
58 | 62 | else: |
|
59 | return 'any resource' | 

63 | return "any resource" | |
|
60 | 64 | |
|
61 | def get_dict(self, exclude_keys=None, include_keys=None, | |
|
62 | extended_info=False): | |
|
65 | def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False): | |
|
63 | 66 | """ |
|
64 | 67 | Returns dictionary with required information that will be consumed by |
|
65 | 68 | angular |
|
66 | 69 | """ |
|
67 | 70 | instance_dict = super(AlertChannelAction, self).get_dict() |
|
68 | 71 | exclude_keys_list = exclude_keys or [] |
|
69 | 72 | include_keys_list = include_keys or [] |
|
70 | 73 | if extended_info: |
|
71 | instance_dict['channels'] = [ | 

72 | c.get_dict(extended_info=False) for c in self.channels] | 

74 | instance_dict["channels"] = [ | |
|
75 | c.get_dict(extended_info=False) for c in self.channels | |
|
76 | ] | |
|
73 | 77 | |
|
74 | 78 | d = {} |
|
75 | 79 | for k in instance_dict.keys(): |
|
76 | if k not in exclude_keys_list and \ | 

77 | (k in include_keys_list or not include_keys): | 

80 | if k not in exclude_keys_list and ( | |
|
81 | k in include_keys_list or not include_keys | |
|
82 | ): | |
|
78 | 83 | d[k] = instance_dict[k] |
|
79 | 84 | return d |
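
The include/exclude logic above keeps a key only when it is not excluded and either appears in include_keys or no include list was given at all. A small sketch of the same filter on a plain dict (field names are hypothetical):

    def filter_keys(instance_dict, exclude_keys=None, include_keys=None):
        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []
        return {
            k: v
            for k, v in instance_dict.items()
            if k not in exclude_keys_list
            and (k in include_keys_list or not include_keys)
        }

    row = {"pkey": 1, "rule": {"op": "ge"}, "name": "alert on 500s"}
    filter_keys(row, exclude_keys=["rule"])   # {'pkey': 1, 'name': 'alert on 500s'}
    filter_keys(row, include_keys=["name"])   # {'name': 'alert on 500s'}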
@@ -1,16 +1,15 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 |
@@ -1,188 +1,191 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | from appenlight.models.alert_channel import AlertChannel |
|
19 | 19 | from appenlight.models.integrations.campfire import CampfireIntegration |
|
20 | 20 | from webhelpers2.text import truncate |
|
21 | 21 | |
|
22 | 22 | log = logging.getLogger(__name__) |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class CampfireAlertChannel(AlertChannel): |
|
26 | __mapper_args__ = { | |
|
27 | 'polymorphic_identity': 'campfire' | |
|
28 | } | |
|
26 | __mapper_args__ = {"polymorphic_identity": "campfire"} | |
|
29 | 27 | |
|
30 | 28 | @property |
|
31 | 29 | def client(self): |
|
32 | 30 | client = CampfireIntegration.create_client( |
|
33 | self.integration.config['api_token'], | 

34 | self.integration.config['account']) | |
|
31 | self.integration.config["api_token"], self.integration.config["account"] | |
|
32 | ) | |
|
35 | 33 | return client |
|
36 | 34 | |
|
37 | 35 | def notify_reports(self, **kwargs): |
|
38 | 36 | """ |
|
39 | 37 | Notify user of individual reports |
|
40 | 38 | |
|
41 | 39 | kwargs: |
|
42 | 40 | application: application that the event applies for, |
|
43 | 41 | user: user that should be notified |
|
44 | 42 | request: request object |
|
45 | 43 | since_when: reports are newer than this time value, |
|
46 | 44 | reports: list of reports to render |
|
47 | 45 | |
|
48 | 46 | """ |
|
49 | 47 | template_vars = self.report_alert_notification_vars(kwargs) |
|
50 | 48 | |
|
51 | app_url = kwargs['request'].registry.settings['_mail_url'] | 

52 | destination_url = kwargs['request'].route_url('/', | 

53 | app_url=app_url) | 

54 | f_args = ('report', | 

55 | template_vars['resource'].resource_id, | 

56 | template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'), | 

57 | template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M')) | 

58 | destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format( | |
|
59 | *f_args) | |
|
60 | ||
|
61 | if template_vars['confirmed_total'] > 1: | 

49 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
|
50 | destination_url = kwargs["request"].route_url("/", app_url=app_url) | |
|
51 | f_args = ( | |
|
52 | "report", | |
|
53 | template_vars["resource"].resource_id, | |
|
54 | template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"), | |
|
55 | template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"), | |
|
56 | ) | |
|
57 | destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args) | |
|
58 | ||
|
59 | if template_vars["confirmed_total"] > 1: | |
|
62 | 60 | template_vars["title"] = "%s - %s reports" % ( |
|
63 | template_vars['resource_name'], | 

64 | template_vars['confirmed_total'], | 

61 | template_vars["resource_name"], | |
|
62 | template_vars["confirmed_total"], | |
|
65 | 63 | ) |
|
66 | 64 | else: |
|
67 | error_title = truncate(template_vars['reports'][0][1].error or | 

68 | 'slow report', 90) | |
|
65 | error_title = truncate( | |
|
66 | template_vars["reports"][0][1].error or "slow report", 90 | |
|
67 | ) | |
|
69 | 68 | template_vars["title"] = "%s - '%s' report" % ( |
|
70 | template_vars['resource_name'], | 

71 | error_title) | 

69 | template_vars["resource_name"], | |
|
70 | error_title, | |
|
71 | ) | |
|
72 | 72 | |
|
73 | template_vars["title"] += ' ' + destination_url | 

73 | template_vars["title"] += " " + destination_url | |
|
74 | 74 | |
|
75 | log_msg = 'NOTIFY : %s via %s :: %s reports' % ( | 

76 | kwargs['user'].user_name, | 

75 | log_msg = "NOTIFY : %s via %s :: %s reports" % ( | |
|
76 | kwargs["user"].user_name, | |
|
77 | 77 | self.channel_visible_value, |
|
78 | template_vars['confirmed_total']) | 

78 | template_vars["confirmed_total"], | |
|
79 | ) | |
|
79 | 80 | log.warning(log_msg) |
|
80 | 81 | |
|
81 | for room in self.integration.config['rooms'].split(','): | 

82 | for room in self.integration.config["rooms"].split(","): | |
|
82 | 83 | self.client.speak_to_room(room.strip(), template_vars["title"]) |
|
83 | 84 | |
|
84 | 85 | def notify_report_alert(self, **kwargs): |
|
85 | 86 | """ |
|
86 | 87 | Build and send report alert notification |
|
87 | 88 | |
|
88 | 89 | Kwargs: |
|
89 | 90 | application: application that the event applies for, |
|
90 | 91 | event: event that is notified, |
|
91 | 92 | user: user that should be notified |
|
92 | 93 | request: request object |
|
93 | 94 | |
|
94 | 95 | """ |
|
95 | 96 | template_vars = self.report_alert_notification_vars(kwargs) |
|
96 | 97 | |
|
97 | if kwargs['event'].unified_alert_action() == 'OPEN': | 

98 | title = 'ALERT %s: %s - %s %s %s' % ( | 

99 | template_vars['alert_action'], | 

100 | template_vars['resource_name'], | 

101 | kwargs['event'].values['reports'], | 

102 | template_vars['report_type'], | 

103 | template_vars['destination_url'] | 

98 | if kwargs["event"].unified_alert_action() == "OPEN": | |
|
99 | title = "ALERT %s: %s - %s %s %s" % ( | |
|
100 | template_vars["alert_action"], | |
|
101 | template_vars["resource_name"], | |
|
102 | kwargs["event"].values["reports"], | |
|
103 | template_vars["report_type"], | |
|
104 | template_vars["destination_url"], | |
|
104 | 105 | ) |
|
105 | 106 | |
|
106 | 107 | else: |
|
107 | title = 'ALERT %s: %s type: %s' % ( | 

108 | template_vars['alert_action'], | 

109 | template_vars['resource_name'], | 

110 | template_vars['alert_type'].replace('_', ' '), | 

108 | title = "ALERT %s: %s type: %s" % ( | |
|
109 | template_vars["alert_action"], | |
|
110 | template_vars["resource_name"], | |
|
111 | template_vars["alert_type"].replace("_", " "), | |
|
111 | 112 | ) |
|
112 | for room in self.integration.config['rooms'].split(','): | 

113 | self.client.speak_to_room(room.strip(), title, sound='VUVUZELA') | 

113 | for room in self.integration.config["rooms"].split(","): | |
|
114 | self.client.speak_to_room(room.strip(), title, sound="VUVUZELA") | |
|
114 | 115 | |
|
115 | 116 | def notify_uptime_alert(self, **kwargs): |
|
116 | 117 | """ |
|
117 | 118 | Build and send uptime alert notification |
|
118 | 119 | |
|
119 | 120 | Kwargs: |
|
120 | 121 | application: application that the event applies for, |
|
121 | 122 | event: event that is notified, |
|
122 | 123 | user: user that should be notified |
|
123 | 124 | request: request object |
|
124 | 125 | |
|
125 | 126 | """ |
|
126 | 127 | template_vars = self.uptime_alert_notification_vars(kwargs) |
|
127 | 128 | |
|
128 | message = 'ALERT %s: %s has uptime issues %s\n\n' % ( | 

129 | template_vars['alert_action'], | 

130 | template_vars['resource_name'], | 

131 | template_vars['destination_url'], | 

129 | message = "ALERT %s: %s has uptime issues %s\n\n" % ( | |
|
130 | template_vars["alert_action"], | |
|
131 | template_vars["resource_name"], | |
|
132 | template_vars["destination_url"], | |
|
132 | 133 | ) |
|
133 | message += template_vars['reason'] | 

134 | message += template_vars["reason"] | |
|
134 | 135 | |
|
135 | for room in self.integration.config['rooms'].split(','): | 

136 | self.client.speak_to_room(room.strip(), message, sound='VUVUZELA') | 

136 | for room in self.integration.config["rooms"].split(","): | |
|
137 | self.client.speak_to_room(room.strip(), message, sound="VUVUZELA") | |
|
137 | 138 | |
|
138 | 139 | def send_digest(self, **kwargs): |
|
139 | 140 | """ |
|
140 | 141 | Build and send daily digest notification |
|
141 | 142 | |
|
142 | 143 | kwargs: |
|
143 | 144 | application: application that the event applies for, |
|
144 | 145 | user: user that should be notified |
|
145 | 146 | request: request object |
|
146 | 147 | since_when: reports are newer than this time value, |
|
147 | 148 | reports: list of reports to render |
|
148 | 149 | |
|
149 | 150 | """ |
|
150 | 151 | template_vars = self.report_alert_notification_vars(kwargs) |
|
151 | f_args = (template_vars['resource_name'], | 

152 | template_vars['confirmed_total'],) | |
|
152 | f_args = (template_vars["resource_name"], template_vars["confirmed_total"]) | |
|
153 | 153 | message = "Daily report digest: %s - %s reports" % f_args |
|
154 | message += '{}\n'.format(template_vars['destination_url']) | 

155 | for room in self.integration.config['rooms'].split(','): | 

154 | message += "{}\n".format(template_vars["destination_url"]) | |
|
155 | for room in self.integration.config["rooms"].split(","): | |
|
156 | 156 | self.client.speak_to_room(room.strip(), message) |
|
157 | 157 | |
|
158 | log_msg = 'DIGEST : %s via %s :: %s reports' % ( | 

159 | kwargs['user'].user_name, | 

158 | log_msg = "DIGEST : %s via %s :: %s reports" % ( | |
|
159 | kwargs["user"].user_name, | |
|
160 | 160 | self.channel_visible_value, |
|
161 | template_vars['confirmed_total']) | 

161 | template_vars["confirmed_total"], | |
|
162 | ) | |
|
162 | 163 | log.warning(log_msg) |
|
163 | 164 | |
|
164 | 165 | def notify_chart_alert(self, **kwargs): |
|
165 | 166 | """ |
|
166 | 167 | Build and send chart alert notification |
|
167 | 168 | |
|
168 | 169 | Kwargs: |
|
169 | 170 | application: application that the event applies for, |
|
170 | 171 | event: event that is notified, |
|
171 | 172 | user: user that should be notified |
|
172 | 173 | request: request object |
|
173 | 174 | |
|
174 | 175 | """ |
|
175 | 176 | template_vars = self.chart_alert_notification_vars(kwargs) |
|
176 | message = 'ALERT {}: value in "{}" chart: ' \ | |
|
177 | 'met alert "{}" criteria {} \n'.format( | |
|
178 | template_vars['alert_action'], | |
|
179 | template_vars['chart_name'], | 

180 | template_vars['action_name'], | 

181 | template_vars['destination_url'] | 

177 | message = ( | |
|
178 | 'ALERT {}: value in "{}" chart: ' | |
|
179 | 'met alert "{}" criteria {} \n'.format( | |
|
180 | template_vars["alert_action"], | |
|
181 | template_vars["chart_name"], | |
|
182 | template_vars["action_name"], | |
|
183 | template_vars["destination_url"], | |
|
184 | ) | |
|
182 | 185 | ) |
|
183 | 186 | |
|
184 | for item in template_vars['readable_values']: | 

185 | message += '{}: {}\n'.format(item['label'], item['value']) | 

187 | for item in template_vars["readable_values"]: | |
|
188 | message += "{}: {}\n".format(item["label"], item["value"]) | |
|
186 | 189 | |
|
187 | for room in self.integration.config['rooms'].split(','): | 

188 | self.client.speak_to_room(room.strip(), message, sound='VUVUZELA') | 

190 | for room in self.integration.config["rooms"].split(","): | |
|
191 | self.client.speak_to_room(room.strip(), message, sound="VUVUZELA") |
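
Every Campfire method above ends with the same fan-out: one message spoken to each room listed in the integration config. A self-contained sketch of that loop, with a stand-in client instead of the real CampfireIntegration one:

    class StubCampfireClient:
        def speak_to_room(self, room, message, sound=None):
            print("room=%s sound=%s :: %s" % (room, sound, message))

    config = {"rooms": "ops, alerts"}  # hypothetical integration config
    client = StubCampfireClient()
    for room in config["rooms"].split(","):
        client.speak_to_room(room.strip(),
                             "ALERT OPEN: my-app has uptime issues",  # hypothetical
                             sound="VUVUZELA")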
@@ -1,175 +1,192 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | from appenlight.models.alert_channel import AlertChannel |
|
19 | 19 | from appenlight.models.services.user import UserService |
|
20 | 20 | from webhelpers2.text import truncate |
|
21 | 21 | |
|
22 | 22 | log = logging.getLogger(__name__) |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class EmailAlertChannel(AlertChannel): |
|
26 | 26 | """ |
|
27 | 27 | Default email alerting channel |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | __mapper_args__ = { | |
|
31 | 'polymorphic_identity': 'email' | |
|
32 | } | |
|
30 | __mapper_args__ = {"polymorphic_identity": "email"} | |
|
33 | 31 | |
|
34 | 32 | def notify_reports(self, **kwargs): |
|
35 | 33 | """ |
|
36 | 34 | Notify user of individual reports |
|
37 | 35 | |
|
38 | 36 | kwargs: |
|
39 | 37 | application: application that the event applies for, |
|
40 | 38 | user: user that should be notified |
|
41 | 39 | request: request object |
|
42 | 40 | since_when: reports are newer than this time value, |
|
43 | 41 | reports: list of reports to render |
|
44 | 42 | |
|
45 | 43 | """ |
|
46 | 44 | template_vars = self.report_alert_notification_vars(kwargs) |
|
47 | 45 | |
|
48 | if template_vars['confirmed_total'] > 1: | 

46 | if template_vars["confirmed_total"] > 1: | |
|
49 | 47 | template_vars["title"] = "AppEnlight :: %s - %s reports" % ( |
|
50 | template_vars['resource_name'], | 

51 | template_vars['confirmed_total'], | 

48 | template_vars["resource_name"], | |
|
49 | template_vars["confirmed_total"], | |
|
52 | 50 | ) |
|
53 | 51 | else: |
|
54 | error_title = truncate(template_vars['reports'][0][1].error or | 

55 | 'slow report', 20) | |
|
52 | error_title = truncate( | |
|
53 | template_vars["reports"][0][1].error or "slow report", 20 | |
|
54 | ) | |
|
56 | 55 | template_vars["title"] = "AppEnlight :: %s - '%s' report" % ( |
|
57 | template_vars['resource_name'], | 

58 | error_title) | 

59 | UserService.send_email(kwargs['request'], | |
|
60 | [self.channel_value], | |
|
61 | template_vars, | |
|
62 | '/email_templates/notify_reports.jinja2') | |
|
63 | log_msg = 'NOTIFY : %s via %s :: %s reports' % ( | |
|
64 | kwargs['user'].user_name, | |
|
56 | template_vars["resource_name"], | |
|
57 | error_title, | |
|
58 | ) | |
|
59 | UserService.send_email( | |
|
60 | kwargs["request"], | |
|
61 | [self.channel_value], | |
|
62 | template_vars, | |
|
63 | "/email_templates/notify_reports.jinja2", | |
|
64 | ) | |
|
65 | log_msg = "NOTIFY : %s via %s :: %s reports" % ( | |
|
66 | kwargs["user"].user_name, | |
|
65 | 67 | self.channel_visible_value, |
|
66 | template_vars['confirmed_total']) | 

68 | template_vars["confirmed_total"], | |
|
69 | ) | |
|
67 | 70 | log.warning(log_msg) |
|
68 | 71 | |
|
69 | 72 | def send_digest(self, **kwargs): |
|
70 | 73 | """ |
|
71 | 74 | Build and send daily digest notification |
|
72 | 75 | |
|
73 | 76 | kwargs: |
|
74 | 77 | application: application that the event applies for, |
|
75 | 78 | user: user that should be notified |
|
76 | 79 | request: request object |
|
77 | 80 | since_when: reports are newer than this time value, |
|
78 | 81 | reports: list of reports to render |
|
79 | 82 | |
|
80 | 83 | """ |
|
81 | 84 | template_vars = self.report_alert_notification_vars(kwargs) |
|
82 | 85 | title = "AppEnlight :: Daily report digest: %s - %s reports" |
|
83 | 86 | template_vars["email_title"] = title % ( |
|
84 | template_vars['resource_name'], | 

85 | template_vars['confirmed_total'], | 

87 | template_vars["resource_name"], | |
|
88 | template_vars["confirmed_total"], | |
|
86 | 89 | ) |
|
87 | 90 | |
|
88 | UserService.send_email(kwargs['request'], | 

89 | [self.channel_value], | |
|
90 | template_vars, | |
|
91 | '/email_templates/notify_reports.jinja2', | |
|
92 | immediately=True, | |
|
93 | silent=True) | |
|
94 | log_msg = 'DIGEST : %s via %s :: %s reports' % ( | |
|
95 | kwargs['user'].user_name, | |
|
91 | UserService.send_email( | |
|
92 | kwargs["request"], | |
|
93 | [self.channel_value], | |
|
94 | template_vars, | |
|
95 | "/email_templates/notify_reports.jinja2", | |
|
96 | immediately=True, | |
|
97 | silent=True, | |
|
98 | ) | |
|
99 | log_msg = "DIGEST : %s via %s :: %s reports" % ( | |
|
100 | kwargs["user"].user_name, | |
|
96 | 101 | self.channel_visible_value, |
|
97 |
template_vars[ |
|
|
102 | template_vars["confirmed_total"], | |
|
103 | ) | |
|
98 | 104 | log.warning(log_msg) |
|
99 | 105 | |
|
100 | 106 | def notify_report_alert(self, **kwargs): |
|
101 | 107 | """ |
|
102 | 108 | Build and send report alert notification |
|
103 | 109 | |
|
104 | 110 | Kwargs: |
|
105 | 111 | application: application that the event applies for, |
|
106 | 112 | event: event that is notified, |
|
107 | 113 | user: user that should be notified |
|
108 | 114 | request: request object |
|
109 | 115 | |
|
110 | 116 | """ |
|
111 | 117 | template_vars = self.report_alert_notification_vars(kwargs) |
|
112 | 118 | |
|
113 | if kwargs['event'].unified_alert_action() == 'OPEN': | 

114 | title = 'AppEnlight :: ALERT %s: %s - %s %s' % ( | 

115 | template_vars['alert_action'], | 

116 | template_vars['resource_name'], | 

117 | kwargs['event'].values['reports'], | 

118 | template_vars['report_type'], | 

119 | if kwargs["event"].unified_alert_action() == "OPEN": | |
|
120 | title = "AppEnlight :: ALERT %s: %s - %s %s" % ( | |
|
121 | template_vars["alert_action"], | |
|
122 | template_vars["resource_name"], | |
|
123 | kwargs["event"].values["reports"], | |
|
124 | template_vars["report_type"], | |
|
119 | 125 | ) |
|
120 | 126 | else: |
|
121 | title = 'AppEnlight :: ALERT %s: %s type: %s' % ( | 

122 | template_vars['alert_action'], | 

123 | template_vars['resource_name'], | 

124 | template_vars['alert_type'].replace('_', ' '), | 

127 | title = "AppEnlight :: ALERT %s: %s type: %s" % ( | |
|
128 | template_vars["alert_action"], | |
|
129 | template_vars["resource_name"], | |
|
130 | template_vars["alert_type"].replace("_", " "), | |
|
125 | 131 | ) |
|
126 | template_vars['email_title'] = title | 

127 | UserService.send_email(kwargs['request'], [self.channel_value], | 

128 | template_vars, | |
|
129 | '/email_templates/alert_reports.jinja2') | |
|
132 | template_vars["email_title"] = title | |
|
133 | UserService.send_email( | |
|
134 | kwargs["request"], | |
|
135 | [self.channel_value], | |
|
136 | template_vars, | |
|
137 | "/email_templates/alert_reports.jinja2", | |
|
138 | ) | |
|
130 | 139 | |
|
131 | 140 | def notify_uptime_alert(self, **kwargs): |
|
132 | 141 | """ |
|
133 | 142 | Build and send uptime alert notification |
|
134 | 143 | |
|
135 | 144 | Kwargs: |
|
136 | 145 | application: application that the event applies for, |
|
137 | 146 | event: event that is notified, |
|
138 | 147 | user: user that should be notified |
|
139 | 148 | request: request object |
|
140 | 149 | |
|
141 | 150 | """ |
|
142 | 151 | template_vars = self.uptime_alert_notification_vars(kwargs) |
|
143 | title = 'AppEnlight :: ALERT %s: %s has uptime issues' % ( | 

144 | template_vars['alert_action'], | 

145 | template_vars['resource_name'], | 

152 | title = "AppEnlight :: ALERT %s: %s has uptime issues" % ( | |
|
153 | template_vars["alert_action"], | |
|
154 | template_vars["resource_name"], | |
|
146 | 155 | ) |
|
147 | template_vars['email_title'] = title | 

156 | template_vars["email_title"] = title | |
|
148 | 157 | |
|
149 | UserService.send_email(kwargs['request'], [self.channel_value], | 

150 | template_vars, | |
|
151 | '/email_templates/alert_uptime.jinja2') | |
|
158 | UserService.send_email( | |
|
159 | kwargs["request"], | |
|
160 | [self.channel_value], | |
|
161 | template_vars, | |
|
162 | "/email_templates/alert_uptime.jinja2", | |
|
163 | ) | |
|
152 | 164 | |
|
153 | 165 | def notify_chart_alert(self, **kwargs): |
|
154 | 166 | """ |
|
155 | 167 | Build and send chart alert notification |
|
156 | 168 | |
|
157 | 169 | Kwargs: |
|
158 | 170 | application: application that the event applies for, |
|
159 | 171 | event: event that is notified, |
|
160 | 172 | user: user that should be notified |
|
161 | 173 | request: request object |
|
162 | 174 | |
|
163 | 175 | """ |
|
164 | 176 | template_vars = self.chart_alert_notification_vars(kwargs) |
|
165 | 177 | |
|
166 | title = 'AppEnlight :: ALERT {} value in "{}" chart' \ | |
|
167 | ' met alert "{}" criteria'.format( | |
|
168 | template_vars['alert_action'], | |
|
169 | template_vars['chart_name'], | 

170 | template_vars['action_name'], | 

178 | title = ( | |
|
179 | 'AppEnlight :: ALERT {} value in "{}" chart' | |
|
180 | ' met alert "{}" criteria'.format( | |
|
181 | template_vars["alert_action"], | |
|
182 | template_vars["chart_name"], | |
|
183 | template_vars["action_name"], | |
|
184 | ) | |
|
185 | ) | |
|
186 | template_vars["email_title"] = title | |
|
187 | UserService.send_email( | |
|
188 | kwargs["request"], | |
|
189 | [self.channel_value], | |
|
190 | template_vars, | |
|
191 | "/email_templates/alert_chart.jinja2", | |
|
171 | 192 | ) |
|
172 | template_vars['email_title'] = title | |
|
173 | UserService.send_email(kwargs['request'], [self.channel_value], | |
|
174 | template_vars, | |
|
175 | '/email_templates/alert_chart.jinja2') |
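
The email titles above are plain %-formatted strings over the notification variables. A runnable sketch of the alert-title branch, with placeholder values for the template variables:

    template_vars = {
        "alert_action": "OPEN",              # hypothetical
        "resource_name": "my-app",           # hypothetical
        "alert_type": "slow_report_alert",   # hypothetical
    }
    title = "AppEnlight :: ALERT %s: %s type: %s" % (
        template_vars["alert_action"],
        template_vars["resource_name"],
        template_vars["alert_type"].replace("_", " "),
    )
    # 'AppEnlight :: ALERT OPEN: my-app type: slow report alert'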
@@ -1,233 +1,225 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | from appenlight.models.alert_channel import AlertChannel |
|
19 | 19 | from appenlight.models.integrations.flowdock import FlowdockIntegration |
|
20 | 20 | from webhelpers2.text import truncate |
|
21 | 21 | |
|
22 | 22 | log = logging.getLogger(__name__) |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class FlowdockAlertChannel(AlertChannel): |
|
26 | __mapper_args__ = { | |
|
27 | 'polymorphic_identity': 'flowdock' | |
|
28 | } | |
|
26 | __mapper_args__ = {"polymorphic_identity": "flowdock"} | |
|
29 | 27 | |
|
30 | 28 | def notify_reports(self, **kwargs): |
|
31 | 29 | """ |
|
32 | 30 | Notify user of individual reports |
|
33 | 31 | |
|
34 | 32 | kwargs: |
|
35 | 33 | application: application that the event applies for, |
|
36 | 34 | user: user that should be notified |
|
37 | 35 | request: request object |
|
38 | 36 | since_when: reports are newer than this time value, |
|
39 | 37 | reports: list of reports to render |
|
40 | 38 | |
|
41 | 39 | """ |
|
42 | 40 | template_vars = self.report_alert_notification_vars(kwargs) |
|
43 | 41 | |
|
44 | app_url = kwargs['request'].registry.settings['_mail_url'] | 

45 | destination_url = kwargs['request'].route_url('/', | 

46 | _app_url=app_url) | |
|
47 | f_args = ('report', | 

48 | template_vars['resource'].resource_id, | 

49 | template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'), | 

50 | template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M')) | 

51 | destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format( | |
|
52 | *f_args) | |
|
53 | ||
|
54 | if template_vars['confirmed_total'] > 1: | 

42 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
|
43 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
|
44 | f_args = ( | |
|
45 | "report", | |
|
46 | template_vars["resource"].resource_id, | |
|
47 | template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"), | |
|
48 | template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"), | |
|
49 | ) | |
|
50 | destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args) | |
|
51 | ||
|
52 | if template_vars["confirmed_total"] > 1: | |
|
55 | 53 | template_vars["title"] = "%s - %s reports" % ( |
|
56 | template_vars['resource_name'], | 

57 | template_vars['confirmed_total'], | 

54 | template_vars["resource_name"], | |
|
55 | template_vars["confirmed_total"], | |
|
58 | 56 | ) |
|
59 | 57 | else: |
|
60 | error_title = truncate(template_vars['reports'][0][1].error or | 

61 | 'slow report', 90) | |
|
58 | error_title = truncate( | |
|
59 | template_vars["reports"][0][1].error or "slow report", 90 | |
|
60 | ) | |
|
62 | 61 | template_vars["title"] = "%s - '%s' report" % ( |
|
63 | template_vars['resource_name'], | 

64 | error_title) | 

62 | template_vars["resource_name"], | |
|
63 | error_title, | |
|
64 | ) | |
|
65 | 65 | |
|
66 | log_msg = 'NOTIFY : %s via %s :: %s reports' % ( | 

67 | kwargs['user'].user_name, | 

66 | log_msg = "NOTIFY : %s via %s :: %s reports" % ( | |
|
67 | kwargs["user"].user_name, | |
|
68 | 68 | self.channel_visible_value, |
|
69 | template_vars['confirmed_total']) | 

69 | template_vars["confirmed_total"], | |
|
70 | ) | |
|
70 | 71 | log.warning(log_msg) |
|
71 | 72 | |
|
72 | client = FlowdockIntegration.create_client( | |
|
73 | self.integration.config['api_token']) | |
|
73 | client = FlowdockIntegration.create_client(self.integration.config["api_token"]) | |
|
74 | 74 | payload = { |
|
75 | 75 | "source": "AppEnlight", |
|
76 | "from_address": kwargs['request'].registry.settings[ | 

77 | 'mailing.from_email'], | |
|
76 | "from_address": kwargs["request"].registry.settings["mailing.from_email"], | |
|
78 | 77 | "subject": template_vars["title"], |
|
79 | 78 | "content": "New report present", |
|
80 | 79 | "tags": ["appenlight"], |
|
81 | "link": destination_url | |
|
80 | "link": destination_url, | |
|
82 | 81 | } |
|
83 | 82 | client.send_to_inbox(payload) |
|
84 | 83 | |
|
85 | 84 | def notify_report_alert(self, **kwargs): |
|
86 | 85 | """ |
|
87 | 86 | Build and send report alert notification |
|
88 | 87 | |
|
89 | 88 | Kwargs: |
|
90 | 89 | application: application that the event applies for, |
|
91 | 90 | event: event that is notified, |
|
92 | 91 | user: user that should be notified |
|
93 | 92 | request: request object |
|
94 | 93 | |
|
95 | 94 | """ |
|
96 | 95 | template_vars = self.report_alert_notification_vars(kwargs) |
|
97 | 96 | |
|
98 | if kwargs['event'].unified_alert_action() == 'OPEN': | 

97 | if kwargs["event"].unified_alert_action() == "OPEN": | |
|
99 | 98 | |
|
100 | title = 'ALERT %s: %s - %s %s' % ( | 

101 | template_vars['alert_action'], | 

102 | template_vars['resource_name'], | 

103 | kwargs['event'].values['reports'], | 

104 | template_vars['report_type'], | 

99 | title = "ALERT %s: %s - %s %s" % ( | |
|
100 | template_vars["alert_action"], | |
|
101 | template_vars["resource_name"], | |
|
102 | kwargs["event"].values["reports"], | |
|
103 | template_vars["report_type"], | |
|
105 | 104 | ) |
|
106 | 105 | |
|
107 | 106 | else: |
|
108 | title = 'ALERT %s: %s type: %s' % ( | 

109 | template_vars['alert_action'], | 

110 | template_vars['resource_name'], | 

111 | template_vars['alert_type'].replace('_', ' '), | 

107 | title = "ALERT %s: %s type: %s" % ( | |
|
108 | template_vars["alert_action"], | |
|
109 | template_vars["resource_name"], | |
|
110 | template_vars["alert_type"].replace("_", " "), | |
|
112 | 111 | ) |
|
113 | 112 | |
|
114 | client = FlowdockIntegration.create_client( | |
|
115 | self.integration.config['api_token']) | |
|
113 | client = FlowdockIntegration.create_client(self.integration.config["api_token"]) | |
|
116 | 114 | payload = { |
|
117 | 115 | "source": "AppEnlight", |
|
118 | "from_address": kwargs['request'].registry.settings[ | 

119 | 'mailing.from_email'], | |
|
116 | "from_address": kwargs["request"].registry.settings["mailing.from_email"], | |
|
120 | 117 | "subject": title, |
|
121 | "content": 'Investigation required', | 

122 | "tags": ["appenlight", "alert", template_vars['alert_type']], | 

123 | "link": template_vars['destination_url'] | 

118 | "content": "Investigation required", | |
|
119 | "tags": ["appenlight", "alert", template_vars["alert_type"]], | |
|
120 | "link": template_vars["destination_url"], | |
|
124 | 121 | } |
|
125 | 122 | client.send_to_inbox(payload) |
|
126 | 123 | |
|
127 | 124 | def notify_uptime_alert(self, **kwargs): |
|
128 | 125 | """ |
|
129 | 126 | Build and send uptime alert notification |
|
130 | 127 | |
|
131 | 128 | Kwargs: |
|
132 | 129 | application: application that the event applies for, |
|
133 | 130 | event: event that is notified, |
|
134 | 131 | user: user that should be notified |
|
135 | 132 | request: request object |
|
136 | 133 | |
|
137 | 134 | """ |
|
138 | 135 | template_vars = self.uptime_alert_notification_vars(kwargs) |
|
139 | 136 | |
|
140 | message = 'ALERT %s: %s has uptime issues' % ( | 

141 | template_vars['alert_action'], | 

142 | template_vars['resource_name'], | 

137 | message = "ALERT %s: %s has uptime issues" % ( | |
|
138 | template_vars["alert_action"], | |
|
139 | template_vars["resource_name"], | |
|
143 | 140 | ) |
|
144 | submessage = 'Info: ' | 

145 | submessage += template_vars['reason'] | 

141 | submessage = "Info: " | |
|
142 | submessage += template_vars["reason"] | |
|
146 | 143 | |
|
147 | client = FlowdockIntegration.create_client( | |
|
148 | self.integration.config['api_token']) | |
|
144 | client = FlowdockIntegration.create_client(self.integration.config["api_token"]) | |
|
149 | 145 | payload = { |
|
150 | 146 | "source": "AppEnlight", |
|
151 | "from_address": kwargs['request'].registry.settings[ | 

152 | 'mailing.from_email'], | |
|
147 | "from_address": kwargs["request"].registry.settings["mailing.from_email"], | |
|
153 | 148 | "subject": message, |
|
154 | 149 | "content": submessage, |
|
155 | "tags": ["appenlight", "alert", 'uptime'], | 

156 | "link": template_vars['destination_url'] | 

150 | "tags": ["appenlight", "alert", "uptime"], | |
|
151 | "link": template_vars["destination_url"], | |
|
157 | 152 | } |
|
158 | 153 | client.send_to_inbox(payload) |
|
159 | 154 | |
|
160 | 155 | def send_digest(self, **kwargs): |
|
161 | 156 | """ |
|
162 | 157 | Build and send daily digest notification |
|
163 | 158 | |
|
164 | 159 | kwargs: |
|
165 | 160 | application: application that the event applies for, |
|
166 | 161 | user: user that should be notified |
|
167 | 162 | request: request object |
|
168 | 163 | since_when: reports are newer than this time value, |
|
169 | 164 | reports: list of reports to render |
|
170 | 165 | |
|
171 | 166 | """ |
|
172 | 167 | template_vars = self.report_alert_notification_vars(kwargs) |
|
173 | 168 | message = "Daily report digest: %s - %s reports" % ( |
|
174 | template_vars['resource_name'], template_vars['confirmed_total']) | 

169 | template_vars["resource_name"], | |
|
170 | template_vars["confirmed_total"], | |
|
171 | ) | |
|
175 | 172 | |
|
176 | f_args = (template_vars['confirmed_total'], | 

177 | template_vars['timestamp']) | |
|
173 | f_args = (template_vars["confirmed_total"], template_vars["timestamp"]) | |
|
178 | 174 | |
|
179 | 175 | payload = { |
|
180 | 176 | "source": "AppEnlight", |
|
181 | "from_address": kwargs['request'].registry.settings[ | 

182 | 'mailing.from_email'], | |
|
177 | "from_address": kwargs["request"].registry.settings["mailing.from_email"], | |
|
183 | 178 | "subject": message, |
|
184 | "content": '%s reports in total since %s' % f_args, | 

179 | "content": "%s reports in total since %s" % f_args, | |
|
185 | 180 | "tags": ["appenlight", "digest"], |
|
186 | "link": template_vars['destination_url'] | 

181 | "link": template_vars["destination_url"], | |
|
187 | 182 | } |
|
188 | 183 | |
|
189 | client = FlowdockIntegration.create_client( | |
|
190 | self.integration.config['api_token']) | |
|
184 | client = FlowdockIntegration.create_client(self.integration.config["api_token"]) | |
|
191 | 185 | client.send_to_inbox(payload) |
|
192 | 186 | |
|
193 | log_msg = 'DIGEST : %s via %s :: %s reports' % ( | 

194 | kwargs['user'].user_name, | 

187 | log_msg = "DIGEST : %s via %s :: %s reports" % ( | |
|
188 | kwargs["user"].user_name, | |
|
195 | 189 | self.channel_visible_value, |
|
196 | template_vars['confirmed_total']) | 

190 | template_vars["confirmed_total"], | |
|
191 | ) | |
|
197 | 192 | log.warning(log_msg) |
|
198 | 193 | |
|
199 | 194 | def notify_chart_alert(self, **kwargs): |
|
200 | 195 | """ |
|
201 | 196 | Build and send chart alert notification |
|
202 | 197 | |
|
203 | 198 | Kwargs: |
|
204 | 199 | application: application that the event applies for, |
|
205 | 200 | event: event that is notified, |
|
206 | 201 | user: user that should be notified |
|
207 | 202 | request: request object |
|
208 | 203 | |
|
209 | 204 | """ |
|
210 | 205 | template_vars = self.chart_alert_notification_vars(kwargs) |
|
211 | 206 | |
|
212 | message = 'ALERT {}: value in "{}" chart ' \ | 

213 | 'met alert "{}" criteria'.format( | 

214 | template_vars['alert_action'], | 

215 | template_vars['chart_name'], | 

216 | template_vars['action_name'], | |
|
207 | message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format( | |
|
208 | template_vars["alert_action"], | |
|
209 | template_vars["chart_name"], | |
|
210 | template_vars["action_name"], | |
|
217 | 211 | ) |
|
218 | submessage = 'Info: ' | 

219 | for item in template_vars['readable_values']: | 

220 | submessage += '{}: {}\n'.format(item['label'], item['value']) | 

212 | submessage = "Info: " | |
|
213 | for item in template_vars["readable_values"]: | |
|
214 | submessage += "{}: {}\n".format(item["label"], item["value"]) | |
|
221 | 215 | |
|
222 | client = FlowdockIntegration.create_client( | |
|
223 | self.integration.config['api_token']) | |
|
216 | client = FlowdockIntegration.create_client(self.integration.config["api_token"]) | |
|
224 | 217 | payload = { |
|
225 | 218 | "source": "AppEnlight", |
|
226 | "from_address": kwargs['request'].registry.settings[ | 

227 | 'mailing.from_email'], | |
|
219 | "from_address": kwargs["request"].registry.settings["mailing.from_email"], | |
|
228 | 220 | "subject": message, |
|
229 | 221 | "content": submessage, |
|
230 | "tags": ["appenlight", "alert", 'chart'], | 

231 | "link": template_vars['destination_url'] | 

222 | "tags": ["appenlight", "alert", "chart"], | |
|
223 | "link": template_vars["destination_url"], | |
|
232 | 224 | } |
|
233 | 225 | client.send_to_inbox(payload) |
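
Each Flowdock method above builds the same inbox payload shape and hands it to the integration client. A sketch of that dict with placeholder values standing in for the registry settings and template variables:

    payload = {
        "source": "AppEnlight",
        "from_address": "alerts@example.com",  # stands in for mailing.from_email
        "subject": "ALERT OPEN: my-app type: slow report",  # hypothetical
        "content": "Investigation required",
        "tags": ["appenlight", "alert", "slow_report"],
        "link": "https://appenlight.local/ui/report/list_slow?resource=42",
    }
    # client.send_to_inbox(payload), per the code above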
@@ -1,229 +1,238 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | from appenlight.models.alert_channel import AlertChannel |
|
19 | 19 | from appenlight.models.integrations.hipchat import HipchatIntegration |
|
20 | 20 | from webhelpers2.text import truncate |
|
21 | 21 | |
|
22 | 22 | log = logging.getLogger(__name__) |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class HipchatAlertChannel(AlertChannel): |
|
26 | __mapper_args__ = { | |
|
27 | 'polymorphic_identity': 'hipchat' | |
|
28 | } | |
|
26 | __mapper_args__ = {"polymorphic_identity": "hipchat"} | |
|
29 | 27 | |
|
30 | 28 | def notify_reports(self, **kwargs): |
|
31 | 29 | """ |
|
32 | 30 | Notify user of individual reports |
|
33 | 31 | |
|
34 | 32 | kwargs: |
|
35 | 33 | application: application that the event applies for, |
|
36 | 34 | user: user that should be notified |
|
37 | 35 | request: request object |
|
38 | 36 | since_when: reports are newer than this time value, |
|
39 | 37 | reports: list of reports to render |
|
40 | 38 | |
|
41 | 39 | """ |
|
42 | 40 | template_vars = self.report_alert_notification_vars(kwargs) |
|
43 | 41 | |
|
44 | app_url = kwargs['request'].registry.settings['_mail_url'] | 

45 | destination_url = kwargs['request'].route_url('/', | 

46 | _app_url=app_url) | |
|
47 | f_args = ('report', | 

48 | template_vars['resource'].resource_id, | 

49 | template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'), | 

50 | template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M')) | 

51 | destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format( | |
|
52 | *f_args) | |
|
53 | ||
|
54 | if template_vars['confirmed_total'] > 1: | 

42 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
|
43 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
|
44 | f_args = ( | |
|
45 | "report", | |
|
46 | template_vars["resource"].resource_id, | |
|
47 | template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"), | |
|
48 | template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"), | |
|
49 | ) | |
|
50 | destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args) | |
|
51 | ||
|
52 | if template_vars["confirmed_total"] > 1: | |
|
55 | 53 | template_vars["title"] = "%s - %s reports" % ( |
|
56 | template_vars['resource_name'], | 

57 | template_vars['confirmed_total'], | 

54 | template_vars["resource_name"], | |
|
55 | template_vars["confirmed_total"], | |
|
58 | 56 | ) |
|
59 | 57 | else: |
|
60 | error_title = truncate(template_vars['reports'][0][1].error or | 

61 | 'slow report', 90) | |
|
58 | error_title = truncate( | |
|
59 | template_vars["reports"][0][1].error or "slow report", 90 | |
|
60 | ) | |
|
62 | 61 | template_vars["title"] = "%s - '%s' report" % ( |
|
63 | template_vars['resource_name'], | 

64 | error_title) | 

62 | template_vars["resource_name"], | |
|
63 | error_title, | |
|
64 | ) | |
|
65 | 65 | |
|
66 | template_vars["title"] += ' ' + destination_url | 

66 | template_vars["title"] += " " + destination_url | |
|
67 | 67 | |
|
68 | log_msg = 'NOTIFY : %s via %s :: %s reports' % ( | 

69 | kwargs['user'].user_name, | 

68 | log_msg = "NOTIFY : %s via %s :: %s reports" % ( | |
|
69 | kwargs["user"].user_name, | |
|
70 | 70 | self.channel_visible_value, |
|
71 | template_vars['confirmed_total']) | 

71 | template_vars["confirmed_total"], | |
|
72 | ) | |
|
72 | 73 | log.warning(log_msg) |
|
73 | 74 | |
|
74 | client = HipchatIntegration.create_client( | |
|
75 | self.integration.config['api_token']) | 

76 | for room in self.integration.config['rooms'].split(','): | 

77 | client.send({ | 

78 | "message_format": 'text', | 

79 | "message": template_vars["title"], | 

80 | "from": "AppEnlight", | 

81 | "room_id": room.strip(), | 

82 | "color": "yellow" | 

83 | }) | 

75 | client = HipchatIntegration.create_client(self.integration.config["api_token"]) | |
|
76 | for room in self.integration.config["rooms"].split(","): | |
|
77 | client.send( | |
|
78 | { | |
|
79 | "message_format": "text", | |
|
80 | "message": template_vars["title"], | |
|
81 | "from": "AppEnlight", | |
|
82 | "room_id": room.strip(), | |
|
83 | "color": "yellow", | |
|
84 | } | |
|
85 | ) | |
|
84 | 86 | |
|
85 | 87 | def notify_report_alert(self, **kwargs): |
|
86 | 88 | """ |
|
87 | 89 | Build and send report alert notification |
|
88 | 90 | |
|
89 | 91 | Kwargs: |
|
90 | 92 | application: application that the event applies for, |
|
91 | 93 | event: event that is notified, |
|
92 | 94 | user: user that should be notified |
|
93 | 95 | request: request object |
|
94 | 96 | |
|
95 | 97 | """ |
|
96 | 98 | template_vars = self.report_alert_notification_vars(kwargs) |
|
97 | 99 | |
|
98 | if kwargs['event'].unified_alert_action() == 'OPEN': | 

100 | if kwargs["event"].unified_alert_action() == "OPEN": | |
|
99 | 101 | |
|
100 | title = 'ALERT %s: %s - %s %s' % ( | 

101 | template_vars['alert_action'], | 

102 | template_vars['resource_name'], | 

103 | kwargs['event'].values['reports'], | 

104 | template_vars['report_type'], | 

102 | title = "ALERT %s: %s - %s %s" % ( | |
|
103 | template_vars["alert_action"], | |
|
104 | template_vars["resource_name"], | |
|
105 | kwargs["event"].values["reports"], | |
|
106 | template_vars["report_type"], | |
|
105 | 107 | ) |
|
106 | 108 | |
|
107 | 109 | else: |
|
108 | title = 'ALERT %s: %s type: %s' % ( | 

109 | template_vars['alert_action'], | 

110 | template_vars['resource_name'], | 

111 | template_vars['alert_type'].replace('_', ' '), | 

110 | title = "ALERT %s: %s type: %s" % ( | |
|
111 | template_vars["alert_action"], | |
|
112 | template_vars["resource_name"], | |
|
113 | template_vars["alert_type"].replace("_", " "), | |
|
112 | 114 | ) |
|
113 | 115 | |
|
114 | title += '\n ' + template_vars['destination_url'] | 

116 | title += "\n " + template_vars["destination_url"] | |
|
115 | 117 | |
|
116 | api_token = self.integration.config['api_token'] | 

118 | api_token = self.integration.config["api_token"] | |
|
117 | 119 | client = HipchatIntegration.create_client(api_token) |
|
118 | for room in self.integration.config['rooms'].split(','): | 

119 | client.send({ | 

120 | "message_format": 'text', | 

121 | "message": title, | 

122 | "from": "AppEnlight", | 

123 | "room_id": room.strip(), | 

124 | "color": "red", | 

125 | "notify": '1' | 

126 | }) | |
|
120 | for room in self.integration.config["rooms"].split(","): | |
|
121 | client.send( | |
|
122 | { | |
|
123 | "message_format": "text", | |
|
124 | "message": title, | |
|
125 | "from": "AppEnlight", | |
|
126 | "room_id": room.strip(), | |
|
127 | "color": "red", | |
|
128 | "notify": "1", | |
|
129 | } | |
|
130 | ) | |
|
127 | 131 | |
|
128 | 132 | def notify_uptime_alert(self, **kwargs): |
|
129 | 133 | """ |
|
130 | 134 | Build and send uptime alert notification |
|
131 | 135 | |
|
132 | 136 | Kwargs: |
|
133 | 137 | application: application that the event applies for, |
|
134 | 138 | event: event that is notified, |
|
135 | 139 | user: user that should be notified |
|
136 | 140 | request: request object |
|
137 | 141 | |
|
138 | 142 | """ |
|
139 | 143 | template_vars = self.uptime_alert_notification_vars(kwargs) |
|
140 | 144 | |
|
141 | message = 'ALERT %s: %s has uptime issues\n' % ( | 

142 | template_vars['alert_action'], | 

143 | template_vars['resource_name'], | 

145 | message = "ALERT %s: %s has uptime issues\n" % ( | |
|
146 | template_vars["alert_action"], | |
|
147 | template_vars["resource_name"], | |
|
144 | 148 | ) |
|
145 | message += template_vars['reason'] | 

146 | message += '\n{}'.format(template_vars['destination_url']) | 

149 | message += template_vars["reason"] | |
|
150 | message += "\n{}".format(template_vars["destination_url"]) | |
|
147 | 151 | |
|
148 | api_token = self.integration.config['api_token'] | 

152 | api_token = self.integration.config["api_token"] | |
|
149 | 153 | client = HipchatIntegration.create_client(api_token) |
|
150 | for room in self.integration.config['rooms'].split(','): | 

151 | client.send({ | 

152 | "message_format": 'text', | 

153 | "message": message, | 

154 | "from": "AppEnlight", | 

155 | "room_id": room.strip(), | 

156 | "color": "red", | 

157 | "notify": '1' | 

158 | }) | |
|
154 | for room in self.integration.config["rooms"].split(","): | |
|
155 | client.send( | |
|
156 | { | |
|
157 | "message_format": "text", | |
|
158 | "message": message, | |
|
159 | "from": "AppEnlight", | |
|
160 | "room_id": room.strip(), | |
|
161 | "color": "red", | |
|
162 | "notify": "1", | |
|
163 | } | |
|
164 | ) | |
|
159 | 165 | |
|
160 | 166 | def notify_chart_alert(self, **kwargs): |
|
161 | 167 | """ |
|
162 | 168 | Build and send chart alert notification |
|
163 | 169 | |
|
164 | 170 | Kwargs: |
|
165 | 171 | application: application that the event applies for, |
|
166 | 172 | event: event that is notified, |
|
167 | 173 | user: user that should be notified |
|
168 | 174 | request: request object |
|
169 | 175 | |
|
170 | 176 | """ |
|
171 | 177 | template_vars = self.chart_alert_notification_vars(kwargs) |
|
172 | message = 'ALERT {}: value in "{}" chart: ' \ | |
|
173 | 'met alert "{}" criteria\n'.format( | |
|
174 | template_vars['alert_action'], | 

175 | template_vars['chart_name'], | 

176 | template_vars['action_name'], | |
|
178 | message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format( | |
|
179 | template_vars["alert_action"], | |
|
180 | template_vars["chart_name"], | |
|
181 | template_vars["action_name"], | |
|
177 | 182 | ) |
|
178 | 183 | |
|
179 | for item in template_vars['readable_values']: | 

180 | message += '{}: {}\n'.format(item['label'], item['value']) | 

184 | for item in template_vars["readable_values"]: | |
|
185 | message += "{}: {}\n".format(item["label"], item["value"]) | |
|
181 | 186 | |
|
182 | message += template_vars['destination_url'] | 

187 | message += template_vars["destination_url"] | |
|
183 | 188 | |
|
184 | api_token = self.integration.config['api_token'] | 

189 | api_token = self.integration.config["api_token"] | |
|
185 | 190 | client = HipchatIntegration.create_client(api_token) |
|
186 | for room in self.integration.config['rooms'].split(','): | 

187 | client.send({ | 

188 | "message_format": 'text', | 

189 | "message": message, | 

190 | "from": "AppEnlight", | 

191 | "room_id": room.strip(), | 

192 | "color": "red", | 

193 | "notify": '1' | 

194 | }) | |
|
191 | for room in self.integration.config["rooms"].split(","): | |
|
192 | client.send( | |
|
193 | { | |
|
194 | "message_format": "text", | |
|
195 | "message": message, | |
|
196 | "from": "AppEnlight", | |
|
197 | "room_id": room.strip(), | |
|
198 | "color": "red", | |
|
199 | "notify": "1", | |
|
200 | } | |
|
201 | ) | |
|
195 | 202 | |
|
196 | 203 | def send_digest(self, **kwargs): |
|
197 | 204 | """ |
|
198 | 205 | Build and send daily digest notification |
|
199 | 206 | |
|
200 | 207 | kwargs: |
|
201 | 208 | application: application that the event applies for, |
|
202 | 209 | user: user that should be notified |
|
203 | 210 | request: request object |
|
204 | 211 | since_when: reports are newer than this time value, |
|
205 | 212 | reports: list of reports to render |
|
206 | 213 | |
|
207 | 214 | """ |
|
208 | 215 | template_vars = self.report_alert_notification_vars(kwargs) |
|
209 | f_args = (template_vars['resource_name'], | 

210 | template_vars['confirmed_total'],) | |
|
216 | f_args = (template_vars["resource_name"], template_vars["confirmed_total"]) | |
|
211 | 217 | message = "Daily report digest: %s - %s reports" % f_args |
|
212 | message += '\n{}'.format(template_vars['destination_url']) | 

213 | api_token = self.integration.config['api_token'] | 

218 | message += "\n{}".format(template_vars["destination_url"]) | |
|
219 | api_token = self.integration.config["api_token"] | |
|
214 | 220 | client = HipchatIntegration.create_client(api_token) |
|
215 | for room in self.integration.config['rooms'].split(','): | 

216 | client.send({ | 

217 | "message_format": 'text', | 

218 | "message": message, | 

219 | "from": "AppEnlight", | 

220 | "room_id": room.strip(), | 

221 | "color": "green", | 

222 | "notify": '1' | 

223 | }) | |
|
224 | ||
|
225 | log_msg = 'DIGEST : %s via %s :: %s reports' % ( | |
|
226 | kwargs['user'].user_name, | |
|
221 | for room in self.integration.config["rooms"].split(","): | |
|
222 | client.send( | |
|
223 | { | |
|
224 | "message_format": "text", | |
|
225 | "message": message, | |
|
226 | "from": "AppEnlight", | |
|
227 | "room_id": room.strip(), | |
|
228 | "color": "green", | |
|
229 | "notify": "1", | |
|
230 | } | |
|
231 | ) | |
|
232 | ||
|
233 | log_msg = "DIGEST : %s via %s :: %s reports" % ( | |
|
234 | kwargs["user"].user_name, | |
|
227 | 235 | self.channel_visible_value, |
|
228 | template_vars['confirmed_total']) | 

236 | template_vars["confirmed_total"], | |
|
237 | ) | |
|
229 | 238 | log.warning(log_msg) |
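
The HipChat methods above differ only in the message text, color, and notify flag; the per-room send dict is otherwise identical. A sketch of that loop with placeholder config and message:

    rooms = "ops, alerts"  # hypothetical integration config value
    message = "Daily report digest: my-app - 5 reports"  # hypothetical
    for room in rooms.split(","):
        payload = {
            "message_format": "text",
            "message": message,
            "from": "AppEnlight",
            "room_id": room.strip(),
            "color": "green",
            "notify": "1",
        }
        # client.send(payload), per the code above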
@@ -1,285 +1,270 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | from appenlight.models.alert_channel import AlertChannel |
|
19 | 19 | from appenlight.models.integrations.slack import SlackIntegration |
|
20 | 20 | from webhelpers2.text import truncate |
|
21 | 21 | |
|
22 | 22 | log = logging.getLogger(__name__) |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class SlackAlertChannel(AlertChannel): |
|
26 | __mapper_args__ = { | |
|
27 | 'polymorphic_identity': 'slack' | |
|
28 | } | |
|
26 | __mapper_args__ = {"polymorphic_identity": "slack"} | |
|
29 | 27 | |
|
30 | 28 | def notify_reports(self, **kwargs): |
|
31 | 29 | """ |
|
32 | 30 | Notify user of individual reports |
|
33 | 31 | |
|
34 | 32 | kwargs: |
|
35 | 33 | application: application that the event applies for, |
|
36 | 34 | user: user that should be notified |
|
37 | 35 | request: request object |
|
38 | 36 | since_when: reports are newer than this time value, |
|
39 | 37 | reports: list of reports to render |
|
40 | 38 | |
|
41 | 39 | """ |
|
42 | 40 | template_vars = self.report_alert_notification_vars(kwargs) |
|
43 | template_vars["title"] = template_vars['resource_name'] | 

41 | template_vars["title"] = template_vars["resource_name"] | |
|
44 | 42 | |
|
45 | if template_vars['confirmed_total'] > 1: | 

46 | template_vars['subtext'] = '%s reports' % template_vars[ | 

47 | 'confirmed_total'] | |
|
43 | if template_vars["confirmed_total"] > 1: | |
|
44 | template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"] | |
|
48 | 45 | else: |
|
49 | error_title = truncate(template_vars['reports'][0][1].error or | 

50 | 'slow report', 90) | |
|
51 | template_vars['subtext'] = error_title | |
|
46 | error_title = truncate( | |
|
47 | template_vars["reports"][0][1].error or "slow report", 90 | |
|
48 | ) | |
|
49 | template_vars["subtext"] = error_title | |
|
52 | 50 | |
|
53 | log_msg = 'NOTIFY : %s via %s :: %s reports' % ( | 

54 | kwargs['user'].user_name, | 

51 | log_msg = "NOTIFY : %s via %s :: %s reports" % ( | |
|
52 | kwargs["user"].user_name, | |
|
55 | 53 | self.channel_visible_value, |
|
56 | template_vars['confirmed_total']) | 

54 | template_vars["confirmed_total"], | |
|
55 | ) | |
|
57 | 56 | log.warning(log_msg) |
|
58 | 57 | |
|
59 | client = SlackIntegration.create_client( | |
|
60 | self.integration.config['webhook_url']) | |
|
58 | client = SlackIntegration.create_client(self.integration.config["webhook_url"]) | |
|
61 | 59 | report_data = { |
|
62 | 60 | "username": "AppEnlight", |
|
63 | 61 | "icon_emoji": ":fire:", |
|
64 | 62 | "attachments": [ |
|
65 | 63 | { |
|
66 | 64 | "mrkdwn_in": ["text", "pretext", "title", "fallback"], |
|
67 | "fallback": "*%s* - <%s| Browse>" % ( | 

68 | template_vars["title"], | 

69 | template_vars['destination_url']), | |
|
70 | "pretext": "*%s* - <%s| Browse>" % ( | |
|
71 | template_vars["title"], | |
|
72 | template_vars['destination_url']), | |
|
65 | "fallback": "*%s* - <%s| Browse>" | |
|
66 | % (template_vars["title"], template_vars["destination_url"]), | |
|
67 | "pretext": "*%s* - <%s| Browse>" | |
|
68 | % (template_vars["title"], template_vars["destination_url"]), | |
|
73 | 69 | "color": "warning", |
|
74 | 70 | "fields": [ |
|
75 | { | |
|
76 | "value": 'Info: %s' % template_vars['subtext'], | |
|
77 | "short": False | |
|
78 | } | |
|
79 | ] | |
|
71 | {"value": "Info: %s" % template_vars["subtext"], "short": False} | |
|
72 | ], | |
|
80 | 73 | } |
|
81 | ] | |
|
74 | ], | |
|
82 | 75 | } |
|
83 | 76 | client.make_request(data=report_data) |
|
84 | 77 | |
|
85 | 78 | def notify_report_alert(self, **kwargs): |
|
86 | 79 | """ |
|
87 | 80 | Build and send report alert notification |
|
88 | 81 | |
|
89 | 82 | Kwargs: |
|
90 | 83 | application: application that the event applies for, |
|
91 | 84 | event: event that is notified, |
|
92 | 85 | user: user that should be notified |
|
93 | 86 | request: request object |
|
94 | 87 | |
|
95 | 88 | """ |
|
96 | 89 | template_vars = self.report_alert_notification_vars(kwargs) |
|
97 | 90 | |
|
98 | if kwargs['event'].unified_alert_action() == 'OPEN': | 

99 | title = '*ALERT %s*: %s' % ( | 

100 | template_vars['alert_action'], | 

101 | template_vars['resource_name'], | 

91 | if kwargs["event"].unified_alert_action() == "OPEN": | |
|
92 | title = "*ALERT %s*: %s" % ( | |
|
93 | template_vars["alert_action"], | |
|
94 | template_vars["resource_name"], | |
|
102 | 95 | ) |
|
103 | 96 | |
|
104 | template_vars['subtext'] = 'Got at least %s %s' % ( | 

105 | kwargs['event'].values['reports'], | 

106 | template_vars['report_type'], | 

97 | template_vars["subtext"] = "Got at least %s %s" % ( | |
|
98 | kwargs["event"].values["reports"], | |
|
99 | template_vars["report_type"], | |
|
107 | 100 | ) |
|
108 | 101 | |
|
109 | 102 | else: |
|
110 | title = '*ALERT %s*: %s' % ( | 

111 | template_vars['alert_action'], | 

112 | template_vars['resource_name'], | 

103 | title = "*ALERT %s*: %s" % ( | |
|
104 | template_vars["alert_action"], | |
|
105 | template_vars["resource_name"], | |
|
113 | 106 | ) |
|
114 | 107 | |
|
115 | template_vars['subtext'] = '' | 

108 | template_vars["subtext"] = "" | |
|
116 | 109 | |
|
117 | alert_type = template_vars['alert_type'].replace('_', ' ') | 

118 | alert_type = alert_type.replace('alert', '').capitalize() | 

110 | alert_type = template_vars["alert_type"].replace("_", " ") | |
|
111 | alert_type = alert_type.replace("alert", "").capitalize() | |
|
119 | 112 | |
|
120 | template_vars['type'] = 'Type: %s' % alert_type | 

113 | template_vars["type"] = "Type: %s" % alert_type | |
|
121 | 114 | |
|
122 | client = SlackIntegration.create_client( | |
|
123 | self.integration.config['webhook_url'] | |
|
124 | ) | |
|
115 | client = SlackIntegration.create_client(self.integration.config["webhook_url"]) | |
|
125 | 116 | report_data = { |
|
126 | 117 | "username": "AppEnlight", |
|
127 | 118 | "icon_emoji": ":rage:", |
|
128 | 119 | "attachments": [ |
|
129 | 120 | { |
|
130 | 121 | "mrkdwn_in": ["text", "pretext", "title", "fallback"], |
|
131 | "fallback": "%s - <%s| Browse>" % ( | 

132 | title, template_vars['destination_url']), | 

133 | "pretext": "%s - <%s| Browse>" % ( | 

134 | title, template_vars['destination_url']), | 

122 | "fallback": "%s - <%s| Browse>" | |
|
123 | % (title, template_vars["destination_url"]), | |
|
124 | "pretext": "%s - <%s| Browse>" | |
|
125 | % (title, template_vars["destination_url"]), | |
|
135 | 126 | "color": "danger", |
|
136 | 127 | "fields": [ |
|
137 | 128 | { |
|
138 | "title": template_vars['type'], | 

139 | "value": template_vars['subtext'], | 

140 | "short": False | |
|
129 | "title": template_vars["type"], | |
|
130 | "value": template_vars["subtext"], | |
|
131 | "short": False, | |
|
141 | 132 | } |
|
142 | ] | |
|
133 | ], | |
|
143 | 134 | } |
|
144 | ] | |
|
135 | ], | |
|
145 | 136 | } |
|
146 | 137 | client.make_request(data=report_data) |
|
147 | 138 | |
|
148 | 139 | def notify_uptime_alert(self, **kwargs): |
|
149 | 140 | """ |
|
150 | 141 | Build and send uptime alert notification |
|
151 | 142 | |
|
152 | 143 | Kwargs: |
|
153 | 144 | application: application that the event applies for, |
|
154 | 145 | event: event that is notified, |
|
155 | 146 | user: user that should be notified |
|
156 | 147 | request: request object |
|
157 | 148 | |
|
158 | 149 | """ |
|
159 | 150 | template_vars = self.uptime_alert_notification_vars(kwargs) |
|
160 | 151 | |
|
161 | title = '*ALERT %s*: %s' % ( | 

162 | template_vars['alert_action'], | 

163 | template_vars['resource_name'], | 

164 | ) | |
|
165 | client = SlackIntegration.create_client( | |
|
166 | self.integration.config['webhook_url'] | |
|
152 | title = "*ALERT %s*: %s" % ( | |
|
153 | template_vars["alert_action"], | |
|
154 | template_vars["resource_name"], | |
|
167 | 155 | ) |
|
156 | client = SlackIntegration.create_client(self.integration.config["webhook_url"]) | |
|
168 | 157 | report_data = { |
|
169 | 158 | "username": "AppEnlight", |
|
170 | 159 | "icon_emoji": ":rage:", |
|
171 | 160 | "attachments": [ |
|
172 | 161 | { |
|
173 | 162 | "mrkdwn_in": ["text", "pretext", "title", "fallback"], |
|
174 | 163 | "fallback": "{} - <{}| Browse>".format( |
|
175 | title, template_vars['destination_url']), | 

164 | title, template_vars["destination_url"] | |
|
165 | ), | |
|
176 | 166 | "pretext": "{} - <{}| Browse>".format( |
|
177 | title, template_vars['destination_url']), | 

167 | title, template_vars["destination_url"] | |
|
168 | ), | |
|
178 | 169 | "color": "danger", |
|
179 | 170 | "fields": [ |
|
180 | 171 | { |
|
181 | 172 | "title": "Application has uptime issues", |
|
182 | "value": template_vars['reason'], | 

183 | "short": False | |
|
173 | "value": template_vars["reason"], | |
|
174 | "short": False, | |
|
184 | 175 | } |
|
185 | ] | |
|
176 | ], | |
|
186 | 177 | } |
|
187 | ] | |
|
178 | ], | |
|
188 | 179 | } |
|
189 | 180 | client.make_request(data=report_data) |
|
190 | 181 | |
|
191 | 182 | def notify_chart_alert(self, **kwargs): |
|
192 | 183 | """ |
|
193 | 184 | Build and send chart alert notification |
|
194 | 185 | |
|
195 | 186 | Kwargs: |
|
196 | 187 | application: application that the event applies for, |
|
197 | 188 | event: event that is notified, |
|
198 | 189 | user: user that should be notified |
|
199 | 190 | request: request object |
|
200 | 191 | |
|
201 | 192 | """ |
|
202 | 193 | template_vars = self.chart_alert_notification_vars(kwargs) |
|
203 | 194 | |
|
204 | title = '*ALERT {}*: value in *"{}"* chart ' \ | 

205 | 'met alert *"{}"* criteria'.format( | |
|
206 | template_vars['alert_action'], | 

207 | template_vars['chart_name'], | 

208 | template_vars['action_name'], | |
|
195 | title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format( | |
|
196 | template_vars["alert_action"], | |
|
197 | template_vars["chart_name"], | |
|
198 | template_vars["action_name"], | |
|
209 | 199 | ) |
|
210 | 200 | |
|
211 | subtext = '' | 

212 | for item in template_vars['readable_values']: | 

213 | subtext += '{} - {}\n'.format(item['label'], item['value']) | 

201 | subtext = "" | |
|
202 | for item in template_vars["readable_values"]: | |
|
203 | subtext += "{} - {}\n".format(item["label"], item["value"]) | |
|
214 | 204 | |
|
215 | client = SlackIntegration.create_client( | |
|
216 | self.integration.config['webhook_url'] | |
|
217 | ) | |
|
205 | client = SlackIntegration.create_client(self.integration.config["webhook_url"]) | |
|
218 | 206 | report_data = { |
|
219 | 207 | "username": "AppEnlight", |
|
220 | 208 | "icon_emoji": ":rage:", |
|
221 | 209 | "attachments": [ |
|
222 | {"mrkdwn_in": ["text", "pretext", "title", "fallback"], | |
|
223 | "fallback": "{} - <{}| Browse>".format( | |
|
224 | title, template_vars['destination_url']), | |
|
225 | "pretext": "{} - <{}| Browse>".format( | |
|
226 | title, template_vars['destination_url']), | |
|
227 | "color": "danger", | |
|
228 | "fields": [ | |
|
229 | { | 

230 | "title": "Following criteria were met:", | 

231 | "value": subtext, | 

232 | "short": False | 

233 | } | |
|
234 | ] | |
|
235 | } | |
|
236 | ] | 

210 | { | |
|
211 | "mrkdwn_in": ["text", "pretext", "title", "fallback"], | |
|
212 | "fallback": "{} - <{}| Browse>".format( | |
|
213 | title, template_vars["destination_url"] | |
|
214 | ), | |
|
215 | "pretext": "{} - <{}| Browse>".format( | |
|
216 | title, template_vars["destination_url"] | |
|
217 | ), | |
|
218 | "color": "danger", | |
|
219 | "fields": [ | |
|
220 | { | |
|
221 | "title": "Following criteria were met:", | |
|
222 | "value": subtext, | |
|
223 | "short": False, | |
|
224 | } | |
|
225 | ], | |
|
226 | } | |
|
227 | ], | |
|
237 | 228 | } |
|
238 | 229 | client.make_request(data=report_data) |
|
239 | 230 | |
|
240 | 231 | def send_digest(self, **kwargs): |
|
241 | 232 | """ |
|
242 | 233 | Build and send daily digest notification |
|
243 | 234 | |
|
244 | 235 | kwargs: |
|
245 | 236 | application: application that the event applies for, |
|
246 | 237 | user: user that should be notified |
|
247 | 238 | request: request object |
|
248 | 239 | since_when: reports are newer than this time value, |
|
249 | 240 | reports: list of reports to render |
|
250 | 241 | |
|
251 | 242 | """ |
|
252 | 243 | template_vars = self.report_alert_notification_vars(kwargs) |
|
253 | title = "*Daily report digest*: %s" % template_vars['resource_name'] | 

244 | title = "*Daily report digest*: %s" % template_vars["resource_name"] | |
|
254 | 245 | |
|
255 | subtext = '%s reports' % template_vars['confirmed_total'] | 

246 | subtext = "%s reports" % template_vars["confirmed_total"] | |
|
256 | 247 | |
|
257 | client = SlackIntegration.create_client( | |
|
258 | self.integration.config['webhook_url'] | |
|
259 | ) | |
|
248 | client = SlackIntegration.create_client(self.integration.config["webhook_url"]) | |
|
260 | 249 | report_data = { |
|
261 | 250 | "username": "AppEnlight", |
|
262 | 251 | "attachments": [ |
|
263 | 252 | { |
|
264 | 253 | "mrkdwn_in": ["text", "pretext", "title", "fallback"], |
|
265 | "fallback": "%s : <%s| Browse>" % ( | 

266 | title, template_vars['destination_url']), | 

267 | "pretext": "%s: <%s| Browse>" % ( | 

268 | title, template_vars['destination_url']), | 

254 | "fallback": "%s : <%s| Browse>" | |
|
255 | % (title, template_vars["destination_url"]), | |
|
256 | "pretext": "%s: <%s| Browse>" | |
|
257 | % (title, template_vars["destination_url"]), | |
|
269 | 258 | "color": "good", |
|
270 | "fields": [ | |
|
271 | { | |
|
272 | "title": "Got at least: %s" % subtext, | |
|
273 | "short": False | |
|
274 | } | |
|
275 | ] | |
|
259 | "fields": [{"title": "Got at least: %s" % subtext, "short": False}], | |
|
276 | 260 | } |
|
277 | ] | |
|
261 | ], | |
|
278 | 262 | } |
|
279 | 263 | client.make_request(data=report_data) |
|
280 | 264 | |
|
281 | log_msg = 'DIGEST : %s via %s :: %s reports' % ( | 

282 | kwargs['user'].user_name, | 

265 | log_msg = "DIGEST : %s via %s :: %s reports" % ( | |
|
266 | kwargs["user"].user_name, | |
|
283 | 267 | self.channel_visible_value, |
|
284 | template_vars['confirmed_total']) | 

268 | template_vars["confirmed_total"], | |
|
269 | ) | |
|
285 | 270 | log.warning(log_msg) |
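
The Slack channel methods above all reduce to the same operation: build an attachments payload and POST it as JSON to the integration's incoming-webhook URL. A minimal standalone sketch of that call, with placeholder arguments rather than AppEnlight's real plumbing:

    import json
    import requests

    def post_to_slack(webhook_url, title, destination_url, subtext):
        # Mirrors the shape of report_data built in notify_reports() above.
        payload = {
            "username": "AppEnlight",
            "icon_emoji": ":fire:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "*%s* - <%s| Browse>" % (title, destination_url),
                    "pretext": "*%s* - <%s| Browse>" % (title, destination_url),
                    "color": "warning",
                    "fields": [{"value": "Info: %s" % subtext, "short": False}],
                }
            ],
        }
        # Slack incoming webhooks accept the payload as a JSON request body.
        return requests.post(webhook_url, data=json.dumps(payload), timeout=10)
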
@@ -1,104 +1,113 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import uuid |
|
18 | 18 | import logging |
|
19 | 19 | import sqlalchemy as sa |
|
20 | 20 | from appenlight.models.resource import Resource |
|
21 | 21 | from sqlalchemy.orm import aliased |
|
22 | 22 | |
|
23 | 23 | log = logging.getLogger(__name__) |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def generate_api_key(): |
|
27 | uid = str(uuid.uuid4()).replace('-', '') | 

27 | uid = str(uuid.uuid4()).replace("-", "") | |
|
28 | 28 | return uid[0:32] |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | class Application(Resource): |
|
32 | 32 | """ |
|
33 | 33 | Resource of application type |
|
34 | 34 | """ |
|
35 | 35 | |
|
36 | __tablename__ = 'applications' | 

37 | __mapper_args__ = {'polymorphic_identity': 'application'} | 

36 | __tablename__ = "applications" | |
|
37 | __mapper_args__ = {"polymorphic_identity": "application"} | |
|
38 | 38 | |
|
39 | 39 | # lists configurable possible permissions for this resource type |
|
40 | __possible_permissions__ = ('view', 'update_reports') | 

41 | ||
|
42 | resource_id = sa.Column(sa.Integer(), | 

43 | sa.ForeignKey('resources.resource_id', | |
|
44 | onupdate='CASCADE', | |
|
45 | ondelete='CASCADE', ), | |
|
46 | primary_key=True, ) | |
|
47 | domains = sa.Column(sa.UnicodeText(), nullable=False, default='') | 

48 | api_key = sa.Column(sa.String(32), nullable=False, unique=True, index=True, | |
|
49 | default=generate_api_key) | |
|
50 | public_key = sa.Column(sa.String(32), nullable=False, unique=True, | |
|
51 | index=True, | |
|
52 | default=generate_api_key) | |
|
53 | default_grouping = sa.Column(sa.Unicode(20), nullable=False, | |
|
54 | default='url_traceback') | |
|
40 | __possible_permissions__ = ("view", "update_reports") | |
|
41 | ||
|
42 | resource_id = sa.Column( | |
|
43 | sa.Integer(), | |
|
44 | sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"), | |
|
45 | primary_key=True, | |
|
46 | ) | |
|
47 | domains = sa.Column(sa.UnicodeText(), nullable=False, default="") | |
|
48 | api_key = sa.Column( | |
|
49 | sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key | |
|
50 | ) | |
|
51 | public_key = sa.Column( | |
|
52 | sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key | |
|
53 | ) | |
|
54 | default_grouping = sa.Column( | |
|
55 | sa.Unicode(20), nullable=False, default="url_traceback" | |
|
56 | ) | |
|
55 | 57 | error_report_threshold = sa.Column(sa.Integer(), default=10) |
|
56 | 58 | slow_report_threshold = sa.Column(sa.Integer(), default=10) |
|
57 | allow_permanent_storage = sa.Column(sa.Boolean(), default=False, | |
|
58 | nullable=False) | |
|
59 | allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False) | |
|
59 | 60 | |
|
60 | @sa.orm.validates('default_grouping') | 

61 | @sa.orm.validates("default_grouping") | |
|
61 | 62 | def validate_default_grouping(self, key, grouping): |
|
62 | 63 | """ validate if resouce can have specific permission """ |
|
63 | assert grouping in ['url_type', 'url_traceback', 'traceback_server'] | 

64 | assert grouping in ["url_type", "url_traceback", "traceback_server"] | |
|
64 | 65 | return grouping |
|
65 | 66 | |
|
66 | report_groups = sa.orm.relationship('ReportGroup', | 

67 | cascade="all, delete-orphan", | |
|
68 | passive_deletes=True, | |
|
69 | passive_updates=True, | |
|
70 | lazy='dynamic', | |
|
71 | backref=sa.orm.backref('application', | |
|
72 | lazy="joined")) | |
|
73 | ||
|
74 | postprocess_conf = sa.orm.relationship('ApplicationPostprocessConf', | |
|
75 | cascade="all, delete-orphan", | |
|
76 | passive_deletes=True, | |
|
77 | passive_updates=True, | |
|
78 | backref='resource') | |
|
79 | ||
|
80 | logs = sa.orm.relationship('Log', | |
|
81 | lazy='dynamic', | |
|
82 | backref='application', | |
|
83 | passive_deletes=True, | |
|
84 | passive_updates=True, ) | |
|
85 | ||
|
86 | integrations = sa.orm.relationship('IntegrationBase', | |
|
87 | backref='resource', | |
|
88 | cascade="all, delete-orphan", | |
|
89 | passive_deletes=True, | |
|
90 | passive_updates=True, ) | |
|
67 | report_groups = sa.orm.relationship( | |
|
68 | "ReportGroup", | |
|
69 | cascade="all, delete-orphan", | |
|
70 | passive_deletes=True, | |
|
71 | passive_updates=True, | |
|
72 | lazy="dynamic", | |
|
73 | backref=sa.orm.backref("application", lazy="joined"), | |
|
74 | ) | |
|
75 | ||
|
76 | postprocess_conf = sa.orm.relationship( | |
|
77 | "ApplicationPostprocessConf", | |
|
78 | cascade="all, delete-orphan", | |
|
79 | passive_deletes=True, | |
|
80 | passive_updates=True, | |
|
81 | backref="resource", | |
|
82 | ) | |
|
83 | ||
|
84 | logs = sa.orm.relationship( | |
|
85 | "Log", | |
|
86 | lazy="dynamic", | |
|
87 | backref="application", | |
|
88 | passive_deletes=True, | |
|
89 | passive_updates=True, | |
|
90 | ) | |
|
91 | ||
|
92 | integrations = sa.orm.relationship( | |
|
93 | "IntegrationBase", | |
|
94 | backref="resource", | |
|
95 | cascade="all, delete-orphan", | |
|
96 | passive_deletes=True, | |
|
97 | passive_updates=True, | |
|
98 | ) | |
|
91 | 99 | |
|
92 | 100 | def generate_api_key(self): |
|
93 | 101 | return generate_api_key() |
|
94 | 102 | |
|
95 | 103 | |
|
96 | 104 | def after_update(mapper, connection, target): |
|
97 | 105 | from appenlight.models.services.application import ApplicationService |
|
98 | log.info('clearing out ApplicationService cache') | |
|
106 | ||
|
107 | log.info("clearing out ApplicationService cache") | |
|
99 | 108 | ApplicationService.by_id_cached().invalidate(target.resource_id) |
|
100 | 109 | ApplicationService.by_api_key_cached().invalidate(target.api_key) |
|
101 | 110 | |
|
102 | 111 | |
|
103 | sa.event.listen(Application, 'after_update', after_update) | 

104 | sa.event.listen(Application, 'after_delete', after_update) | 

112 | sa.event.listen(Application, "after_update", after_update) | |
|
113 | sa.event.listen(Application, "after_delete", after_update) |
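
The two `sa.event.listen` lines keep the ApplicationService lookup caches consistent: any flushed UPDATE or DELETE of an application row evicts the entries cached by resource id and by API key. A minimal sketch of the same pattern against a hypothetical model and in-process cache:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()
    _cache = {}  # hypothetical cache keyed by primary key

    class Item(Base):  # stand-in for Application
        __tablename__ = "items"
        id = sa.Column(sa.Integer, primary_key=True)

    def invalidate_cache(mapper, connection, target):
        # Fired during the flush after an UPDATE/DELETE on an Item row;
        # evict whatever was cached for that row.
        _cache.pop(target.id, None)

    sa.event.listen(Item, "after_update", invalidate_cache)
    sa.event.listen(Item, "after_delete", invalidate_cache)
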
@@ -1,45 +1,47 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from ziggurat_foundations.models.base import BaseModel |
|
18 | 18 | import sqlalchemy as sa |
|
19 | 19 | |
|
20 | 20 | from appenlight.models import Base |
|
21 | 21 | from appenlight.models.report_group import ReportGroup |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | class ApplicationPostprocessConf(Base, BaseModel): |
|
25 | 25 | """ |
|
26 | 26 | Stores prioritizing conditions for reports |
|
27 | 27 | This is later used for rule parsing like "if 10 occurences bump priority +1" |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | __tablename__ = 'application_postprocess_conf' | 

30 | __tablename__ = "application_postprocess_conf" | |
|
31 | 31 | |
|
32 | 32 | pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True) |
|
33 | resource_id = sa.Column(sa.Integer(), | 

34 | sa.ForeignKey('resources.resource_id', | |
|
35 | onupdate='CASCADE', | |
|
36 | ondelete='CASCADE')) | |
|
33 | resource_id = sa.Column( | |
|
34 | sa.Integer(), | |
|
35 | sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"), | |
|
36 | ) | |
|
37 | 37 | do = sa.Column(sa.Unicode(25), nullable=False) |
|
38 | new_value = sa.Column(sa.UnicodeText(), nullable=False, default='') | 

39 | rule = sa.Column(sa.dialects.postgresql.JSON, | 

40 | nullable=False, default={'field': 'http_status', | |
|
41 | "op": "ge", "value": "500"}) | |
|
38 | new_value = sa.Column(sa.UnicodeText(), nullable=False, default="") | |
|
39 | rule = sa.Column( | |
|
40 | sa.dialects.postgresql.JSON, | |
|
41 | nullable=False, | |
|
42 | default={"field": "http_status", "op": "ge", "value": "500"}, | |
|
43 | ) | |
|
42 | 44 | |
|
43 | 45 | def postprocess(self, item): |
|
44 | 46 | new_value = int(self.new_value) |
|
45 | 47 | item.priority = ReportGroup.priority + new_value |
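
The `rule` column's default, {"field": "http_status", "op": "ge", "value": "500"}, reads as "http_status >= 500"; when a report matches, `postprocess` bumps the group's priority by `new_value`. A hedged sketch of how such a flat rule could be evaluated (the real rule engine in appenlight is richer than this):

    import operator

    OPS = {"ge": operator.ge, "gt": operator.gt,
           "le": operator.le, "lt": operator.lt, "eq": operator.eq}

    def rule_matches(rule, report):
        # rule example: {"field": "http_status", "op": "ge", "value": "500"}
        return OPS[rule["op"]](int(report[rule["field"]]), int(rule["value"]))

    assert rule_matches({"field": "http_status", "op": "ge", "value": "500"},
                        {"http_status": 502})
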
@@ -1,52 +1,57 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import sqlalchemy as sa |
|
19 | 19 | |
|
20 | 20 | from datetime import datetime |
|
21 | 21 | from appenlight.models import Base |
|
22 | 22 | from ziggurat_foundations.models.base import BaseModel |
|
23 | 23 | from ziggurat_foundations.models.services.user import UserService |
|
24 | 24 | |
|
25 | 25 | log = logging.getLogger(__name__) |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class AuthToken(Base, BaseModel): |
|
29 | 29 | """ |
|
30 | 30 | Stores information about possible alerting options |
|
31 | 31 | """ |
|
32 | __tablename__ = 'auth_tokens' | |
|
32 | ||
|
33 | __tablename__ = "auth_tokens" | |
|
33 | 34 | |
|
34 | 35 | id = sa.Column(sa.Integer, primary_key=True, nullable=False) |
|
35 | token = sa.Column(sa.Unicode(40), nullable=False, | |
|
36 | default=lambda x: UserService.generate_random_string(40)) | |
|
37 | owner_id = sa.Column(sa.Unicode(30), | |
|
38 | sa.ForeignKey('users.id', onupdate='CASCADE', | |
|
39 | ondelete='CASCADE')) | |
|
36 | token = sa.Column( | |
|
37 | sa.Unicode(40), | |
|
38 | nullable=False, | |
|
39 | default=lambda x: UserService.generate_random_string(40), | |
|
40 | ) | |
|
41 | owner_id = sa.Column( | |
|
42 | sa.Unicode(30), | |
|
43 | sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), | |
|
44 | ) | |
|
40 | 45 | creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow()) |
|
41 | 46 | expires = sa.Column(sa.DateTime) |
|
42 | description = sa.Column(sa.Unicode, default='') | 

47 | description = sa.Column(sa.Unicode, default="") | |
|
43 | 48 | |
|
44 | 49 | @property |
|
45 | 50 | def is_expired(self): |
|
46 | 51 | if self.expires: |
|
47 | 52 | return self.expires < datetime.utcnow() |
|
48 | 53 | else: |
|
49 | 54 | return False |
|
50 | 55 | |
|
51 | 56 | def __str__(self): |
|
52 | return '<AuthToken u:%s t:%s...>' % (self.owner_id, self.token[0:10]) | 

57 | return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10]) |
@@ -1,32 +1,32 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from ziggurat_foundations.models.base import BaseModel |
|
19 | 19 | from sqlalchemy.dialects.postgresql import JSON |
|
20 | 20 | |
|
21 | 21 | from . import Base |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | class Config(Base, BaseModel): |
|
25 | __tablename__ = 'config' | 

25 | __tablename__ = "config" | |
|
26 | 26 | |
|
27 | 27 | key = sa.Column(sa.Unicode, primary_key=True) |
|
28 | 28 | section = sa.Column(sa.Unicode, primary_key=True) |
|
29 | 29 | value = sa.Column(JSON, nullable=False) |
|
30 | 30 | |
|
31 | 31 | def __json__(self, request): |
|
32 | 32 | return self.get_dict() |
@@ -1,165 +1,170 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | import logging |
|
19 | 19 | |
|
20 | 20 | from datetime import datetime |
|
21 | 21 | from appenlight.models import Base, get_db_session |
|
22 | 22 | from appenlight.models.services.report_stat import ReportStatService |
|
23 | 23 | from appenlight.models.integrations import IntegrationException |
|
24 | 24 | from pyramid.threadlocal import get_current_request |
|
25 | 25 | from sqlalchemy.dialects.postgresql import JSON |
|
26 | 26 | from ziggurat_foundations.models.base import BaseModel |
|
27 | 27 | from ziggurat_foundations.models.services.resource import ResourceService |
|
28 | 28 | |
|
29 | 29 | log = logging.getLogger(__name__) |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | class Event(Base, BaseModel): |
|
33 | __tablename__ = 'events' | 

33 | __tablename__ = "events" | |
|
34 | 34 | |
|
35 | types = {'error_report_alert': 1, | |
|
36 | 'slow_report_alert': 3, | 

37 | 'comment': 5, | |
|
38 | 'assignment': 6, | 

39 | 'uptime_alert': 7, | |
|
40 | 'chart_alert': 9} | 

35 | types = { | |
|
36 | "error_report_alert": 1, | |
|
37 | "slow_report_alert": 3, | |
|
38 | "comment": 5, | |
|
39 | "assignment": 6, | |
|
40 | "uptime_alert": 7, | |
|
41 | "chart_alert": 9, | |
|
42 | } | |
|
41 | 43 | |
|
42 | statuses = {'active': 1, | 

43 | 'closed': 0} | |
|
44 | statuses = {"active": 1, "closed": 0} | |
|
44 | 45 | |
|
45 | 46 | id = sa.Column(sa.Integer, primary_key=True) |
|
46 | 47 | start_date = sa.Column(sa.DateTime, default=datetime.utcnow) |
|
47 | 48 | end_date = sa.Column(sa.DateTime) |
|
48 | 49 | status = sa.Column(sa.Integer, default=1) |
|
49 | 50 | event_type = sa.Column(sa.Integer, default=1) |
|
50 | origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), | 

51 | nullable=True) | |
|
52 | target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), | |
|
53 | nullable=True) | |
|
54 | resource_id = sa.Column(sa.Integer(), | |
|
55 | sa.ForeignKey('resources.resource_id'), | |
|
56 | nullable=True) | |
|
51 | origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True) | |
|
52 | target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True) | |
|
53 | resource_id = sa.Column( | |
|
54 | sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True | |
|
55 | ) | |
|
57 | 56 | target_id = sa.Column(sa.Integer) |
|
58 | 57 | target_uuid = sa.Column(sa.Unicode(40)) |
|
59 | 58 | text = sa.Column(sa.UnicodeText()) |
|
60 | 59 | values = sa.Column(JSON(), nullable=False, default=None) |
|
61 | 60 | |
|
62 | 61 | def __repr__(self): |
|
63 | return '<Event %s, app:%s, %s>' % (self.unified_alert_name(), | 

64 | self.resource_id, | |
|
65 | self.unified_alert_action()) | |
|
62 | return "<Event %s, app:%s, %s>" % ( | |
|
63 | self.unified_alert_name(), | |
|
64 | self.resource_id, | |
|
65 | self.unified_alert_action(), | |
|
66 | ) | |
|
66 | 67 | |
|
67 | 68 | @property |
|
68 | 69 | def reverse_types(self): |
|
69 | 70 | return dict([(v, k) for k, v in self.types.items()]) |
|
70 | 71 | |
|
71 | 72 | def unified_alert_name(self): |
|
72 | 73 | return self.reverse_types[self.event_type] |
|
73 | 74 | |
|
74 | 75 | def unified_alert_action(self): |
|
75 | 76 | event_name = self.reverse_types[self.event_type] |
|
76 | if self.status == Event.statuses['closed']: | 

77 | if self.status == Event.statuses["closed"]: | |
|
77 | 78 | return "CLOSE" |
|
78 | if self.status != Event.statuses['closed']: | 

79 | if self.status != Event.statuses["closed"]: | |
|
79 | 80 | return "OPEN" |
|
80 | 81 | return event_name |
|
81 | 82 | |
|
82 | 83 | def send_alerts(self, request=None, resource=None, db_session=None): |
|
83 | 84 | """" Sends alerts to applicable channels """ |
|
84 | 85 | db_session = get_db_session(db_session) |
|
85 | 86 | db_session.flush() |
|
86 | 87 | if not resource: |
|
87 | 88 | resource = ResourceService.by_resource_id(self.resource_id) |
|
88 | 89 | if not request: |
|
89 | 90 | request = get_current_request() |
|
90 | 91 | if not resource: |
|
91 | 92 | return |
|
92 | users = set([p.user for p in ResourceService.users_for_perm(resource, 'view')]) | 

93 | users = set([p.user for p in ResourceService.users_for_perm(resource, "view")]) | |
|
93 | 94 | for user in users: |
|
94 | 95 | for channel in user.alert_channels: |
|
95 | matches_resource = not channel.resources or resource in [r.resource_id for r in channel.resources] | 

96 | matches_resource = not channel.resources or resource in [ | |
|
97 | r.resource_id for r in channel.resources | |
|
98 | ] | |
|
96 | 99 | if ( |
|
97 | not channel.channel_validated or | 

98 | not channel.send_alerts or | 

99 | not matches_resource | |
|
100 | not channel.channel_validated | |
|
101 | or not channel.send_alerts | |
|
102 | or not matches_resource | |
|
100 | 103 | ): |
|
101 | 104 | continue |
|
102 | 105 | else: |
|
103 | 106 | try: |
|
104 | channel.notify_alert(resource=resource, | 

105 | event=self, | |
|
106 | user=user, | 

107 | request=request) | |
|
107 | channel.notify_alert( | |
|
108 | resource=resource, event=self, user=user, request=request | |
|
109 | ) | |
|
108 | 110 | except IntegrationException as e: |
|
109 | log.warning('%s' % e) | 

111 | log.warning("%s" % e) | |
|
110 | 112 | |
|
111 | 113 | def validate_or_close(self, since_when, db_session=None): |
|
112 | 114 | """ Checks if alerts should stay open or it's time to close them. |
|
113 | 115 | Generates close alert event if alerts get closed """ |
|
114 | event_types = [Event.types['error_report_alert'], | |
|
115 | Event.types['slow_report_alert']] | 

116 | event_types = [ | |
|
117 | Event.types["error_report_alert"], | |
|
118 | Event.types["slow_report_alert"], | |
|
119 | ] | |
|
116 | 120 | app = ResourceService.by_resource_id(self.resource_id) |
|
117 | 121 | # if app was deleted close instantly |
|
118 | 122 | if not app: |
|
119 | 123 | self.close() |
|
120 | 124 | return |
|
121 | 125 | |
|
122 | 126 | if self.event_type in event_types: |
|
123 | 127 | total = ReportStatService.count_by_type( |
|
124 | self.event_type, self.resource_id, since_when) | 

125 | if Event.types['error_report_alert'] == self.event_type: | |
|
128 | self.event_type, self.resource_id, since_when | |
|
129 | ) | |
|
130 | if Event.types["error_report_alert"] == self.event_type: | |
|
126 | 131 | threshold = app.error_report_threshold |
|
127 | if Event.types['slow_report_alert'] == self.event_type: | 

132 | if Event.types["slow_report_alert"] == self.event_type: | |
|
128 | 133 | threshold = app.slow_report_threshold |
|
129 | 134 | |
|
130 | 135 | if total < threshold: |
|
131 | 136 | self.close() |
|
132 | 137 | |
|
133 | 138 | def close(self, db_session=None): |
|
134 | 139 | """ |
|
135 | 140 | Closes an event and sends notification to affected users |
|
136 | 141 | """ |
|
137 | 142 | self.end_date = datetime.utcnow() |
|
138 | self.status = Event.statuses['closed'] | 

139 | log.warning('ALERT: CLOSE: %s' % self) | 

143 | self.status = Event.statuses["closed"] | |
|
144 | log.warning("ALERT: CLOSE: %s" % self) | |
|
140 | 145 | self.send_alerts() |
|
141 | 146 | |
|
142 | 147 | def text_representation(self): |
|
143 | 148 | alert_type = self.unified_alert_name() |
|
144 | text = '' | 

145 | if 'slow_report' in alert_type: | 

146 | text += 'Slow report alert' | 

147 | if 'error_report' in alert_type: | 

148 | text += 'Exception report alert' | 

149 | if 'uptime_alert' in alert_type: | 

150 | text += 'Uptime alert' | 

151 | if 'chart_alert' in alert_type: | 

152 | text += 'Metrics value alert' | 

149 | text = "" | |
|
150 | if "slow_report" in alert_type: | |
|
151 | text += "Slow report alert" | |
|
152 | if "error_report" in alert_type: | |
|
153 | text += "Exception report alert" | |
|
154 | if "uptime_alert" in alert_type: | |
|
155 | text += "Uptime alert" | |
|
156 | if "chart_alert" in alert_type: | |
|
157 | text += "Metrics value alert" | |
|
153 | 158 | |
|
154 | 159 | alert_action = self.unified_alert_action() |
|
155 | if alert_action == 'OPEN': | 

156 | text += ' got opened.' | 

157 | if alert_action == 'CLOSE': | 

158 | text += ' got closed.' | 

160 | if alert_action == "OPEN": | |
|
161 | text += " got opened." | |
|
162 | if alert_action == "CLOSE": | |
|
163 | text += " got closed." | |
|
159 | 164 | return text |
|
160 | 165 | |
|
161 | 166 | def get_dict(self, request=None): |
|
162 | 167 | dict_data = super(Event, self).get_dict() |
|
163 | dict_data['text'] = self.text_representation() | 

164 | dict_data['resource_name'] = self.resource.resource_name | 

168 | dict_data["text"] = self.text_representation() | |
|
169 | dict_data["resource_name"] = self.resource.resource_name | |
|
165 | 170 | return dict_data |
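
`reverse_types` simply inverts the integer-valued `types` mapping so a stored `event_type` can be rendered back to its name, which is what `unified_alert_name` relies on. In isolation:

    types = {"error_report_alert": 1, "slow_report_alert": 3, "comment": 5}
    reverse_types = {v: k for k, v in types.items()}
    assert reverse_types[3] == "slow_report_alert"
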
@@ -1,36 +1,36 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from sqlalchemy.ext.declarative import declared_attr |
|
19 | 19 | from ziggurat_foundations.models.external_identity import ExternalIdentityMixin |
|
20 | 20 | |
|
21 | 21 | from appenlight.models import Base |
|
22 | 22 | from appenlight.lib.sqlalchemy_fields import EncryptedUnicode |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class ExternalIdentity(ExternalIdentityMixin, Base): |
|
26 | 26 | @declared_attr |
|
27 | 27 | def access_token(self): |
|
28 | return sa.Column(EncryptedUnicode(255), default='') | 

28 | return sa.Column(EncryptedUnicode(255), default="") | |
|
29 | 29 | |
|
30 | 30 | @declared_attr |
|
31 | 31 | def alt_token(self): |
|
32 | return sa.Column(EncryptedUnicode(255), default='') | 

32 | return sa.Column(EncryptedUnicode(255), default="") | |
|
33 | 33 | |
|
34 | 34 | @declared_attr |
|
35 | 35 | def token_secret(self): |
|
36 | return sa.Column(EncryptedUnicode(255), default='') | 

36 | return sa.Column(EncryptedUnicode(255), default="") |
@@ -1,45 +1,46 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from ziggurat_foundations.models.group import GroupMixin |
|
18 | 18 | from appenlight.models import Base |
|
19 | 19 | |
|
20 | 20 | |
|
21 | 21 | class Group(GroupMixin, Base): |
|
22 | __possible_permissions__ = ('root_administration', | 

23 | 'test_features', | |
|
24 | 'admin_panel', | |
|
25 | 'admin_users', | |
|
26 | 'manage_partitions',) | |
|
22 | __possible_permissions__ = ( | |
|
23 | "root_administration", | |
|
24 | "test_features", | |
|
25 | "admin_panel", | |
|
26 | "admin_users", | |
|
27 | "manage_partitions", | |
|
28 | ) | |
|
27 | 29 | |
|
28 | def get_dict(self, exclude_keys=None, include_keys=None, | |
|
29 | include_perms=False): | |
|
30 | def get_dict(self, exclude_keys=None, include_keys=None, include_perms=False): | |
|
30 | 31 | result = super(Group, self).get_dict(exclude_keys, include_keys) |
|
31 | 32 | if include_perms: |
|
32 | result['possible_permissions'] = self.__possible_permissions__ | 

33 | result['current_permissions'] = [p.perm_name for p in | 

34 | self.permissions] | |
|
33 | result["possible_permissions"] = self.__possible_permissions__ | |
|
34 | result["current_permissions"] = [p.perm_name for p in self.permissions] | |
|
35 | 35 | else: |
|
36 | result['possible_permissions'] = [] | 

37 | result['current_permissions'] = [] | 

36 | result["possible_permissions"] = [] | |
|
37 | result["current_permissions"] = [] | |
|
38 | 38 | exclude_keys_list = exclude_keys or [] |
|
39 | 39 | include_keys_list = include_keys or [] |
|
40 | 40 | d = {} |
|
41 | 41 | for k in result.keys(): |
|
42 | if k not in exclude_keys_list and \ | 

43 | (k in include_keys_list or not include_keys): | 

42 | if k not in exclude_keys_list and ( | |
|
43 | k in include_keys_list or not include_keys | |
|
44 | ): | |
|
44 | 45 | d[k] = result[k] |
|
45 | 46 | return d |
@@ -1,23 +1,24 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | from ziggurat_foundations.models.group_resource_permission import \ | 

18 | GroupResourcePermissionMixin | |
|
17 | from ziggurat_foundations.models.group_resource_permission import ( | |
|
18 | GroupResourcePermissionMixin, | |
|
19 | ) | |
|
19 | 20 | from appenlight.models import Base |
|
20 | 21 | |
|
21 | 22 | |
|
22 | 23 | class GroupResourcePermission(GroupResourcePermissionMixin, Base): |
|
23 | 24 | pass |
@@ -1,78 +1,80 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from sqlalchemy.dialects.postgresql import JSON |
|
19 | 19 | from sqlalchemy.ext.hybrid import hybrid_property |
|
20 | 20 | from ziggurat_foundations.models.base import BaseModel |
|
21 | 21 | |
|
22 | 22 | from appenlight.lib.encryption import decrypt_dictionary_keys |
|
23 | 23 | from appenlight.lib.encryption import encrypt_dictionary_keys |
|
24 | 24 | from appenlight.models import Base, get_db_session |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | class IntegrationException(Exception): |
|
28 | 28 | pass |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | class IntegrationBase(Base, BaseModel): |
|
32 | 32 | """ |
|
33 | 33 | Model from which all integrations inherit using polymorphic approach |
|
34 | 34 | """ |
|
35 | __tablename__ = 'integrations' | |
|
35 | ||
|
36 | __tablename__ = "integrations" | |
|
36 | 37 | |
|
37 | 38 | front_visible = False |
|
38 | 39 | as_alert_channel = False |
|
39 | 40 | supports_report_alerting = False |
|
40 | 41 | |
|
41 | 42 | id = sa.Column(sa.Integer, primary_key=True) |
|
42 | resource_id = sa.Column(sa.Integer, | |
|
43 | sa.ForeignKey('applications.resource_id')) | |
|
43 | resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id")) | |
|
44 | 44 | integration_name = sa.Column(sa.Unicode(64)) |
|
45 | _config = sa.Column('config', JSON(), nullable=False, default='') | 

45 | _config = sa.Column("config", JSON(), nullable=False, default="") | |
|
46 | 46 | modified_date = sa.Column(sa.DateTime) |
|
47 | 47 | |
|
48 | channel = sa.orm.relationship('AlertChannel', | 

49 | cascade="all,delete-orphan", | |
|
50 | passive_deletes=True, | |
|
51 | passive_updates=True, | 

52 | uselist=False, | |
|
53 | backref='integration') | |
|
48 | channel = sa.orm.relationship( | |
|
49 | "AlertChannel", | |
|
50 | cascade="all,delete-orphan", | |
|
51 | passive_deletes=True, | |
|
52 | passive_updates=True, | |
|
53 | uselist=False, | |
|
54 | backref="integration", | |
|
55 | ) | |
|
54 | 56 | |
|
55 | 57 | __mapper_args__ = { |
|
56 | 'polymorphic_on': 'integration_name', | 

57 | 'polymorphic_identity': 'integration', | 

58 | "polymorphic_on": "integration_name", | |
|
59 | "polymorphic_identity": "integration", | |
|
58 | 60 | } |
|
59 | 61 | |
|
60 | 62 | @classmethod |
|
61 | def by_app_id_and_integration_name(cls, resource_id, integration_name, | 

62 | db_session=None): | |
|
63 | def by_app_id_and_integration_name( | |
|
64 | cls, resource_id, integration_name, db_session=None | |
|
65 | ): | |
|
63 | 66 | db_session = get_db_session(db_session) |
|
64 | 67 | query = db_session.query(cls) |
|
65 | 68 | query = query.filter(cls.integration_name == integration_name) |
|
66 | 69 | query = query.filter(cls.resource_id == resource_id) |
|
67 | 70 | return query.first() |
|
68 | 71 | |
|
69 | 72 | @hybrid_property |
|
70 | 73 | def config(self): |
|
71 | 74 | return decrypt_dictionary_keys(self._config) |
|
72 | 75 | |
|
73 | 76 | @config.setter |
|
74 | 77 | def config(self, value): |
|
75 | if not hasattr(value, 'items'): | 

76 | raise Exception('IntegrationBase.config only accepts ' | 

77 | 'flat dictionaries') | |
|
78 | if not hasattr(value, "items"): | |
|
79 | raise Exception("IntegrationBase.config only accepts " "flat dictionaries") | |
|
78 | 80 | self._config = encrypt_dictionary_keys(value) |
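
The `config` hybrid property means callers read and assign plain dictionaries while the `_config` column only ever stores encrypted values. A minimal sketch of the idea with a stand-in cipher instead of appenlight.lib.encryption:

    from sqlalchemy.ext.hybrid import hybrid_property

    def _encrypt(d):  # stand-in for encrypt_dictionary_keys
        return {k: v[::-1] for k, v in d.items()}

    def _decrypt(d):  # stand-in for decrypt_dictionary_keys
        return {k: v[::-1] for k, v in d.items()}

    class HasSecretConfig(object):
        _config = {}

        @hybrid_property
        def config(self):
            return _decrypt(self._config)

        @config.setter
        def config(self, value):
            if not hasattr(value, "items"):
                raise Exception("config only accepts flat dictionaries")
            self._config = _encrypt(value)

    obj = HasSecretConfig()
    obj.config = {"webhook_url": "https://example.invalid/hook"}
    assert obj.config["webhook_url"] == "https://example.invalid/hook"
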
@@ -1,163 +1,188 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import requests |
|
18 | 18 | from requests_oauthlib import OAuth1 |
|
19 | from appenlight.models.integrations import (IntegrationBase, | 

20 | IntegrationException) | |
|
19 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
|
21 | 20 | |
|
22 | 21 | _ = str |
|
23 | 22 | |
|
24 | 23 | |
|
25 | 24 | class NotFoundException(Exception): |
|
26 | 25 | pass |
|
27 | 26 | |
|
28 | 27 | |
|
29 | 28 | class BitbucketIntegration(IntegrationBase): |
|
30 | __mapper_args__ = { | |
|
31 | 'polymorphic_identity': 'bitbucket' | |
|
32 | } | |
|
29 | __mapper_args__ = {"polymorphic_identity": "bitbucket"} | |
|
33 | 30 | front_visible = True |
|
34 | 31 | as_alert_channel = False |
|
35 | 32 | supports_report_alerting = False |
|
36 | 33 | action_notification = True |
|
37 | integration_action = 'Add issue to Bitbucket' | 

34 | integration_action = "Add issue to Bitbucket" | |
|
38 | 35 | |
|
39 | 36 | @classmethod |
|
40 | 37 | def create_client(cls, request, user_name=None, repo_name=None): |
|
41 | 38 | """ |
|
42 | 39 | Creates REST client that can authenticate to specific repo |
|
43 | 40 | uses auth tokens for current request user |
|
44 | 41 | """ |
|
45 | 42 | config = request.registry.settings |
|
46 | 43 | token = None |
|
47 | 44 | secret = None |
|
48 | 45 | for identity in request.user.external_identities: |
|
49 | if identity.provider_name == 'bitbucket': | 

46 | if identity.provider_name == "bitbucket": | |
|
50 | 47 | token = identity.access_token |
|
51 | 48 | secret = identity.token_secret |
|
52 | 49 | break |
|
53 | 50 | if not token: |
|
54 | raise IntegrationException( | |
|
55 | 'No valid auth token present for this service') | |
|
56 | client = BitbucketClient(token, secret, | |
|
57 | user_name, | |
|
58 | repo_name, | |
|
59 | config['authomatic.pr.bitbucket.key'], | |
|
60 | config['authomatic.pr.bitbucket.secret']) | 

51 | raise IntegrationException("No valid auth token present for this service") | |
|
52 | client = BitbucketClient( | |
|
53 | token, | |
|
54 | secret, | |
|
55 | user_name, | |
|
56 | repo_name, | |
|
57 | config["authomatic.pr.bitbucket.key"], | |
|
58 | config["authomatic.pr.bitbucket.secret"], | |
|
59 | ) | |
|
61 | 60 | return client |
|
62 | 61 | |
|
63 | 62 | |
|
64 | 63 | class BitbucketClient(object): |
|
65 | api_url = 'https://bitbucket.org/api/1.0' | 

66 | repo_type = 'bitbucket' | 

67 | ||
|
68 | def __init__(self, token, secret, owner, repo_name, bitbucket_consumer_key, | |
|
69 | bitbucket_consumer_secret): | |
|
64 | api_url = "https://bitbucket.org/api/1.0" | |
|
65 | repo_type = "bitbucket" | |
|
66 | ||
|
67 | def __init__( | |
|
68 | self, | |
|
69 | token, | |
|
70 | secret, | |
|
71 | owner, | |
|
72 | repo_name, | |
|
73 | bitbucket_consumer_key, | |
|
74 | bitbucket_consumer_secret, | |
|
75 | ): | |
|
70 | 76 | self.access_token = token |
|
71 | 77 | self.token_secret = secret |
|
72 | 78 | self.owner = owner |
|
73 | 79 | self.repo_name = repo_name |
|
74 | 80 | self.bitbucket_consumer_key = bitbucket_consumer_key |
|
75 | 81 | self.bitbucket_consumer_secret = bitbucket_consumer_secret |
|
76 | 82 | |
|
77 | 83 | possible_keys = { |
|
78 | 'status': ['new', 'open', 'resolved', 'on hold', 'invalid', | |
|
79 | 'duplicate', 'wontfix'], | |
|
80 | 'priority': ['trivial', 'minor', 'major', 'critical', 'blocker'], | |
|
81 | 'kind': ['bug', 'enhancement', 'proposal', 'task'] | |
|
84 | "status": [ | |
|
85 | "new", | |
|
86 | "open", | |
|
87 | "resolved", | |
|
88 | "on hold", | |
|
89 | "invalid", | |
|
90 | "duplicate", | |
|
91 | "wontfix", | |
|
92 | ], | |
|
93 | "priority": ["trivial", "minor", "major", "critical", "blocker"], | |
|
94 | "kind": ["bug", "enhancement", "proposal", "task"], | |
|
82 | 95 | } |
|
83 | 96 | |
|
84 | 97 | def get_statuses(self): |
|
85 | 98 | """Gets list of possible item statuses""" |
|
86 | return self.possible_keys['status'] | 

99 | return self.possible_keys["status"] | |
|
87 | 100 | |
|
88 | 101 | def get_priorities(self): |
|
89 | 102 | """Gets list of possible item statuses""" |
|
90 | return self.possible_keys['priority'] | 

103 | return self.possible_keys["priority"] | |
|
91 | 104 | |
|
92 | def make_request(self, url, method='get', data=None, headers=None): | 

105 | def make_request(self, url, method="get", data=None, headers=None): | |
|
93 | 106 | """ |
|
94 | 107 | Performs HTTP request to bitbucket |
|
95 | 108 | """ |
|
96 | auth = OAuth1(self.bitbucket_consumer_key, | 

97 | self.bitbucket_consumer_secret, | 

98 | self.access_token, self.token_secret) | |
|
109 | auth = OAuth1( | |
|
110 | self.bitbucket_consumer_key, | |
|
111 | self.bitbucket_consumer_secret, | |
|
112 | self.access_token, | |
|
113 | self.token_secret, | |
|
114 | ) | |
|
99 | 115 | try: |
|
100 | resp = getattr(requests, method)(url, data=data, auth=auth, | |
|
101 | timeout=10) | |
|
116 | resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10) | |
|
102 | 117 | except Exception as e: |
|
103 | 118 | raise IntegrationException( |
|
104 | _('Error communicating with Bitbucket: %s') % (e,)) | 

119 | _("Error communicating with Bitbucket: %s") % (e,) | |
|
120 | ) | |
|
105 | 121 | if resp.status_code == 401: |
|
106 | raise IntegrationException( | |
|
107 | _('You are not authorized to access this repo')) | |
|
122 | raise IntegrationException(_("You are not authorized to access this repo")) | |
|
108 | 123 | elif resp.status_code == 404: |
|
109 | raise IntegrationException(_('User or repo name are incorrect')) | 

124 | raise IntegrationException(_("User or repo name are incorrect")) | |
|
110 | 125 | elif resp.status_code not in [200, 201]: |
|
111 | 126 | raise IntegrationException( |
|
112 | _('Bitbucket response_code: %s') % resp.status_code) | 

127 | _("Bitbucket response_code: %s") % resp.status_code | |
|
128 | ) | |
|
113 | 129 | try: |
|
114 | 130 | return resp.json() |
|
115 | 131 | except Exception as e: |
|
116 | 132 | raise IntegrationException( |
|
117 | _('Error decoding response from Bitbucket: %s') % (e,)) | 

133 | _("Error decoding response from Bitbucket: %s") % (e,) | |
|
134 | ) | |
|
118 | 135 | |
|
119 | 136 | def get_assignees(self): |
|
120 | 137 | """Gets list of possible assignees""" |
|
121 | url = '%(api_url)s/privileges/%(owner)s/%(repo_name)s' % { | 

122 | 'api_url': self.api_url, | 

123 | 'owner': self.owner, | 

124 | 'repo_name': self.repo_name} | 

138 | url = "%(api_url)s/privileges/%(owner)s/%(repo_name)s" % { | |
|
139 | "api_url": self.api_url, | |
|
140 | "owner": self.owner, | |
|
141 | "repo_name": self.repo_name, | |
|
142 | } | |
|
125 | 143 | |
|
126 | 144 | data = self.make_request(url) |
|
127 | results = [{'user': self.owner, 'name': '(Repo owner)'}] | 

145 | results = [{"user": self.owner, "name": "(Repo owner)"}] | |
|
128 | 146 | if data: |
|
129 | 147 | for entry in data: |
|
130 | results.append({'user': entry['user']['username'], | 

131 | "name": entry['user'].get('display_name')}) | |
|
148 | results.append( | |
|
149 | { | |
|
150 | "user": entry["user"]["username"], | |
|
151 | "name": entry["user"].get("display_name"), | |
|
152 | } | |
|
153 | ) | |
|
132 | 154 | return results |
|
133 | 155 | |
|
134 | 156 | def create_issue(self, form_data): |
|
135 | 157 | """ |
|
136 | 158 | Sends creates a new issue in tracker using REST call |
|
137 | 159 | """ |
|
138 | url = '%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/' % { | 

139 | 'api_url': self.api_url, | 

140 | 'owner': self.owner, | 

141 | 'repo_name': self.repo_name} | 

160 | url = "%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/" % { | |
|
161 | "api_url": self.api_url, | |
|
162 | "owner": self.owner, | |
|
163 | "repo_name": self.repo_name, | |
|
164 | } | |
|
142 | 165 | |
|
143 | 166 | payload = { |
|
144 | "title": form_data['title'], | 

145 | "content": form_data['content'], | 

146 | "kind": form_data['kind'], | 

147 | "priority": form_data['priority'], | 

148 | "responsible": form_data['responsible'], | 

167 | "title": form_data["title"], | |
|
168 | "content": form_data["content"], | |
|
169 | "kind": form_data["kind"], | |
|
170 | "priority": form_data["priority"], | |
|
171 | "responsible": form_data["responsible"], | |
|
149 | 172 | } |
|
150 | data = self.make_request(url, 'post', payload) | 

173 | data = self.make_request(url, "post", payload) | |
|
151 | 174 | f_args = { |
|
152 | 175 | "owner": self.owner, |
|
153 | 176 | "repo_name": self.repo_name, |
|
154 | "issue_id": data['local_id'], | 

177 | "issue_id": data["local_id"], | |
|
155 | 178 | } |
|
156 | web_url = 'https://bitbucket.org/%(owner)s/%(repo_name)s' \ | |
|
157 | '/issue/%(issue_id)s/issue-title' % f_args | |
|
179 | web_url = ( | |
|
180 | "https://bitbucket.org/%(owner)s/%(repo_name)s" | |
|
181 | "/issue/%(issue_id)s/issue-title" % f_args | |
|
182 | ) | |
|
158 | 183 | to_return = { |
|
159 | 'id': data['local_id'], | 

160 | 'resource_url': data['resource_uri'], | 

161 | 'web_url': web_url | 

184 | "id": data["local_id"], | |
|
185 | "resource_url": data["resource_uri"], | |
|
186 | "web_url": web_url, | |
|
162 | 187 | } |
|
163 | 188 | return to_return |
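
Every BitbucketClient call goes through `make_request`, which signs the HTTP request with OAuth1 and dispatches via `getattr(requests, method)`. The signing pattern in isolation, with placeholder credentials (the 1.0 API endpoint is illustrative; Bitbucket has long since retired it):

    import requests
    from requests_oauthlib import OAuth1

    # Placeholder credentials; in the code above they come from the user's
    # external identity plus the authomatic.pr.bitbucket.* settings.
    auth = OAuth1("consumer-key", "consumer-secret",
                  "access-token", "token-secret")
    resp = requests.get("https://bitbucket.org/api/1.0/user",
                        auth=auth, timeout=10)
    resp.raise_for_status()
    print(resp.json())
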
@@ -1,74 +1,71 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | from requests.exceptions import HTTPError, ConnectionError |
|
20 | 20 | from camplight import Request, Campfire |
|
21 | 21 | from camplight.exceptions import CamplightException |
|
22 | 22 | |
|
23 | from appenlight.models.integrations import (IntegrationBase, | 

24 | IntegrationException) | |
|
23 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
|
25 | 24 | |
|
26 | 25 | _ = str |
|
27 | 26 | |
|
28 | 27 | log = logging.getLogger(__name__) |
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | class NotFoundException(Exception): |
|
32 | 31 | pass |
|
33 | 32 | |
|
34 | 33 | |
|
35 | 34 | class CampfireIntegration(IntegrationBase): |
|
36 | __mapper_args__ = { | |
|
37 | 'polymorphic_identity': 'campfire' | |
|
38 | } | |
|
35 | __mapper_args__ = {"polymorphic_identity": "campfire"} | |
|
39 | 36 | front_visible = False |
|
40 | 37 | as_alert_channel = True |
|
41 | 38 | supports_report_alerting = True |
|
42 | 39 | action_notification = True |
|
43 | integration_action = 'Message via Campfire' | 

40 | integration_action = "Message via Campfire" | |
|
44 | 41 | |
|
45 | 42 | @classmethod |
|
46 | 43 | def create_client(cls, api_token, account): |
|
47 | 44 | client = CampfireClient(api_token, account) |
|
48 | 45 | return client |
|
49 | 46 | |
|
50 | 47 | |
|
51 | 48 | class CampfireClient(object): |
|
52 | 49 | def __init__(self, api_token, account): |
|
53 | request = Request('https://%s.campfirenow.com' % account, api_token) | 

50 | request = Request("https://%s.campfirenow.com" % account, api_token) | |
|
54 | 51 | self.campfire = Campfire(request) |
|
55 | 52 | |
|
56 | 53 | def get_account(self): |
|
57 | 54 | try: |
|
58 | 55 | return self.campfire.account() |
|
59 | 56 | except (HTTPError, CamplightException) as e: |
|
60 | 57 | raise IntegrationException(str(e)) |
|
61 | 58 | |
|
62 | 59 | def get_rooms(self): |
|
63 | 60 | try: |
|
64 | 61 | return self.campfire.rooms() |
|
65 | 62 | except (HTTPError, CamplightException) as e: |
|
66 | 63 | raise IntegrationException(str(e)) |
|
67 | 64 | |
|
68 | def speak_to_room(self, room, message, sound='RIMSHOT'): | 

65 | def speak_to_room(self, room, message, sound="RIMSHOT"): | |
|
69 | 66 | try: |
|
70 | 67 | room = self.campfire.room(room) |
|
71 | 68 | room.join() |
|
72 | room.speak(message, type_='TextMessage') | 

69 | room.speak(message, type_="TextMessage") | |
|
73 | 70 | except (HTTPError, CamplightException, ConnectionError) as e: |
|
74 | 71 | raise IntegrationException(str(e)) |
@@ -1,82 +1,83 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | import requests |
|
20 | 20 | from requests.auth import HTTPBasicAuth |
|
21 | 21 | import simplejson as json |
|
22 | 22 | |
|
23 | from appenlight.models.integrations import (IntegrationBase, | 

24 | IntegrationException) | |
|
23 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
|
25 | 24 | |
|
26 | 25 | _ = str |
|
27 | 26 | |
|
28 | 27 | log = logging.getLogger(__name__) |
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | class NotFoundException(Exception): |
|
32 | 31 | pass |
|
33 | 32 | |
|
34 | 33 | |
|
35 | 34 | class FlowdockIntegration(IntegrationBase): |
|
36 | __mapper_args__ = { | |
|
37 | 'polymorphic_identity': 'flowdock' | |
|
38 | } | |
|
35 | __mapper_args__ = {"polymorphic_identity": "flowdock"} | |
|
39 | 36 | front_visible = False |
|
40 | 37 | as_alert_channel = True |
|
41 | 38 | supports_report_alerting = True |
|
42 | 39 | action_notification = True |
|
43 | integration_action = 'Message via Flowdock' | 

40 | integration_action = "Message via Flowdock" | |
|
44 | 41 | |
|
45 | 42 | @classmethod |
|
46 | 43 | def create_client(cls, api_token): |
|
47 | 44 | client = FlowdockClient(api_token) |
|
48 | 45 | return client |
|
49 | 46 | |
|
50 | 47 | |
|
51 | 48 | class FlowdockClient(object): |
|
52 | 49 | def __init__(self, api_token): |
|
53 | self.auth = HTTPBasicAuth(api_token, '') |

50 | self.auth = HTTPBasicAuth(api_token, "") | |
|
54 | 51 | self.api_token = api_token |
|
55 | self.api_url = 'https://api.flowdock.com/v1/messages' |

52 | self.api_url = "https://api.flowdock.com/v1/messages" | |
|
56 | 53 | |
|
57 | def make_request(self, url, method='get', data=None): |

54 | def make_request(self, url, method="get", data=None): | |
|
58 | 55 | headers = { |
|
59 | 'Content-Type': 'application/json', |

60 | 'User-Agent': 'appenlight-flowdock', |

56 | "Content-Type": "application/json", | |
|
57 | "User-Agent": "appenlight-flowdock", | |
|
61 | 58 | } |
|
62 | 59 | try: |
|
63 | 60 | if data: |
|
64 | 61 | data = json.dumps(data) |
|
65 | resp = getattr(requests, method)(url, data=data, headers=headers, |

66 | timeout=10) |
|
62 | resp = getattr(requests, method)( | |
|
63 | url, data=data, headers=headers, timeout=10 | |
|
64 | ) | |
|
67 | 65 | except Exception as e: |
|
68 | 66 | raise IntegrationException( |
|
69 | _('Error communicating with Flowdock: %s') % (e,)) |

67 | _("Error communicating with Flowdock: %s") % (e,) | |
|
68 | ) | |
|
70 | 69 | if resp.status_code > 299: |
|
71 | 70 | raise IntegrationException(resp.text) |
|
72 | 71 | return resp |
|
73 | 72 | |
|
74 | 73 | def send_to_chat(self, payload): |
|
75 | url = '%(api_url)s/chat/%(api_token)s' % {'api_url': self.api_url, |

76 | 'api_token': self.api_token} |

77 | return self.make_request(url, method='post', data=payload).json() |
|
74 | url = "%(api_url)s/chat/%(api_token)s" % { | |
|
75 | "api_url": self.api_url, | |
|
76 | "api_token": self.api_token, | |
|
77 | } | |
|
78 | return self.make_request(url, method="post", data=payload).json() | |
|
78 | 79 | |
|
79 | 80 | def send_to_inbox(self, payload): |
|
80 | f_args = {'api_url': self.api_url, 'api_token': self.api_token} |

81 | url = '%(api_url)s/team_inbox/%(api_token)s' % f_args |

82 | return self.make_request(url, method='post', data=payload).json() |

81 | f_args = {"api_url": self.api_url, "api_token": self.api_token} | |
|
82 | url = "%(api_url)s/team_inbox/%(api_token)s" % f_args | |
|
83 | return self.make_request(url, method="post", data=payload).json() |
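
A quick sketch of how the FlowdockClient above is driven; the API token is a placeholder and the payload keys follow the Flowdock v1 team-inbox conventions, which this diff does not itself document:

    # Hedged sketch: the token and message fields are hypothetical examples.
    from appenlight.models.integrations.flowdock import FlowdockClient  # path assumed

    client = FlowdockClient("example-api-token")
    payload = {
        "source": "AppEnlight",
        "from_address": "alerts@example.com",
        "subject": "New report group",
        "content": "5 occurrences of ValueError in production",
    }
    # send_to_inbox POSTs JSON to https://api.flowdock.com/v1/messages/team_inbox/<token>
    client.send_to_inbox(payload)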
@@ -1,156 +1,158 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import json |
|
18 | 18 | import requests |
|
19 | 19 | |
|
20 | 20 | from . import IntegrationBase, IntegrationException |
|
21 | 21 | |
|
22 | 22 | _ = str |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class GithubAuthException(Exception): |
|
26 | 26 | pass |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | class GithubIntegration(IntegrationBase): |
|
30 | __mapper_args__ = { | |
|
31 | 'polymorphic_identity': 'github' | |
|
32 | } | |
|
30 | __mapper_args__ = {"polymorphic_identity": "github"} | |
|
33 | 31 | front_visible = True |
|
34 | 32 | as_alert_channel = False |
|
35 | 33 | supports_report_alerting = False |
|
36 | 34 | action_notification = True |
|
37 | integration_action = 'Add issue to Github' |

35 | integration_action = "Add issue to Github" | |
|
38 | 36 | |
|
39 | 37 | @classmethod |
|
40 | 38 | def create_client(cls, request, user_name=None, repo_name=None): |
|
41 | 39 | """ |
|
42 | 40 | Creates REST client that can authenticate to specific repo |
|
43 | 41 | uses auth tokens for current request user |
|
44 | 42 | """ |
|
45 | 43 | token = None |
|
46 | 44 | secret = None |
|
47 | 45 | for identity in request.user.external_identities: |
|
48 | if identity.provider_name == 'github': |

46 | if identity.provider_name == "github": | |
|
49 | 47 | token = identity.access_token |
|
50 | 48 | secret = identity.token_secret |
|
51 | 49 | break |
|
52 | 50 | if not token: |
|
53 | raise IntegrationException( | |
|
54 | 'No valid auth token present for this service') | |
|
51 | raise IntegrationException("No valid auth token present for this service") | |
|
55 | 52 | client = GithubClient(token=token, owner=user_name, name=repo_name) |
|
56 | 53 | return client |
|
57 | 54 | |
|
58 | 55 | |
|
59 | 56 | class GithubClient(object): |
|
60 | api_url = 'https://api.github.com' |

61 | repo_type = 'github' |

57 | api_url = "https://api.github.com" | |
|
58 | repo_type = "github" | |
|
62 | 59 | |
|
63 | 60 | def __init__(self, token, owner, name): |
|
64 | 61 | self.access_token = token |
|
65 | 62 | self.owner = owner |
|
66 | 63 | self.name = name |
|
67 | 64 | |
|
68 | def make_request(self, url, method='get', data=None, headers=None): |

69 | req_headers = {'User-Agent': 'appenlight', |

70 | 'Content-Type': 'application/json', |

71 | 'Authorization': 'token %s' % self.access_token} |
|
65 | def make_request(self, url, method="get", data=None, headers=None): | |
|
66 | req_headers = { | |
|
67 | "User-Agent": "appenlight", | |
|
68 | "Content-Type": "application/json", | |
|
69 | "Authorization": "token %s" % self.access_token, | |
|
70 | } | |
|
72 | 71 | try: |
|
73 | 72 | if data: |
|
74 | 73 | data = json.dumps(data) |
|
75 | resp = getattr(requests, method)(url, data=data, |

76 | headers=req_headers, |

77 | timeout=10) |
|
74 | resp = getattr(requests, method)( | |
|
75 | url, data=data, headers=req_headers, timeout=10 | |
|
76 | ) | |
|
78 | 77 | except Exception as e: |
|
79 | msg = 'Error communicating with Github: %s' |

78 | msg = "Error communicating with Github: %s" | |
|
80 | 79 | raise IntegrationException(_(msg) % (e,)) |
|
81 | 80 | |
|
82 | 81 | if resp.status_code == 404: |
|
83 | msg = 'User or repo name are incorrect' |

82 | msg = "User or repo name are incorrect" | |
|
84 | 83 | raise IntegrationException(_(msg)) |
|
85 | 84 | if resp.status_code == 401: |
|
86 | msg = 'You are not authorized to access this repo' |

85 | msg = "You are not authorized to access this repo" | |
|
87 | 86 | raise IntegrationException(_(msg)) |
|
88 | 87 | elif resp.status_code not in [200, 201]: |
|
89 | msg = 'Github response_code: %s' |

88 | msg = "Github response_code: %s" | |
|
90 | 89 | raise IntegrationException(_(msg) % resp.status_code) |
|
91 | 90 | try: |
|
92 | 91 | return resp.json() |
|
93 | 92 | except Exception as e: |
|
94 | msg = 'Error decoding response from Github: %s' |

93 | msg = "Error decoding response from Github: %s" | |
|
95 | 94 | raise IntegrationException(_(msg) % (e,)) |
|
96 | 95 | |
|
97 | 96 | def get_statuses(self): |
|
98 | 97 | """Gets list of possible item statuses""" |
|
99 | url = '%(api_url)s/repos/%(owner)s/%(name)s/labels' % { |

100 | 'api_url': self.api_url, |

101 | 'owner': self.owner, |

102 | 'name': self.name} |

98 | url = "%(api_url)s/repos/%(owner)s/%(name)s/labels" % { | |
|
99 | "api_url": self.api_url, | |
|
100 | "owner": self.owner, | |
|
101 | "name": self.name, | |
|
102 | } | |
|
103 | 103 | |
|
104 | 104 | data = self.make_request(url) |
|
105 | 105 | |
|
106 | 106 | statuses = [] |
|
107 | 107 | for status in data: |
|
108 |
statuses.append(status[ |
|
|
108 | statuses.append(status["name"]) | |
|
109 | 109 | return statuses |
|
110 | 110 | |
|
111 | 111 | def get_repo(self): |
|
112 | 112 | """Gets list of possible item statuses""" |
|
113 | url = '%(api_url)s/repos/%(owner)s/%(name)s' % { |

114 | 'api_url': self.api_url, |

115 | 'owner': self.owner, |

116 | 'name': self.name} |

113 | url = "%(api_url)s/repos/%(owner)s/%(name)s" % { | |
|
114 | "api_url": self.api_url, | |
|
115 | "owner": self.owner, | |
|
116 | "name": self.name, | |
|
117 | } | |
|
117 | 118 | |
|
118 | 119 | data = self.make_request(url) |
|
119 | 120 | return data |
|
120 | 121 | |
|
121 | 122 | def get_assignees(self): |
|
122 | 123 | """Gets list of possible assignees""" |
|
123 | url = '%(api_url)s/repos/%(owner)s/%(name)s/collaborators' % { |

124 | 'api_url': self.api_url, |

125 | 'owner': self.owner, |

126 | 'name': self.name} |

124 | url = "%(api_url)s/repos/%(owner)s/%(name)s/collaborators" % { | |
|
125 | "api_url": self.api_url, | |
|
126 | "owner": self.owner, | |
|
127 | "name": self.name, | |
|
128 | } | |
|
127 | 129 | data = self.make_request(url) |
|
128 | 130 | results = [] |
|
129 | 131 | for entry in data: |
|
130 | results.append({"user": entry['login'], |

131 | "name": entry.get('name')}) |
|
132 | results.append({"user": entry["login"], "name": entry.get("name")}) | |
|
132 | 133 | return results |
|
133 | 134 | |
|
134 | 135 | def create_issue(self, form_data): |
|
135 | 136 | """ |
|
136 | 137 | Make a REST call to create issue in Github's issue tracker |
|
137 | 138 | """ |
|
138 | url = '%(api_url)s/repos/%(owner)s/%(name)s/issues' % { |

139 | 'api_url': self.api_url, |

140 | 'owner': self.owner, |

141 | 'name': self.name} |

139 | url = "%(api_url)s/repos/%(owner)s/%(name)s/issues" % { | |
|
140 | "api_url": self.api_url, | |
|
141 | "owner": self.owner, | |
|
142 | "name": self.name, | |
|
143 | } | |
|
142 | 144 | |
|
143 | 145 | payload = { |
|
144 | "title": form_data['title'], |

145 | "body": form_data['content'], |

146 | "title": form_data["title"], | |
|
147 | "body": form_data["content"], | |
|
146 | 148 | "labels": [], |
|
147 | "assignee": form_data['responsible'], |

149 | "assignee": form_data["responsible"], | |
|
148 | 150 | } |
|
149 | payload['labels'].extend(form_data['kind']) |

150 | data = self.make_request(url, 'post', data=payload) |

151 | payload["labels"].extend(form_data["kind"]) | |
|
152 | data = self.make_request(url, "post", data=payload) | |
|
151 | 153 | to_return = { |
|
152 | 'id': data['number'], |

153 | 'resource_url': data['url'], |

154 | 'web_url': data['html_url'] |

154 | "id": data["number"], | |
|
155 | "resource_url": data["url"], | |
|
156 | "web_url": data["html_url"], | |
|
155 | 157 | } |
|
156 | 158 | return to_return |
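
The create_issue() flow above expects a form_data dict with title, content, kind, and responsible keys, and returns the new issue's number and URLs; a sketch with placeholder credentials and repo names:

    # Hedged sketch: the token, owner, repo, and assignee are hypothetical placeholders.
    from appenlight.models.integrations.github import GithubClient  # path assumed

    client = GithubClient(token="example-oauth-token", owner="example-org", name="example-repo")
    issue = client.create_issue({
        "title": "Server error on /checkout",
        "content": "Traceback copied from the AppEnlight report",
        "kind": ["bug"],           # extends the issue's labels list
        "responsible": "octocat",  # Github login used as assignee
    })
    print(issue["web_url"])        # html_url returned by the Github API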
@@ -1,83 +1,75 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | import requests |
|
20 | 20 | |
|
21 | 21 | from . import IntegrationBase, IntegrationException |
|
22 | 22 | |
|
23 | 23 | _ = str |
|
24 | 24 | |
|
25 | 25 | log = logging.getLogger(__name__) |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class NotFoundException(Exception): |
|
29 | 29 | pass |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | class HipchatIntegration(IntegrationBase): |
|
33 | __mapper_args__ = { | |
|
34 | 'polymorphic_identity': 'hipchat' | |
|
35 | } | |
|
33 | __mapper_args__ = {"polymorphic_identity": "hipchat"} | |
|
36 | 34 | front_visible = False |
|
37 | 35 | as_alert_channel = True |
|
38 | 36 | supports_report_alerting = True |
|
39 | 37 | action_notification = True |
|
40 | integration_action = 'Message via Hipchat' |

38 | integration_action = "Message via Hipchat" | |
|
41 | 39 | |
|
42 | 40 | @classmethod |
|
43 | 41 | def create_client(cls, api_token): |
|
44 | 42 | client = HipchatClient(api_token) |
|
45 | 43 | return client |
|
46 | 44 | |
|
47 | 45 | |
|
48 | 46 | class HipchatClient(object): |
|
49 | 47 | def __init__(self, api_token): |
|
50 | 48 | self.api_token = api_token |
|
51 | self.api_url = 'https://api.hipchat.com/v1' |

52 | |

53 | def make_request(self, endpoint, method='get', data=None): |

54 | headers = { | |
|
55 | 'User-Agent': 'appenlight-hipchat', | |
|
56 | } | |
|
57 | url = '%s%s' % (self.api_url, endpoint) | |
|
58 | params = { | |
|
59 | 'format': 'json', | |
|
60 | 'auth_token': self.api_token | |
|
61 | } | |
|
49 | self.api_url = "https://api.hipchat.com/v1" | |
|
50 | ||
|
51 | def make_request(self, endpoint, method="get", data=None): | |
|
52 | headers = {"User-Agent": "appenlight-hipchat"} | |
|
53 | url = "%s%s" % (self.api_url, endpoint) | |
|
54 | params = {"format": "json", "auth_token": self.api_token} | |
|
62 | 55 | try: |
|
63 | resp = getattr(requests, method)(url, data=data, headers=headers, |

64 | params=params, |

65 | timeout=3) |
|
56 | resp = getattr(requests, method)( | |
|
57 | url, data=data, headers=headers, params=params, timeout=3 | |
|
58 | ) | |
|
66 | 59 | except Exception as e: |
|
67 | msg = 'Error communicating with Hipchat: %s' |

60 | msg = "Error communicating with Hipchat: %s" | |
|
68 | 61 | raise IntegrationException(_(msg) % (e,)) |
|
69 | 62 | if resp.status_code == 404: |
|
70 | msg = 'Error communicating with Hipchat - Room not found' |

63 | msg = "Error communicating with Hipchat - Room not found" | |
|
71 | 64 | raise IntegrationException(msg) |
|
72 | 65 | elif resp.status_code != requests.codes.ok: |
|
73 | msg = 'Error communicating with Hipchat - status code: %s' |

66 | msg = "Error communicating with Hipchat - status code: %s" | |
|
74 | 67 | raise IntegrationException(msg % resp.status_code) |
|
75 | 68 | return resp |
|
76 | 69 | |
|
77 | 70 | def get_rooms(self): |
|
78 | 71 | # not used with notification api token |
|
79 | return self.make_request('/rooms/list') |

72 | return self.make_request("/rooms/list") | |
|
80 | 73 | |
|
81 | 74 | def send(self, payload): |
|
82 | return self.make_request('/rooms/message', method='post', |

83 | data=payload).json() |
|
75 | return self.make_request("/rooms/message", method="post", data=payload).json() |
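
The Hipchat client above wraps the legacy v1 REST API; a sketch of posting a room notification, where the token and room are placeholders and the payload keys come from the v1 rooms/message endpoint rather than from this diff:

    # Hedged sketch: the token and room name are hypothetical placeholders.
    from appenlight.models.integrations.hipchat import HipchatClient  # path assumed

    client = HipchatClient("example-v1-token")
    client.send({
        "room_id": "Support",
        "from": "AppEnlight",
        "message": "New alert triggered",
        "color": "red",
    })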
@@ -1,136 +1,137 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import jira |
|
18 | from appenlight.models.integrations import (IntegrationBase, |

19 | IntegrationException) |
|
18 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
|
20 | 19 | |
|
21 | 20 | _ = str |
|
22 | 21 | |
|
23 | 22 | |
|
24 | 23 | class NotFoundException(Exception): |
|
25 | 24 | pass |
|
26 | 25 | |
|
27 | 26 | |
|
28 | 27 | class JiraIntegration(IntegrationBase): |
|
29 | __mapper_args__ = { | |
|
30 | 'polymorphic_identity': 'jira' | |
|
31 | } | |
|
28 | __mapper_args__ = {"polymorphic_identity": "jira"} | |
|
32 | 29 | front_visible = True |
|
33 | 30 | as_alert_channel = False |
|
34 | 31 | supports_report_alerting = False |
|
35 | 32 | action_notification = True |
|
36 | integration_action = 'Add issue to Jira' |

33 | integration_action = "Add issue to Jira" | |
|
37 | 34 | |
|
38 | 35 | |
|
39 | 36 | class JiraClient(object): |
|
40 | 37 | def __init__(self, user_name, password, host_name, project, request=None): |
|
41 | 38 | self.user_name = user_name |
|
42 | 39 | self.password = password |
|
43 | 40 | self.host_name = host_name |
|
44 | 41 | self.project = project |
|
45 | 42 | self.request = request |
|
46 | 43 | try: |
|
47 | self.client = jira.client.JIRA(options={'server': host_name}, |

48 | basic_auth=(user_name, password)) |

44 | self.client = jira.client.JIRA( | |
|
45 | options={"server": host_name}, basic_auth=(user_name, password) | |
|
46 | ) | |
|
49 | 47 | except jira.JIRAError as e: |
|
50 | 48 | raise IntegrationException( |
|
51 | 'Communication problem: HTTP_STATUS:%s, URL:%s ' % ( |

52 | e.status_code, e.url)) |

49 | "Communication problem: HTTP_STATUS:%s, URL:%s " | |
|
50 | % (e.status_code, e.url) | |
|
51 | ) | |
|
53 | 52 | |
|
54 | 53 | def get_projects(self): |
|
55 | 54 | projects = self.client.projects() |
|
56 | 55 | return projects |
|
57 | 56 | |
|
58 | 57 | def get_assignees(self, request): |
|
59 | 58 | """Gets list of possible assignees""" |
|
60 | 59 | cache_region = request.registry.cache_regions.redis_sec_30 |
|
61 | @cache_region.cache_on_arguments('JiraClient.get_assignees') | |
|
60 | ||
|
61 | @cache_region.cache_on_arguments("JiraClient.get_assignees") | |
|
62 | 62 | def cached(project_name): |
|
63 | 63 | users = self.client.search_assignable_users_for_issues( |
|
64 | None, project=project_name) |

64 | None, project=project_name | |
|
65 | ) | |
|
65 | 66 | results = [] |
|
66 | 67 | for user in users: |
|
67 | 68 | results.append({"id": user.name, "name": user.displayName}) |
|
68 | 69 | return results |
|
70 | ||
|
69 | 71 | return cached(self.project) |
|
70 | 72 | |
|
71 | 73 | def get_issue_types(self, request): |
|
72 | 74 | metadata = self.get_metadata(request) |
|
73 | 75 | assignees = self.get_assignees(request) |
|
74 | 76 | parsed_metadata = [] |
|
75 | for entry in metadata['projects'][0]['issuetypes']: |

76 | issue = {"name": entry['name'], |

77 | "id": entry['id'], | |
|
78 | "fields": []} | |
|
79 | for i_id, field_i in entry['fields'].items(): | |
|
77 | for entry in metadata["projects"][0]["issuetypes"]: | |
|
78 | issue = {"name": entry["name"], "id": entry["id"], "fields": []} | |
|
79 | for i_id, field_i in entry["fields"].items(): | |
|
80 | 80 | field = { |
|
81 | "name": field_i['name'], |

81 | "name": field_i["name"], | |
|
82 | 82 | "id": i_id, |
|
83 | "required": field_i['required'], |

83 | "required": field_i["required"], | |
|
84 | 84 | "values": [], |
|
85 | "type": field_i['schema'].get('type') |

85 | "type": field_i["schema"].get("type"), | |
|
86 | 86 | } |
|
87 | if field_i.get('allowedValues'): |

88 | field['values'] = [] |

89 | for i in field_i['allowedValues']: |

90 | field['values'].append( |

91 | {'id': i['id'], |

92 | 'name': i.get('name', i.get('value', '')) |

93 | }) |

94 | if field['id'] == 'assignee': |

95 | field['values'] = assignees | |
|
96 | issue['fields'].append(field) | |
|
87 | if field_i.get("allowedValues"): | |
|
88 | field["values"] = [] | |
|
89 | for i in field_i["allowedValues"]: | |
|
90 | field["values"].append( | |
|
91 | {"id": i["id"], "name": i.get("name", i.get("value", ""))} | |
|
92 | ) | |
|
93 | if field["id"] == "assignee": | |
|
94 | field["values"] = assignees | |
|
95 | issue["fields"].append(field) | |
|
97 | 96 | parsed_metadata.append(issue) |
|
98 | 97 | return parsed_metadata |
|
99 | 98 | |
|
100 | 99 | def get_metadata(self, request): |
|
101 | 100 | # cache_region = request.registry.cache_regions.redis_sec_30 |
|
102 | 101 | # @cache_region.cache_on_arguments('JiraClient.get_metadata') |
|
103 | 102 | def cached(project_name): |
|
104 | 103 | return self.client.createmeta( |
|
105 | projectKeys=project_name, expand='projects.issuetypes.fields') |

104 | projectKeys=project_name, expand="projects.issuetypes.fields" | |
|
105 | ) | |
|
106 | ||
|
106 | 107 | return cached(self.project) |
|
107 | 108 | |
|
108 | 109 | def create_issue(self, form_data, request): |
|
109 | 110 | issue_types = self.get_issue_types(request) |
|
110 | 111 | payload = { |
|
111 | 'project': {'key': form_data['project']}, |

112 | 'summary': form_data['title'], |

113 | 'description': form_data['content'], |

114 | 'issuetype': {'id': form_data['issue_type']}, |

115 | "priority": {'id': form_data['priority']}, |

116 | "assignee": {'name': form_data['responsible']}, |

112 | "project": {"key": form_data["project"]}, | |
|
113 | "summary": form_data["title"], | |
|
114 | "description": form_data["content"], | |
|
115 | "issuetype": {"id": form_data["issue_type"]}, | |
|
116 | "priority": {"id": form_data["priority"]}, | |
|
117 | "assignee": {"name": form_data["responsible"]}, | |
|
117 | 118 | } |
|
118 | 119 | for issue_type in issue_types: |
|
119 | if issue_type['id'] == form_data['issue_type']: |

120 | for field in issue_type['fields']: |

120 | if issue_type["id"] == form_data["issue_type"]: | |
|
121 | for field in issue_type["fields"]: | |
|
121 | 122 | # set some defaults for other required fields |
|
122 | if field == 'reporter': |

123 | payload["reporter"] = {'id': self.user_name} |

124 | if field['required'] and field['id'] not in payload: |

125 | if field['type'] == 'array': |

126 | payload[field['id']] = [field['values'][0]] |

127 | elif field['type'] == 'string': |

128 | payload[field['id']] = '' |

123 | if field == "reporter": | |
|
124 | payload["reporter"] = {"id": self.user_name} | |
|
125 | if field["required"] and field["id"] not in payload: | |
|
126 | if field["type"] == "array": | |
|
127 | payload[field["id"]] = [field["values"][0]] | |
|
128 | elif field["type"] == "string": | |
|
129 | payload[field["id"]] = "" | |
|
129 | 130 | new_issue = self.client.create_issue(fields=payload) |
|
130 | web_url = self.host_name + '/browse/' + new_issue.key |

131 | web_url = self.host_name + "/browse/" + new_issue.key | |
|
131 | 132 | to_return = { |
|
132 | 'id': new_issue.id, |

133 | 'resource_url': new_issue.self, |

134 | 'web_url': web_url |

133 | "id": new_issue.id, | |
|
134 | "resource_url": new_issue.self, | |
|
135 | "web_url": web_url, | |
|
135 | 136 | } |
|
136 | 137 | return to_return |
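
Creating a Jira issue through the client above takes an authenticated JiraClient plus a form_data dict whose issue_type and priority ids come from get_issue_types(); a sketch with placeholder credentials, assuming request is an active Pyramid request (get_assignees() needs its cache regions):

    # Hedged sketch: all credentials, hosts, and ids below are hypothetical.
    from appenlight.models.integrations.jira import JiraClient  # path assumed

    def open_jira_issue(request):
        client = JiraClient("jira-user", "secret", "https://jira.example.com",
                            "PROJ", request=request)
        issue = client.create_issue({
            "project": "PROJ",
            "title": "Server error on /checkout",
            "content": "Details copied from the AppEnlight report",
            "issue_type": "1",      # id taken from get_issue_types(request)
            "priority": "3",
            "responsible": "jdoe",  # Jira username of the assignee
        }, request)
        return issue["web_url"]     # host_name + "/browse/" + issue key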
@@ -1,74 +1,65 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | import requests |
|
20 | 20 | |
|
21 | from appenlight.models.integrations import (IntegrationBase, |

22 | IntegrationException) |
|
21 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
|
23 | 22 | from appenlight.lib.ext_json import json |
|
24 | 23 | |
|
25 | 24 | _ = str |
|
26 | 25 | |
|
27 | 26 | log = logging.getLogger(__name__) |
|
28 | 27 | |
|
29 | 28 | |
|
30 | 29 | class NotFoundException(Exception): |
|
31 | 30 | pass |
|
32 | 31 | |
|
33 | 32 | |
|
34 | 33 | class SlackIntegration(IntegrationBase): |
|
35 | __mapper_args__ = { | |
|
36 | 'polymorphic_identity': 'slack' | |
|
37 | } | |
|
34 | __mapper_args__ = {"polymorphic_identity": "slack"} | |
|
38 | 35 | front_visible = False |
|
39 | 36 | as_alert_channel = True |
|
40 | 37 | supports_report_alerting = True |
|
41 | 38 | action_notification = True |
|
42 | integration_action = 'Message via Slack' |

39 | integration_action = "Message via Slack" | |
|
43 | 40 | |
|
44 | 41 | @classmethod |
|
45 | 42 | def create_client(cls, api_token): |
|
46 | 43 | client = SlackClient(api_token) |
|
47 | 44 | return client |
|
48 | 45 | |
|
49 | 46 | |
|
50 | 47 | class SlackClient(object): |
|
51 | 48 | def __init__(self, api_url): |
|
52 | 49 | self.api_url = api_url |
|
53 | 50 | |
|
54 | 51 | def make_request(self, data=None): |
|
55 | headers = { | |
|
56 | 'User-Agent': 'appenlight-slack', | |
|
57 | 'Content-Type': 'application/json' | |
|
58 | } | |
|
52 | headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"} | |
|
59 | 53 | try: |
|
60 | resp = getattr(requests, 'post')(self.api_url, |

61 | data=json.dumps(data), | |
|
62 | headers=headers, | |
|
63 | timeout=3) | |
|
54 | resp = getattr(requests, "post")( | |
|
55 | self.api_url, data=json.dumps(data), headers=headers, timeout=3 | |
|
56 | ) | |
|
64 | 57 | except Exception as e: |
|
65 | raise IntegrationException( | |
|
66 | _('Error communicating with Slack: %s') % (e,)) | |
|
58 | raise IntegrationException(_("Error communicating with Slack: %s") % (e,)) | |
|
67 | 59 | if resp.status_code != requests.codes.ok: |
|
68 |
msg = |
|
|
60 | msg = "Error communicating with Slack - status code: %s" | |
|
69 | 61 | raise IntegrationException(msg % resp.status_code) |
|
70 | 62 | return resp |
|
71 | 63 | |
|
72 | 64 | def send(self, payload): |
|
73 | return self.make_request('/rooms/message', method='post', |

74 | data=payload).json() | |
|
65 | return self.make_request("/rooms/message", method="post", data=payload).json() |
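
One detail worth noting in the Slack file: make_request() accepts only data and always POSTs to self.api_url, while send() forwards a path and method argument that make_request() does not take, so a direct call is the safer sketch here; the webhook URL and message are placeholders:

    # Hedged sketch: the incoming-webhook URL and message are hypothetical.
    from appenlight.models.integrations.slack import SlackClient  # path assumed

    client = SlackClient("https://hooks.slack.com/services/T000/B000/XXXX")
    client.make_request(data={"channel": "#alerts", "text": "New alert triggered"})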
@@ -1,138 +1,145 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | import requests |
|
20 | 20 | |
|
21 | from appenlight.models.integrations import (IntegrationBase, |

22 | IntegrationException) |
|
21 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
|
23 | 22 | from appenlight.models.alert_channel import AlertChannel |
|
24 | 23 | from appenlight.lib.ext_json import json |
|
25 | 24 | |
|
26 | 25 | _ = str |
|
27 | 26 | |
|
28 | 27 | log = logging.getLogger(__name__) |
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | class NotFoundException(Exception): |
|
32 | 31 | pass |
|
33 | 32 | |
|
34 | 33 | |
|
35 | 34 | class WebhooksIntegration(IntegrationBase): |
|
36 | __mapper_args__ = { | |
|
37 | 'polymorphic_identity': 'webhooks' | |
|
38 | } | |
|
35 | __mapper_args__ = {"polymorphic_identity": "webhooks"} | |
|
39 | 36 | front_visible = False |
|
40 | 37 | as_alert_channel = True |
|
41 | 38 | supports_report_alerting = True |
|
42 | 39 | action_notification = True |
|
43 | integration_action = 'Message via Webhooks' |

40 | integration_action = "Message via Webhooks" | |
|
44 | 41 | |
|
45 | 42 | @classmethod |
|
46 | 43 | def create_client(cls, url): |
|
47 | 44 | client = WebhooksClient(url) |
|
48 | 45 | return client |
|
49 | 46 | |
|
50 | 47 | |
|
51 | 48 | class WebhooksClient(object): |
|
52 | 49 | def __init__(self, url): |
|
53 | 50 | self.api_url = url |
|
54 | 51 | |
|
55 | def make_request(self, url, method='get', data=None): |

52 | def make_request(self, url, method="get", data=None): | |
|
56 | 53 | headers = { |
|
57 | 'Content-Type': 'application/json', |

58 | 'User-Agent': 'appenlight-webhooks', |

54 | "Content-Type": "application/json", | |
|
55 | "User-Agent": "appenlight-webhooks", | |
|
59 | 56 | } |
|
60 | 57 | try: |
|
61 | 58 | if data: |
|
62 | 59 | data = json.dumps(data) |
|
63 | resp = getattr(requests, method)(url, data=data, headers=headers, | |
|
64 | timeout=3) | |
|
60 | resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3) | |
|
65 | 61 | except Exception as e: |
|
66 | 62 | raise IntegrationException( |
|
67 | _('Error communicating with Webhooks: {}').format(e)) |

63 | _("Error communicating with Webhooks: {}").format(e) | |
|
64 | ) | |
|
68 | 65 | if resp.status_code > 299: |
|
69 | 66 | raise IntegrationException( |
|
70 | 'Error communicating with Webhooks - status code: {}'.format( |

71 | resp.status_code)) |

67 | "Error communicating with Webhooks - status code: {}".format( | |
|
68 | resp.status_code | |
|
69 | ) | |
|
70 | ) | |
|
72 | 71 | return resp |
|
73 | 72 | |
|
74 | 73 | def send_to_hook(self, payload): |
|
75 | return self.make_request(self.api_url, method='post', |

76 | data=payload).json() | |
|
74 | return self.make_request(self.api_url, method="post", data=payload).json() | |
|
77 | 75 | |
|
78 | 76 | |
|
79 | 77 | class WebhooksAlertChannel(AlertChannel): |
|
80 | __mapper_args__ = { | |
|
81 | 'polymorphic_identity': 'webhooks' | |
|
82 | } | |
|
78 | __mapper_args__ = {"polymorphic_identity": "webhooks"} | |
|
83 | 79 | |
|
84 | 80 | def notify_reports(self, **kwargs): |
|
85 | 81 | """ |
|
86 | 82 | Notify user of individual reports |
|
87 | 83 | |
|
88 | 84 | kwargs: |
|
89 | 85 | application: application that the event applies for, |
|
90 | 86 | user: user that should be notified |
|
91 | 87 | request: request object |
|
92 | 88 | since_when: reports are newer than this time value, |
|
93 | 89 | reports: list of reports to render |
|
94 | 90 | |
|
95 | 91 | """ |
|
96 | 92 | template_vars = self.get_notification_basic_vars(kwargs) |
|
97 | 93 | payload = [] |
|
98 | include_keys = ('id', 'http_status', 'report_type', 'resource_name', | |
|
99 | 'front_url', 'resource_id', 'error', 'url_path', | |
|
100 | 'tags', 'duration') | |
|
101 | ||
|
102 | for occurences, report in kwargs['reports']: | |
|
103 | r_dict = report.last_report_ref.get_dict(kwargs['request'], | |
|
104 | include_keys=include_keys) | |
|
105 | r_dict['group']['occurences'] = occurences | |
|
94 | include_keys = ( | |
|
95 | "id", | |
|
96 | "http_status", | |
|
97 | "report_type", | |
|
98 | "resource_name", | |
|
99 | "front_url", | |
|
100 | "resource_id", | |
|
101 | "error", | |
|
102 | "url_path", | |
|
103 | "tags", | |
|
104 | "duration", | |
|
105 | ) | |
|
106 | ||
|
107 | for occurences, report in kwargs["reports"]: | |
|
108 | r_dict = report.last_report_ref.get_dict( | |
|
109 | kwargs["request"], include_keys=include_keys | |
|
110 | ) | |
|
111 | r_dict["group"]["occurences"] = occurences | |
|
106 | 112 | payload.append(r_dict) |
|
107 | 113 | client = WebhooksIntegration.create_client( |
|
108 | self.integration.config['reports_webhook']) |

114 | self.integration.config["reports_webhook"] | |
|
115 | ) | |
|
109 | 116 | client.send_to_hook(payload) |
|
110 | 117 | |
|
111 | 118 | def notify_alert(self, **kwargs): |
|
112 | 119 | """ |
|
113 | 120 | Notify user of report or uptime threshold events based on events alert type |
|
114 | 121 | |
|
115 | 122 | Kwargs: |
|
116 | 123 | application: application that the event applies for, |
|
117 | 124 | event: event that is notified, |
|
118 | 125 | user: user that should be notified |
|
119 | 126 | request: request object |
|
120 | 127 | |
|
121 | 128 | """ |
|
122 | 129 | payload = { |
|
123 | 'alert_action': kwargs['event'].unified_alert_action(), |

124 | 'alert_name': kwargs['event'].unified_alert_name(), |

125 | 'event_time': kwargs['event'].end_date or kwargs[ |

126 | 'event'].start_date, |

127 | 'resource_name': None, |

128 | 'resource_id': None | |
|
130 | "alert_action": kwargs["event"].unified_alert_action(), | |
|
131 | "alert_name": kwargs["event"].unified_alert_name(), | |
|
132 | "event_time": kwargs["event"].end_date or kwargs["event"].start_date, | |
|
133 | "resource_name": None, | |
|
134 | "resource_id": None, | |
|
129 | 135 | } |
|
130 | if kwargs['event'].values and kwargs['event'].values.get('reports'): |

131 | payload['reports'] = kwargs['event'].values.get('reports', []) |

132 | if 'application' in kwargs: |

133 | payload['resource_name'] = kwargs['application'].resource_name |

134 | payload['resource_id'] = kwargs['application'].resource_id |

136 | if kwargs["event"].values and kwargs["event"].values.get("reports"): | |
|
137 | payload["reports"] = kwargs["event"].values.get("reports", []) | |
|
138 | if "application" in kwargs: | |
|
139 | payload["resource_name"] = kwargs["application"].resource_name | |
|
140 | payload["resource_id"] = kwargs["application"].resource_id | |
|
135 | 141 | |
|
136 | 142 | client = WebhooksIntegration.create_client( |
|
137 | self.integration.config['alerts_webhook']) |

143 | self.integration.config["alerts_webhook"] | |
|
144 | ) | |
|
138 | 145 | client.send_to_hook(payload) |
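
The webhooks integration simply POSTs JSON payloads to a user-configured URL; a sketch with a placeholder endpoint (send_to_hook() calls .json() on the response, so the endpoint is assumed to reply with JSON):

    # Hedged sketch: the endpoint URL and payload below are hypothetical.
    from appenlight.models.integrations.webhooks import WebhooksClient  # path assumed

    client = WebhooksClient("https://example.com/appenlight-hook")
    client.send_to_hook([{"id": 1, "error": "ValueError", "occurences": 5}])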
@@ -1,130 +1,132 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | import logging |
|
19 | 19 | import hashlib |
|
20 | 20 | |
|
21 | 21 | from datetime import datetime |
|
22 | 22 | from appenlight.models import Base |
|
23 | 23 | from appenlight.lib.utils import convert_es_type |
|
24 | 24 | from appenlight.lib.enums import LogLevel |
|
25 | 25 | from sqlalchemy.dialects.postgresql import JSON |
|
26 | 26 | from ziggurat_foundations.models.base import BaseModel |
|
27 | 27 | |
|
28 | 28 | log = logging.getLogger(__name__) |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | class Log(Base, BaseModel): |
|
32 | __tablename__ = 'logs' |

33 | __table_args__ = {'implicit_returning': False} |

32 | __tablename__ = "logs" | |
|
33 | __table_args__ = {"implicit_returning": False} | |
|
34 | 34 | |
|
35 | 35 | log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True) |
|
36 | resource_id = sa.Column(sa.Integer(), |

37 | sa.ForeignKey('applications.resource_id', | |
|
38 | onupdate='CASCADE', | |
|
39 | ondelete='CASCADE'), | |
|
40 | nullable=False, | |
|
41 | index=True) | |
|
42 | log_level = sa.Column(sa.Unicode, nullable=False, index=True, | |
|
43 | default='INFO') | |
|
44 | message = sa.Column(sa.UnicodeText(), default='') |

45 | timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, |

46 | server_default=sa.func.now()) | |
|
36 | resource_id = sa.Column( | |
|
37 | sa.Integer(), | |
|
38 | sa.ForeignKey( | |
|
39 | "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE" | |
|
40 | ), | |
|
41 | nullable=False, | |
|
42 | index=True, | |
|
43 | ) | |
|
44 | log_level = sa.Column(sa.Unicode, nullable=False, index=True, default="INFO") | |
|
45 | message = sa.Column(sa.UnicodeText(), default="") | |
|
46 | timestamp = sa.Column( | |
|
47 | sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now() | |
|
48 | ) | |
|
47 | 49 | request_id = sa.Column(sa.Unicode()) |
|
48 | 50 | namespace = sa.Column(sa.Unicode()) |
|
49 | 51 | primary_key = sa.Column(sa.Unicode()) |
|
50 | 52 | |
|
51 | 53 | tags = sa.Column(JSON(), default={}) |
|
52 | 54 | permanent = sa.Column(sa.Boolean(), nullable=False, default=False) |
|
53 | 55 | |
|
54 | 56 | def __str__(self): |
|
55 | return self.__unicode__().encode('utf8') |

57 | return self.__unicode__().encode("utf8") | |
|
56 | 58 | |
|
57 | 59 | def __unicode__(self): |
|
58 | return '<Log id:%s, lv:%s, ns:%s >' % ( |

59 | self.log_id, self.log_level, self.namespace) |

60 | return "<Log id:%s, lv:%s, ns:%s >" % ( | |
|
61 | self.log_id, | |
|
62 | self.log_level, | |
|
63 | self.namespace, | |
|
64 | ) | |
|
60 | 65 | |
|
61 | 66 | def set_data(self, data, resource): |
|
62 | level = data.get('log_level').upper() |

67 | level = data.get("log_level").upper() | |
|
63 | 68 | self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN) |
|
64 | self.message = data.get('message', '') |

65 | server_name = data.get('server', '').lower() or 'unknown' |

66 | self.tags = { | |
|
67 | 'server_name': server_name | |
|
68 | } | |
|
69 | if data.get('tags'): | |
|
70 | for tag_tuple in data['tags']: | |
|
69 | self.message = data.get("message", "") | |
|
70 | server_name = data.get("server", "").lower() or "unknown" | |
|
71 | self.tags = {"server_name": server_name} | |
|
72 | if data.get("tags"): | |
|
73 | for tag_tuple in data["tags"]: | |
|
71 | 74 | self.tags[tag_tuple[0]] = tag_tuple[1] |
|
72 | self.timestamp = data['date'] |

73 | r_id = data.get('request_id', '') |

75 | self.timestamp = data["date"] | |
|
76 | r_id = data.get("request_id", "") | |
|
74 | 77 | if not r_id: |
|
75 | r_id = '' |

76 | self.request_id = r_id.replace('-', '') |

78 | r_id = "" | |
|
79 | self.request_id = r_id.replace("-", "") | |
|
77 | 80 | self.resource_id = resource.resource_id |
|
78 | self.namespace = data.get('namespace') or '' |

79 | self.permanent = data.get('permanent') |

80 | self.primary_key = data.get('primary_key') |

81 | self.namespace = data.get("namespace") or "" | |
|
82 | self.permanent = data.get("permanent") | |
|
83 | self.primary_key = data.get("primary_key") | |
|
81 | 84 | if self.primary_key is not None: |
|
82 | self.tags['appenlight_primary_key'] = self.primary_key |

85 | self.tags["appenlight_primary_key"] = self.primary_key | |
|
83 | 86 | |
|
84 | 87 | def get_dict(self): |
|
85 | 88 | instance_dict = super(Log, self).get_dict() |
|
86 | instance_dict['log_level'] = LogLevel.key_from_value(self.log_level) |

87 | instance_dict['resource_name'] = self.application.resource_name |

89 | instance_dict["log_level"] = LogLevel.key_from_value(self.log_level) | |
|
90 | instance_dict["resource_name"] = self.application.resource_name | |
|
88 | 91 | return instance_dict |
|
89 | 92 | |
|
90 | 93 | @property |
|
91 | 94 | def delete_hash(self): |
|
92 | 95 | if not self.primary_key: |
|
93 | 96 | return None |
|
94 | 97 | |
|
95 | to_hash = '{}_{}_{}'.format(self.resource_id, self.primary_key, |

96 | self.namespace) | |
|
97 | return hashlib.sha1(to_hash.encode('utf8')).hexdigest() | |
|
98 | to_hash = "{}_{}_{}".format(self.resource_id, self.primary_key, self.namespace) | |
|
99 | return hashlib.sha1(to_hash.encode("utf8")).hexdigest() | |
|
98 | 100 | |
|
99 | 101 | def es_doc(self): |
|
100 | 102 | tags = {} |
|
101 | 103 | tag_list = [] |
|
102 | 104 | for name, value in self.tags.items(): |
|
103 | 105 | # replace dot in indexed tag name |
|
104 | name = name.replace('.', '_') |

106 | name = name.replace(".", "_") | |
|
105 | 107 | tag_list.append(name) |
|
106 | 108 | tags[name] = { |
|
107 | 109 | "values": convert_es_type(value), |
|
108 | "numeric_values": value if ( |

109 | isinstance(value, (int, float)) and |

110 | not isinstance(value, bool)) else None | |
|
110 | "numeric_values": value | |
|
111 | if (isinstance(value, (int, float)) and not isinstance(value, bool)) | |
|
112 | else None, | |
|
111 | 113 | } |
|
112 | 114 | return { |
|
113 | 'pg_id': str(self.log_id), |

114 | 'delete_hash': self.delete_hash, |

115 | 'resource_id': self.resource_id, |

116 | 'request_id': self.request_id, |

117 | 'log_level': LogLevel.key_from_value(self.log_level), |

118 | 'timestamp': self.timestamp, |

119 | 'message': self.message if self.message else '', |

120 | 'namespace': self.namespace if self.namespace else '', |

121 | 'tags': tags, |

122 | 'tag_list': tag_list |

115 | "pg_id": str(self.log_id), | |
|
116 | "delete_hash": self.delete_hash, | |
|
117 | "resource_id": self.resource_id, | |
|
118 | "request_id": self.request_id, | |
|
119 | "log_level": LogLevel.key_from_value(self.log_level), | |
|
120 | "timestamp": self.timestamp, | |
|
121 | "message": self.message if self.message else "", | |
|
122 | "namespace": self.namespace if self.namespace else "", | |
|
123 | "tags": tags, | |
|
124 | "tag_list": tag_list, | |
|
123 | 125 | } |
|
124 | 126 | |
|
125 | 127 | @property |
|
126 | 128 | def partition_id(self): |
|
127 | 129 | if self.permanent: |
|
128 | return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m') |

130 | return "rcae_l_%s" % self.timestamp.strftime("%Y_%m") | |
|
129 | 131 | else: |
|
130 | return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m_%d') |

132 | return "rcae_l_%s" % self.timestamp.strftime("%Y_%m_%d") |
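
The partition_id property above decides which Elasticsearch index a log row lands in: permanent logs go to monthly rcae_l_YYYY_MM indices, transient ones to daily rcae_l_YYYY_MM_DD indices. A small sketch, assuming the model lives at appenlight.models.log:

    # Hedged sketch: demonstrates the partition naming only; no DB session needed.
    from datetime import datetime
    from appenlight.models.log import Log  # path assumed

    entry = Log()
    entry.timestamp = datetime(2017, 5, 14)
    entry.permanent = False
    print(entry.partition_id)  # rcae_l_2017_05_14
    entry.permanent = True
    print(entry.partition_id)  # rcae_l_2017_05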
@@ -1,64 +1,68 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from datetime import datetime |
|
18 | 18 | |
|
19 | 19 | import sqlalchemy as sa |
|
20 | 20 | from sqlalchemy.dialects.postgresql import JSON |
|
21 | 21 | |
|
22 | 22 | from ziggurat_foundations.models.base import BaseModel |
|
23 | 23 | from appenlight.lib.utils import convert_es_type |
|
24 | 24 | from appenlight.models import Base |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | class Metric(Base, BaseModel): |
|
28 | __tablename__ = 'metrics' |

29 | __table_args__ = {'implicit_returning': False} |

28 | __tablename__ = "metrics" | |
|
29 | __table_args__ = {"implicit_returning": False} | |
|
30 | 30 | |
|
31 | 31 | pkey = sa.Column(sa.BigInteger(), primary_key=True) |
|
32 | resource_id = sa.Column(sa.Integer(), |

33 | sa.ForeignKey('applications.resource_id'), | |
|
34 | nullable=False, primary_key=True) | |
|
35 | timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, | |
|
36 | server_default=sa.func.now()) | |
|
32 | resource_id = sa.Column( | |
|
33 | sa.Integer(), | |
|
34 | sa.ForeignKey("applications.resource_id"), | |
|
35 | nullable=False, | |
|
36 | primary_key=True, | |
|
37 | ) | |
|
38 | timestamp = sa.Column( | |
|
39 | sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now() | |
|
40 | ) | |
|
37 | 41 | tags = sa.Column(JSON(), default={}) |
|
38 | 42 | namespace = sa.Column(sa.Unicode(255)) |
|
39 | 43 | |
|
40 | 44 | @property |
|
41 | 45 | def partition_id(self): |
|
42 | return 'rcae_m_%s' % self.timestamp.strftime('%Y_%m_%d') |

46 | return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d") | |
|
43 | 47 | |
|
44 | 48 | def es_doc(self): |
|
45 | 49 | tags = {} |
|
46 | 50 | tag_list = [] |
|
47 | 51 | for name, value in self.tags.items(): |
|
48 | 52 | # replace dot in indexed tag name |
|
49 | name = name.replace('.', '_') |

53 | name = name.replace(".", "_") | |
|
50 | 54 | tag_list.append(name) |
|
51 | 55 | tags[name] = { |
|
52 | 56 | "values": convert_es_type(value), |
|
53 | "numeric_values": value if ( |

54 | isinstance(value, (int, float)) and |

55 | not isinstance(value, bool)) else None | |
|
57 | "numeric_values": value | |
|
58 | if (isinstance(value, (int, float)) and not isinstance(value, bool)) | |
|
59 | else None, | |
|
56 | 60 | } |
|
57 | 61 | |
|
58 | 62 | return { |
|
59 | 'resource_id': self.resource_id, |

60 | 'timestamp': self.timestamp, |

61 | 'namespace': self.namespace, |

62 | 'tags': tags, |

63 | 'tag_list': tag_list |

63 | "resource_id": self.resource_id, | |
|
64 | "timestamp": self.timestamp, | |
|
65 | "namespace": self.namespace, | |
|
66 | "tags": tags, | |
|
67 | "tag_list": tag_list, | |
|
64 | 68 | } |
@@ -1,40 +1,40 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from ziggurat_foundations.models.base import BaseModel |
|
19 | 19 | from sqlalchemy.dialects.postgresql import JSON |
|
20 | 20 | |
|
21 | 21 | from . import Base |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | class PluginConfig(Base, BaseModel): |
|
25 | __tablename__ = 'plugin_configs' |

25 | __tablename__ = "plugin_configs" | |
|
26 | 26 | |
|
27 | 27 | id = sa.Column(sa.Integer, primary_key=True) |
|
28 | 28 | plugin_name = sa.Column(sa.Unicode) |
|
29 | 29 | section = sa.Column(sa.Unicode) |
|
30 | 30 | config = sa.Column(JSON, nullable=False) |
|
31 | resource_id = sa.Column(sa.Integer(), |

32 | sa.ForeignKey('resources.resource_id', | |
|
33 | onupdate='cascade', | |
|
34 | ondelete='cascade')) | |
|
35 | owner_id = sa.Column(sa.Integer(), |

36 | sa.ForeignKey('users.id', onupdate='cascade', |

37 | ondelete='cascade')) | |
|
31 | resource_id = sa.Column( | |
|
32 | sa.Integer(), | |
|
33 | sa.ForeignKey("resources.resource_id", onupdate="cascade", ondelete="cascade"), | |
|
34 | ) | |
|
35 | owner_id = sa.Column( | |
|
36 | sa.Integer(), sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade") | |
|
37 | ) | |
|
38 | 38 | |
|
39 | 39 | def __json__(self, request): |
|
40 | 40 | return self.get_dict() |
|