Note: this change set was too big for the diff view, so the final file is cut off mid-hunk. File names were lost in extraction; judging by their contents, the five hunks below are, in order, setup.py, src/appenlight/__init__.py, src/appenlight/celery/__init__.py, src/appenlight/celery/encoders.py, and src/appenlight/celery/tasks.py. Each hunk is reproduced as its resulting (post-change) text; the removed side differed essentially only in string quoting and line wrapping, i.e. a Black-style reformat (the same change adds black under a new "lint" extra in setup.py).
@@ -1,81 +1,89 @@ setup.py (name inferred from content)

import os
import re

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, "README.rst")).read()
CHANGES = open(os.path.join(here, "CHANGELOG.rst")).read()

REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines()

compiled = re.compile("([^=><]*).*")


def parse_req(req):
    return compiled.search(req).group(1).strip()


requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]


def _get_meta_var(name, data, callback_handler=None):
    import re

    matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
    if matches:
        if not callable(callback_handler):
            callback_handler = lambda v: v

        return callback_handler(eval(matches.groups()[0]))


with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta:
    _metadata = _meta.read()

with open(os.path.join(here, "VERSION"), "r") as _meta_version:
    __version__ = _meta_version.read().strip()

__license__ = _get_meta_var("__license__", _metadata)
__author__ = _get_meta_var("__author__", _metadata)
__url__ = _get_meta_var("__url__", _metadata)

found_packages = find_packages("src")
found_packages.append("appenlight.migrations.versions")
setup(
    name="appenlight",
    description="appenlight",
    long_description=README + "\n\n" + CHANGES,
    classifiers=[
        "Programming Language :: Python",
        "Framework :: Pylons",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
    ],
    version=__version__,
    license=__license__,
    author=__author__,
    url=__url__,
    keywords="web wsgi bfg pylons pyramid",
    package_dir={"": "src"},
    packages=found_packages,
    include_package_data=True,
    zip_safe=False,
    test_suite="appenlight",
    install_requires=requires,
    extras_require={
        "dev": [
            "coverage",
            "pytest",
            "pyramid",
            "tox",
            "mock",
            "pytest-mock",
            "webtest",
        ],
        "lint": ["black"],
    },
    entry_points={
        "paste.app_factory": ["main = appenlight:main"],
        "console_scripts": [
            "appenlight-cleanup = appenlight.scripts.cleanup:main",
            "appenlight-initializedb = appenlight.scripts.initialize_db:main",
            "appenlight-migratedb = appenlight.scripts.migratedb:main",
            "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main",
            "appenlight-static = appenlight.scripts.static:main",
            "appenlight-make-config = appenlight.scripts.make_config:main",
        ],
    },
)
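
Both helpers above are easy to sanity-check in a REPL (the inputs below are made up). parse_req keeps everything in a requirement line up to the first =, <, or >, so pinned entries collapse to bare package names, and _get_meta_var evals the right-hand side of a module-level assignment found in a source string:

    >>> parse_req("requests>=2.18.0\n")
    'requests'
    >>> parse_req("redis==2.10.5")
    'redis'
    >>> _get_meta_var("__license__", '__license__ = "Apache 2.0"')
    'Apache 2.0'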
@@ -1,225 +1,254 @@ src/appenlight/__init__.py (name inferred from content)

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import logging
from elasticsearch import Elasticsearch
import redis
import os
import pkg_resources
from pkg_resources import iter_entry_points

import appenlight.lib.jinja2_filters as jinja2_filters
import appenlight.lib.encryption as encryption

from pyramid.config import PHASE3_CONFIG
from pyramid.authentication import AuthTktAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid_mailer.interfaces import IMailer
from pyramid.renderers import JSON
from pyramid_redis_sessions import session_factory_from_settings
from pyramid.settings import asbool, aslist
from pyramid.security import AllPermissionsList
from pyramid_authstack import AuthenticationStackPolicy
from redlock import Redlock
from sqlalchemy import engine_from_config

from appenlight.celery import configure_celery
from appenlight.lib.configurator import (
    CythonCompatConfigurator,
    register_appenlight_plugin,
)
from appenlight.lib import cache_regions
from appenlight.lib.ext_json import json
from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy

__license__ = "Apache 2.0"
__author__ = "RhodeCode GmbH"
__url__ = "http://rhodecode.com"
__version__ = pkg_resources.get_distribution("appenlight").parsed_version

json_renderer = JSON(serializer=json.dumps, indent=4)

log = logging.getLogger(__name__)


def datetime_adapter(obj, request):
    return obj.isoformat()


def all_permissions_adapter(obj, request):
    return "__all_permissions__"


json_renderer.add_adapter(datetime.datetime, datetime_adapter)
json_renderer.add_adapter(AllPermissionsList, all_permissions_adapter)


def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    auth_tkt_policy = AuthTktAuthenticationPolicy(
        settings["authtkt.secret"],
        hashalg="sha512",
        callback=groupfinder,
        max_age=2592000,
        secure=asbool(settings.get("authtkt.secure", "false")),
    )
    auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder)
    authorization_policy = ACLAuthorizationPolicy()
    authentication_policy = AuthenticationStackPolicy()
    authentication_policy.add_policy("auth_tkt", auth_tkt_policy)
    authentication_policy.add_policy("auth_token", auth_token_policy)
    # set crypto key
    encryption.ENCRYPTION_SECRET = settings.get("encryption_secret")
    # import this later so encryption key can be monkeypatched
    from appenlight.models import DBSession, register_datastores

    # registration
    settings["appenlight.disable_registration"] = asbool(
        settings.get("appenlight.disable_registration")
    )

    # update config with cometd info
    settings["cometd_servers"] = {
        "server": settings["cometd.server"],
        "secret": settings["cometd.secret"],
    }

    # Create the Pyramid Configurator.
    settings["_mail_url"] = settings["mailing.app_url"]
    config = CythonCompatConfigurator(
        settings=settings,
        authentication_policy=authentication_policy,
        authorization_policy=authorization_policy,
        root_factory="appenlight.security.RootFactory",
        default_permission="view",
    )
    # custom registry variables

    # resource type information
    config.registry.resource_types = ["resource", "application"]
    # plugin information
    config.registry.appenlight_plugins = {}

    config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN")
    config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view")

    # later, when config is available
    dogpile_config = {
        "url": settings["redis.url"],
        "redis_expiration_time": 86400,
        "redis_distributed_lock": True,
    }
    cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
    config.registry.cache_regions = cache_regions.regions
    engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps)
    DBSession.configure(bind=engine)

    # json renderer that serializes datetime
    config.add_renderer("json", json_renderer)
    config.add_request_method(
        "appenlight.lib.request.es_conn", "es_conn", property=True
    )
    config.add_request_method(
        "appenlight.lib.request.get_user", "user", reify=True, property=True
    )
    config.add_request_method(
        "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True
    )
    config.add_request_method(
        "appenlight.lib.request.safe_json_body",
        "safe_json_body",
        reify=True,
        property=True,
    )
    config.add_request_method(
        "appenlight.lib.request.unsafe_json_body",
        "unsafe_json_body",
        reify=True,
        property=True,
    )
    config.add_request_method(
        "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers"
    )
    config.add_request_method(
        "appenlight.lib.request.get_authomatic", "authomatic", reify=True
    )

    config.include("pyramid_redis_sessions")
    config.include("pyramid_tm")
    config.include("pyramid_jinja2")
    config.include("pyramid_mailer")
    config.include("appenlight_client.ext.pyramid_tween")
    config.include("ziggurat_foundations.ext.pyramid.sign_in")
    es_server_list = aslist(settings["elasticsearch.nodes"])
    redis_url = settings["redis.url"]
    log.warning("Elasticsearch server list: {}".format(es_server_list))
    log.warning("Redis server: {}".format(redis_url))
    config.registry.es_conn = Elasticsearch(es_server_list)
    config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)

    config.registry.redis_lockmgr = Redlock(
        [settings["redis.redlock.url"]], retry_count=0, retry_delay=0
    )
    # mailer bw compat
    config.registry.mailer = config.registry.getUtility(IMailer)

    # Configure sessions
    session_factory = session_factory_from_settings(settings)
    config.set_session_factory(session_factory)

    # Configure renderers and event subscribers
    config.add_jinja2_extension("jinja2.ext.loopcontrols")
    config.add_jinja2_search_path("appenlight:templates")
    # event subscribers
    config.add_subscriber(
        "appenlight.subscribers.application_created",
        "pyramid.events.ApplicationCreated",
    )
    config.add_subscriber(
        "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender"
    )
    config.add_subscriber(
        "appenlight.subscribers.new_request", "pyramid.events.NewRequest"
    )
    config.add_view_predicate(
        "context_type_class", "appenlight.predicates.contextTypeClass"
    )

    register_datastores(
        es_conn=config.registry.es_conn,
        redis_conn=config.registry.redis_conn,
        redis_lockmgr=config.registry.redis_lockmgr,
    )

    # base stuff and scan

    # need to ensure webassets exists otherwise config.override_asset()
    # throws exception
    if not os.path.exists(settings["webassets.dir"]):
        os.mkdir(settings["webassets.dir"])
    config.add_static_view(
        path="appenlight:webassets", name="static", cache_max_age=3600
    )
    config.override_asset(
        to_override="appenlight:webassets/", override_with=settings["webassets.dir"]
    )

    config.include("appenlight.views")
    config.include("appenlight.views.admin")
    config.scan(
        ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"]
    )

    config.add_directive("register_appenlight_plugin", register_appenlight_plugin)

    for entry_point in iter_entry_points(group="appenlight.plugins"):
        plugin = entry_point.load()
        plugin.includeme(config)

    # include other appenlight plugins explicitly if needed
    includes = aslist(settings.get("appenlight.includes", []))
    for inc in includes:
        config.include(inc)

    # run this after everything registers in configurator

    def pre_commit():
        jinja_env = config.get_jinja2_environment()
        jinja_env.filters["tojson"] = json.dumps
        jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe

    config.action(None, pre_commit, order=PHASE3_CONFIG + 999)

    def wrap_config_celery():
        configure_celery(config.registry)

    config.action(None, wrap_config_celery, order=PHASE3_CONFIG + 999)

    app = config.make_wsgi_app()
    return app
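
The long run of config.add_request_method calls above is the stock Pyramid pattern for attaching lazy, per-request properties by dotted name. A minimal standalone sketch of the same mechanism (the get_user helper here is made up for illustration, not AppEnlight code):

    from pyramid.config import Configurator

    def get_user(request):
        # runs at most once per request thanks to reify=True; the result is
        # cached on the request under the attribute name "user"
        return request.environ.get("REMOTE_USER")

    config = Configurator()
    config.add_request_method(get_user, "user", reify=True)
    # any view can now read request.user without recomputing it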
@@ -1,171 +1,181 @@ src/appenlight/celery/__init__.py (name inferred from content)

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

from datetime import timedelta
from celery import Celery
from celery.bin import Option
from celery.schedules import crontab
from celery.signals import worker_init, task_revoked, user_preload_options
from celery.signals import task_prerun, task_retry, task_failure, task_success
from kombu.serialization import register
from pyramid.paster import bootstrap
from pyramid.request import Request
from pyramid.scripting import prepare
from pyramid.settings import asbool
from pyramid.threadlocal import get_current_request

from appenlight.celery.encoders import json_dumps, json_loads
from appenlight_client.ext.celery import register_signals

log = logging.getLogger(__name__)

register(
    "date_json",
    json_dumps,
    json_loads,
    content_type="application/x-date_json",
    content_encoding="utf-8",
)

celery = Celery()

celery.user_options["preload"].add(
    Option(
        "--ini",
        dest="ini",
        default=None,
        help="Specifies pyramid configuration file location.",
    )
)


@user_preload_options.connect
def on_preload_parsed(options, **kwargs):
    """
    This actually configures celery from pyramid config file
    """
    celery.conf["INI_PYRAMID"] = options["ini"]
    import appenlight_client.client as e_client

    ini_location = options["ini"]
    if not ini_location:
        raise Exception(
            "You need to pass pyramid ini location using "
            "--ini=filename.ini argument to the worker"
        )
    env = bootstrap(ini_location[0])
    api_key = env["request"].registry.settings["appenlight.api_key"]
    tr_config = env["request"].registry.settings.get("appenlight.transport_config")
    CONFIG = e_client.get_config({"appenlight.api_key": api_key})
    if tr_config:
        CONFIG["appenlight.transport_config"] = tr_config
    APPENLIGHT_CLIENT = e_client.Client(CONFIG)
    # log.addHandler(APPENLIGHT_CLIENT.log_handler)
    register_signals(APPENLIGHT_CLIENT)
    celery.pyramid = env


celery_config = {
    "CELERY_IMPORTS": ["appenlight.celery.tasks"],
    "CELERYD_TASK_TIME_LIMIT": 60,
    "CELERYD_MAX_TASKS_PER_CHILD": 1000,
    "CELERY_IGNORE_RESULT": True,
    "CELERY_ACCEPT_CONTENT": ["date_json"],
    "CELERY_TASK_SERIALIZER": "date_json",
    "CELERY_RESULT_SERIALIZER": "date_json",
    "BROKER_URL": None,
    "CELERYD_CONCURRENCY": None,
    "CELERY_TIMEZONE": None,
    "CELERYBEAT_SCHEDULE": {
        "alerting_reports": {
            "task": "appenlight.celery.tasks.alerting_reports",
            "schedule": timedelta(seconds=60),
        },
        "close_alerts": {
            "task": "appenlight.celery.tasks.close_alerts",
            "schedule": timedelta(seconds=60),
        },
    },
}
celery.config_from_object(celery_config)


def configure_celery(pyramid_registry):
    settings = pyramid_registry.settings
    celery_config["BROKER_URL"] = settings["celery.broker_url"]
    celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"]
    celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"]

    notifications_seconds = int(
        settings.get("tasks.notifications_reports.interval", 60)
    )

    celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = {
        "task": "appenlight.celery.tasks.notifications_reports",
        "schedule": timedelta(seconds=notifications_seconds),
    }

    celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = {
        "task": "appenlight.celery.tasks.daily_digest",
        "schedule": crontab(minute=1, hour="4,12,20"),
    }

    if asbool(settings.get("celery.always_eager")):
        celery_config["CELERY_ALWAYS_EAGER"] = True
        celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True

    for plugin in pyramid_registry.appenlight_plugins.values():
        if plugin.get("celery_tasks"):
            celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"])
        if plugin.get("celery_beats"):
            for name, config in plugin["celery_beats"]:
                celery_config["CELERYBEAT_SCHEDULE"][name] = config
    celery.config_from_object(celery_config)


@task_prerun.connect
def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
    if hasattr(celery, "pyramid"):
        env = celery.pyramid
        env = prepare(registry=env["request"].registry)
        proper_base_url = env["request"].registry.settings["mailing.app_url"]
        tmp_req = Request.blank("/", base_url=proper_base_url)
        # ensure tasks generate url for right domain from config
        env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"]
        env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"]
        env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"]
        env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"]
    get_current_request().tm.begin()


@task_success.connect
def task_success_signal(result, **kwargs):
    get_current_request().tm.commit()
    if hasattr(celery, "pyramid"):
        celery.pyramid["closer"]()


@task_retry.connect
def task_retry_signal(request, reason, einfo, **kwargs):
    get_current_request().tm.abort()
    if hasattr(celery, "pyramid"):
        celery.pyramid["closer"]()


@task_failure.connect
def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs):
    get_current_request().tm.abort()
    if hasattr(celery, "pyramid"):
        celery.pyramid["closer"]()


@task_revoked.connect
def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
    get_current_request().tm.abort()
    if hasattr(celery, "pyramid"):
        celery.pyramid["closer"]()
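
For context, the --ini preload option registered above is how the worker locates the Pyramid settings. Assuming the Celery 3.x command line that celery.bin.Option implies, a worker would be started along the lines of:

    celery worker -A appenlight.celery --ini=filename.ini

on_preload_parsed then bootstraps that ini file and hooks the appenlight_client error tracker into the worker via register_signals.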
@@ -1,60 +1,51 @@ src/appenlight/celery/encoders.py (name inferred from content)

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
from datetime import datetime, date, timedelta

DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"


class DateEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
        elif isinstance(obj, date):
            return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)}
        elif isinstance(obj, timedelta):
            return {"__type__": "__timedelta__", "seconds": obj.total_seconds()}
        else:
            return json.JSONEncoder.default(self, obj)


def date_decoder(dct):
    if "__type__" in dct:
        if dct["__type__"] == "__datetime__":
            return datetime.strptime(dct["iso"], DATE_FORMAT)
        elif dct["__type__"] == "__date__":
            return datetime.strptime(dct["iso"], DATE_FORMAT).date()
        elif dct["__type__"] == "__timedelta__":
            return timedelta(seconds=dct["seconds"])
    return dct


def json_dumps(obj):
    return json.dumps(obj, cls=DateEncoder)


def json_loads(obj):
    return json.loads(obj.decode("utf8"), object_hook=date_decoder)
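
A quick round-trip of the date_json pair (illustrative): json_dumps tags temporal values with a __type__ marker, and json_loads, which expects bytes because it calls .decode("utf8") on its argument, rebuilds them through date_decoder:

    from datetime import datetime, timedelta

    payload = {"when": datetime(2017, 5, 1, 12, 30), "ttl": timedelta(hours=1)}
    raw = json_dumps(payload)
    # '{"when": {"__type__": "__datetime__", "iso": "2017-05-01T12:30:00.000000"}, ...}'
    assert json_loads(raw.encode("utf8")) == payload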
@@ -1,657 +1,708 @@ src/appenlight/celery/tasks.py (name inferred from content; truncated by the diff view)

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import bisect
import collections
import math
from datetime import datetime, timedelta

import sqlalchemy as sa
import elasticsearch.exceptions
import elasticsearch.helpers

from celery.utils.log import get_task_logger
from zope.sqlalchemy import mark_changed
from pyramid.threadlocal import get_current_request, get_current_registry
from ziggurat_foundations.models.services.resource import ResourceService

from appenlight.celery import celery
from appenlight.models.report_group import ReportGroup
from appenlight.models import DBSession, Datastores
from appenlight.models.report import Report
from appenlight.models.log import Log
from appenlight.models.metric import Metric
from appenlight.models.event import Event

from appenlight.models.services.application import ApplicationService
from appenlight.models.services.event import EventService
from appenlight.models.services.log import LogService
from appenlight.models.services.report import ReportService
from appenlight.models.services.report_group import ReportGroupService
from appenlight.models.services.user import UserService
from appenlight.models.tag import Tag
from appenlight.lib import print_traceback
from appenlight.lib.utils import parse_proto, in_batches
from appenlight.lib.ext_json import json
from appenlight.lib.redis_keys import REDIS_KEYS
from appenlight.lib.enums import ReportType

log = get_task_logger(__name__)

sample_boundries = (
    list(range(100, 1000, 100))
    + list(range(1000, 10000, 1000))
    + list(range(10000, 100000, 5000))
)


def pick_sample(total_occurences, report_type=None):
    every = 1.0
    position = bisect.bisect_left(sample_boundries, total_occurences)
    if position > 0:
        if report_type == ReportType.not_found:
            divide = 10.0
        else:
            divide = 100.0
        every = sample_boundries[position - 1] / divide
    return total_occurences % every == 0
71 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
73 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) | |
72 | def test_exception_task(): |
|
74 | def test_exception_task(): | |
73 |
log.error( |
|
75 | log.error("test celery log", extra={"location": "celery"}) | |
74 |
log.warning( |
|
76 | log.warning("test celery log", extra={"location": "celery"}) | |
75 |
raise Exception( |
|
77 | raise Exception("Celery exception test") | |
76 |
|
78 | |||
77 |
|
79 | |||
78 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
80 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) | |
79 | def test_retry_exception_task(): |
|
81 | def test_retry_exception_task(): | |
80 | try: |
|
82 | try: | |
81 | import time |
|
83 | import time | |
82 |
|
84 | |||
83 | time.sleep(1.3) |
|
85 | time.sleep(1.3) | |
84 |
log.error( |
|
86 | log.error("test retry celery log", extra={"location": "celery"}) | |
85 |
log.warning( |
|
87 | log.warning("test retry celery log", extra={"location": "celery"}) | |
86 |
raise Exception( |
|
88 | raise Exception("Celery exception test") | |
87 | except Exception as exc: |
|
89 | except Exception as exc: | |
88 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
90 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: | |
89 | raise |
|
91 | raise | |
90 | test_retry_exception_task.retry(exc=exc) |
|
92 | test_retry_exception_task.retry(exc=exc) | |
91 |
|
93 | |||
92 |
|
94 | |||
93 | @celery.task(queue="reports", default_retry_delay=600, max_retries=144) |
|
95 | @celery.task(queue="reports", default_retry_delay=600, max_retries=144) | |
94 | def add_reports(resource_id, request_params, dataset, **kwargs): |
|
96 | def add_reports(resource_id, request_params, dataset, **kwargs): | |
95 |
proto_version = parse_proto(request_params.get( |
|
97 | proto_version = parse_proto(request_params.get("protocol_version", "")) | |
96 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
98 | current_time = datetime.utcnow().replace(second=0, microsecond=0) | |
97 | try: |
|
99 | try: | |
98 | # we will store solr docs here for single insert |
|
100 | # we will store solr docs here for single insert | |
99 | es_report_docs = {} |
|
101 | es_report_docs = {} | |
100 | es_report_group_docs = {} |
|
102 | es_report_group_docs = {} | |
101 | resource = ApplicationService.by_id(resource_id) |
|
103 | resource = ApplicationService.by_id(resource_id) | |
102 |
|
104 | |||
103 | tags = [] |
|
105 | tags = [] | |
104 | es_slow_calls_docs = {} |
|
106 | es_slow_calls_docs = {} | |
105 | es_reports_stats_rows = {} |
|
107 | es_reports_stats_rows = {} | |
106 | for report_data in dataset: |
|
108 | for report_data in dataset: | |
107 | # build report details for later |
|
109 | # build report details for later | |
108 | added_details = 0 |
|
110 | added_details = 0 | |
109 | report = Report() |
|
111 | report = Report() | |
110 | report.set_data(report_data, resource, proto_version) |
|
112 | report.set_data(report_data, resource, proto_version) | |
111 | report._skip_ft_index = True |
|
113 | report._skip_ft_index = True | |
112 |
|
114 | |||
113 | # find latest group in this months partition |
|
115 | # find latest group in this months partition | |
114 | report_group = ReportGroupService.by_hash_and_resource( |
|
116 | report_group = ReportGroupService.by_hash_and_resource( | |
115 | report.resource_id, |
|
117 | report.resource_id, | |
116 | report.grouping_hash, |
|
118 | report.grouping_hash, | |
117 | since_when=datetime.utcnow().date().replace(day=1) |
|
119 | since_when=datetime.utcnow().date().replace(day=1), | |
118 | ) |
|
120 | ) | |
119 |
occurences = report_data.get( |
|
121 | occurences = report_data.get("occurences", 1) | |
120 | if not report_group: |
|
122 | if not report_group: | |
121 | # total reports will be +1 moment later |
|
123 | # total reports will be +1 moment later | |
122 |
report_group = ReportGroup( |
|
124 | report_group = ReportGroup( | |
123 | occurences=0, total_reports=0, |
|
125 | grouping_hash=report.grouping_hash, | |
124 | last_report=0, |
|
126 | occurences=0, | |
125 | priority=report.priority, |
|
127 | total_reports=0, | |
126 | error=report.error, |
|
128 | last_report=0, | |
127 | first_timestamp=report.start_time) |
|
129 | priority=report.priority, | |
|
130 | error=report.error, | |||
|
131 | first_timestamp=report.start_time, | |||
|
132 | ) | |||
128 | report_group._skip_ft_index = True |
|
133 | report_group._skip_ft_index = True | |
129 | report_group.report_type = report.report_type |
|
134 | report_group.report_type = report.report_type | |
130 | report.report_group_time = report_group.first_timestamp |
|
135 | report.report_group_time = report_group.first_timestamp | |
131 |
add_sample = pick_sample( |
|
136 | add_sample = pick_sample( | |
132 |
|
|
137 | report_group.occurences, report_type=report_group.report_type | |
|
138 | ) | |||
133 | if add_sample: |
|
139 | if add_sample: | |
134 | resource.report_groups.append(report_group) |
|
140 | resource.report_groups.append(report_group) | |
135 | report_group.reports.append(report) |
|
141 | report_group.reports.append(report) | |
136 | added_details += 1 |
|
142 | added_details += 1 | |
137 | DBSession.flush() |
|
143 | DBSession.flush() | |
138 | if report.partition_id not in es_report_docs: |
|
144 | if report.partition_id not in es_report_docs: | |
139 | es_report_docs[report.partition_id] = [] |
|
145 | es_report_docs[report.partition_id] = [] | |
140 | es_report_docs[report.partition_id].append(report.es_doc()) |
|
146 | es_report_docs[report.partition_id].append(report.es_doc()) | |
141 | tags.extend(list(report.tags.items())) |
|
147 | tags.extend(list(report.tags.items())) | |
142 | slow_calls = report.add_slow_calls(report_data, report_group) |
|
148 | slow_calls = report.add_slow_calls(report_data, report_group) | |
143 | DBSession.flush() |
|
149 | DBSession.flush() | |
144 | for s_call in slow_calls: |
|
150 | for s_call in slow_calls: | |
145 | if s_call.partition_id not in es_slow_calls_docs: |
|
151 | if s_call.partition_id not in es_slow_calls_docs: | |
146 | es_slow_calls_docs[s_call.partition_id] = [] |
|
152 | es_slow_calls_docs[s_call.partition_id] = [] | |
147 | es_slow_calls_docs[s_call.partition_id].append( |
|
153 | es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc()) | |
148 | s_call.es_doc()) |
|
|||
149 | # try generating new stat rows if needed |
|
154 | # try generating new stat rows if needed | |
150 | else: |
|
155 | else: | |
151 | # required for postprocessing to not fail later |
|
156 | # required for postprocessing to not fail later | |
152 | report.report_group = report_group |
|
157 | report.report_group = report_group | |
153 |
|
158 | |||
154 | stat_row = ReportService.generate_stat_rows( |
|
159 | stat_row = ReportService.generate_stat_rows(report, resource, report_group) | |
155 | report, resource, report_group) |
|
|||
156 | if stat_row.partition_id not in es_reports_stats_rows: |
|
160 | if stat_row.partition_id not in es_reports_stats_rows: | |
157 | es_reports_stats_rows[stat_row.partition_id] = [] |
|
161 | es_reports_stats_rows[stat_row.partition_id] = [] | |
158 | es_reports_stats_rows[stat_row.partition_id].append( |
|
162 | es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc()) | |
159 | stat_row.es_doc()) |
|
|||
160 |
|
163 | |||
161 | # see if we should mark 10th occurence of report |
|
164 | # see if we should mark 10th occurence of report | |
162 | last_occurences_10 = int(math.floor(report_group.occurences / 10)) |
|
165 | last_occurences_10 = int(math.floor(report_group.occurences / 10)) | |
163 |
curr_occurences_10 = int( |
|
166 | curr_occurences_10 = int( | |
164 |
(report_group.occurences + report.occurences) / 10) |
|
167 | math.floor((report_group.occurences + report.occurences) / 10) | |
165 | last_occurences_100 = int( |
|
168 | ) | |
166 |
|
|
169 | last_occurences_100 = int(math.floor(report_group.occurences / 100)) | |
167 |
curr_occurences_100 = int( |
|
170 | curr_occurences_100 = int( | |
168 |
(report_group.occurences + report.occurences) / 100) |
|
171 | math.floor((report_group.occurences + report.occurences) / 100) | |
|
172 | ) | |||
169 | notify_occurences_10 = last_occurences_10 != curr_occurences_10 |
|
173 | notify_occurences_10 = last_occurences_10 != curr_occurences_10 | |
170 | notify_occurences_100 = last_occurences_100 != curr_occurences_100 |
|
174 | notify_occurences_100 = last_occurences_100 != curr_occurences_100 | |
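For context, the two notify flags above implement a simple bucket-crossing check: a notification fires exactly when the running occurrence total passes a multiple of 10 or 100. A minimal standalone sketch of that logic (names are illustrative, not from the diff):

import math

def crossed_multiple(old_total, added, step):
    # the floor-divided bucket changes exactly when a multiple of `step`
    # is crossed between the old total and the new total
    return math.floor(old_total / step) != math.floor((old_total + added) / step)

assert crossed_multiple(9, 1, 10) is True    # 9 -> 10 crosses the 10th occurrence
assert crossed_multiple(11, 3, 10) is False  # 11 -> 14 stays inside the same bucket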
171 | report_group.occurences = ReportGroup.occurences + occurences |
|
175 | report_group.occurences = ReportGroup.occurences + occurences | |
172 | report_group.last_timestamp = report.start_time |
|
176 | report_group.last_timestamp = report.start_time | |
173 | report_group.summed_duration = ReportGroup.summed_duration + report.duration |
|
177 | report_group.summed_duration = ReportGroup.summed_duration + report.duration | |
174 | summed_duration = ReportGroup.summed_duration + report.duration |
|
178 | summed_duration = ReportGroup.summed_duration + report.duration | |
175 | summed_occurences = ReportGroup.occurences + occurences |
|
179 | summed_occurences = ReportGroup.occurences + occurences | |
176 | report_group.average_duration = summed_duration / summed_occurences |
|
180 | report_group.average_duration = summed_duration / summed_occurences | |
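Note that the counter updates above assign SQL expressions (ReportGroup.occurences + occurences) rather than plain Python values, so the increment executes inside the database instead of racing through a read-modify-write. A minimal sketch of the pattern against an assumed toy model (not the AppEnlight schema):

import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Counter(Base):
    __tablename__ = "counters"
    id = sa.Column(sa.Integer, primary_key=True)
    seen = sa.Column(sa.Integer, default=0)

# given a session and a loaded `row`, assigning a column expression:
#   row.seen = Counter.seen + 5
# makes flush() emit: UPDATE counters SET seen = seen + 5 WHERE id = :id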
177 | report_group.run_postprocessing(report) |
|
181 | report_group.run_postprocessing(report) | |
178 | if added_details: |
|
182 | if added_details: | |
179 | report_group.total_reports = ReportGroup.total_reports + 1 |
|
183 | report_group.total_reports = ReportGroup.total_reports + 1 | |
180 | report_group.last_report = report.id |
|
184 | report_group.last_report = report.id | |
181 | report_group.set_notification_info(
185 | report_group.set_notification_info( |
182 | notify_10=notify_occurences_10, notify_100=notify_occurences_100)
186 | notify_10=notify_occurences_10, notify_100=notify_occurences_100 |
187 | ) |
183 | DBSession.flush() |
|
188 | DBSession.flush() | |
184 | report_group.get_report().notify_channel(report_group) |
|
189 | report_group.get_report().notify_channel(report_group) | |
185 | if report_group.partition_id not in es_report_group_docs: |
|
190 | if report_group.partition_id not in es_report_group_docs: | |
186 | es_report_group_docs[report_group.partition_id] = [] |
|
191 | es_report_group_docs[report_group.partition_id] = [] | |
187 | es_report_group_docs[report_group.partition_id].append( |
|
192 | es_report_group_docs[report_group.partition_id].append( | |
188 | report_group.es_doc())
193 | report_group.es_doc() |
194 | ) |
189 |
|
195 | |||
190 | action = 'REPORT'
196 | action = "REPORT" |
191 | log_msg = '%s: %s %s, client: %s, proto: %s' % (
197 | log_msg = "%s: %s %s, client: %s, proto: %s" % ( |
192 | action,
198 | action, |
193 | report_data.get('http_status', 'unknown'),
199 | report_data.get("http_status", "unknown"), |
194 | str(resource),
200 | str(resource), |
195 | report_data.get('client'),
201 | report_data.get("client"), |
196 | proto_version)
202 | proto_version, |
203 | ) |
197 | log.info(log_msg) |
|
204 | log.info(log_msg) | |
198 | total_reports = len(dataset) |
|
205 | total_reports = len(dataset) | |
199 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
206 | redis_pipeline = Datastores.redis.pipeline(transaction=False) | |
200 | key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
207 | key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time) |
201 | redis_pipeline.incr(key, total_reports) |
|
208 | redis_pipeline.incr(key, total_reports) | |
202 | redis_pipeline.expire(key, 3600 * 24) |
|
209 | redis_pipeline.expire(key, 3600 * 24) | |
203 | key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
210 | key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format( |
204 | resource.owner_user_id, current_time)
211 | resource.owner_user_id, current_time |
212 | ) |
205 | redis_pipeline.incr(key, total_reports) |
|
213 | redis_pipeline.incr(key, total_reports) | |
206 | redis_pipeline.expire(key, 3600) |
|
214 | redis_pipeline.expire(key, 3600) | |
207 | key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format(
215 | key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format( |
208 | resource_id, current_time.replace(minute=0))
216 | resource_id, current_time.replace(minute=0) |
217 | ) |
209 | redis_pipeline.incr(key, total_reports) |
|
218 | redis_pipeline.incr(key, total_reports) | |
210 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
219 | redis_pipeline.expire(key, 3600 * 24 * 7) | |
211 | redis_pipeline.sadd( |
|
220 | redis_pipeline.sadd( | |
212 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
221 | REDIS_KEYS["apps_that_got_new_data_per_hour"].format( |
213 | current_time.replace(minute=0)), resource_id)
222 | current_time.replace(minute=0) |
223 | ), |
224 | resource_id, |
225 | ) |
214 | redis_pipeline.execute() |
|
226 | redis_pipeline.execute() | |
215 |
|
227 | |||
216 | add_reports_es(es_report_group_docs, es_report_docs) |
|
228 | add_reports_es(es_report_group_docs, es_report_docs) | |
217 | add_reports_slow_calls_es(es_slow_calls_docs) |
|
229 | add_reports_slow_calls_es(es_slow_calls_docs) | |
218 | add_reports_stats_rows_es(es_reports_stats_rows) |
|
230 | add_reports_stats_rows_es(es_reports_stats_rows) | |
219 | return True |
|
231 | return True | |
220 | except Exception as exc: |
|
232 | except Exception as exc: | |
221 | print_traceback(log) |
|
233 | print_traceback(log) | |
222 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
234 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: | |
223 | raise |
|
235 | raise | |
224 | add_reports.retry(exc=exc) |
|
236 | add_reports.retry(exc=exc) | |
225 |
|
237 | |||
226 |
|
238 | |||
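After the per-report loop, the task bumps a family of Redis counters through a non-transactional pipeline, pairing each incr with a TTL so stale buckets expire on their own. A standalone sketch of that idiom with a hypothetical key name:

from datetime import datetime
import redis

r = redis.StrictRedis()
now = datetime.utcnow().replace(second=0, microsecond=0)

pipe = r.pipeline(transaction=False)   # batched round trip, no MULTI/EXEC
key = "counters:reports_per_minute:%s" % now.isoformat()
pipe.incr(key, 5)                      # add this batch's report count
pipe.expire(key, 3600 * 24)            # minute buckets live for one day
pipe.execute()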
227 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
239 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
228 | def add_reports_es(report_group_docs, report_docs): |
|
240 | def add_reports_es(report_group_docs, report_docs): | |
229 | for k, v in report_group_docs.items(): |
|
241 | for k, v in report_group_docs.items(): | |
230 |
to_update = { |
|
242 | to_update = {"_index": k, "_type": "report_group"} | |
231 | [i.update(to_update) for i in v] |
|
243 | [i.update(to_update) for i in v] | |
232 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
244 | elasticsearch.helpers.bulk(Datastores.es, v) | |
233 | for k, v in report_docs.items(): |
|
245 | for k, v in report_docs.items(): | |
234 |
to_update = { |
|
246 | to_update = {"_index": k, "_type": "report"} | |
235 | [i.update(to_update) for i in v] |
|
247 | [i.update(to_update) for i in v] | |
236 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
248 | elasticsearch.helpers.bulk(Datastores.es, v) | |
237 |
|
249 | |||
238 |
|
250 | |||
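add_reports_es above and the companion *_es tasks below all share one pattern: documents arrive grouped by partition (one ES index per partition), each doc gets tagged with its routing metadata, and the batch ships in a single bulk call. A sketch of the same pattern in isolation (index name and doc contents are made up; the "_type" field assumes the ES 2.x/5.x-era client this code targets):

import elasticsearch
import elasticsearch.helpers

es = elasticsearch.Elasticsearch()
docs_by_partition = {"rcae_r_2017_05": [{"report_id": 1}, {"report_id": 2}]}

for index_name, docs in docs_by_partition.items():
    for doc in docs:
        # bulk() reads routing metadata straight from each action dict
        doc.update({"_index": index_name, "_type": "report"})
    elasticsearch.helpers.bulk(es, docs)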
239 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
251 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
240 | def add_reports_slow_calls_es(es_docs): |
|
252 | def add_reports_slow_calls_es(es_docs): | |
241 | for k, v in es_docs.items(): |
|
253 | for k, v in es_docs.items(): | |
242 |
to_update = { |
|
254 | to_update = {"_index": k, "_type": "log"} | |
243 | [i.update(to_update) for i in v] |
|
255 | [i.update(to_update) for i in v] | |
244 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
256 | elasticsearch.helpers.bulk(Datastores.es, v) | |
245 |
|
257 | |||
246 |
|
258 | |||
247 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
259 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
248 | def add_reports_stats_rows_es(es_docs): |
|
260 | def add_reports_stats_rows_es(es_docs): | |
249 | for k, v in es_docs.items(): |
|
261 | for k, v in es_docs.items(): | |
250 |
to_update = { |
|
262 | to_update = {"_index": k, "_type": "log"} | |
251 | [i.update(to_update) for i in v] |
|
263 | [i.update(to_update) for i in v] | |
252 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
264 | elasticsearch.helpers.bulk(Datastores.es, v) | |
253 |
|
265 | |||
254 |
|
266 | |||
255 | @celery.task(queue="logs", default_retry_delay=600, max_retries=144) |
|
267 | @celery.task(queue="logs", default_retry_delay=600, max_retries=144) | |
256 | def add_logs(resource_id, request_params, dataset, **kwargs): |
|
268 | def add_logs(resource_id, request_params, dataset, **kwargs): | |
257 | proto_version = request_params.get('protocol_version')
269 | proto_version = request_params.get("protocol_version") |
258 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
270 | current_time = datetime.utcnow().replace(second=0, microsecond=0) | |
259 |
|
271 | |||
260 | try: |
|
272 | try: | |
261 | es_docs = collections.defaultdict(list) |
|
273 | es_docs = collections.defaultdict(list) | |
262 | resource = ApplicationService.by_id_cached()(resource_id) |
|
274 | resource = ApplicationService.by_id_cached()(resource_id) | |
263 | resource = DBSession.merge(resource, load=False) |
|
275 | resource = DBSession.merge(resource, load=False) | |
264 | ns_pairs = [] |
|
276 | ns_pairs = [] | |
265 | for entry in dataset: |
|
277 | for entry in dataset: | |
266 | # gather pk and ns so we can remove older versions of row later |
|
278 | # gather pk and ns so we can remove older versions of row later | |
267 | if entry['primary_key'] is not None:
279 | if entry["primary_key"] is not None: |
268 | ns_pairs.append({"pk": entry['primary_key'],
280 | ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]}) |
269 | "ns": entry['namespace']})
270 | log_entry = Log() |
|
281 | log_entry = Log() | |
271 | log_entry.set_data(entry, resource=resource) |
|
282 | log_entry.set_data(entry, resource=resource) | |
272 | log_entry._skip_ft_index = True |
|
283 | log_entry._skip_ft_index = True | |
273 | resource.logs.append(log_entry) |
|
284 | resource.logs.append(log_entry) | |
274 | DBSession.flush() |
|
285 | DBSession.flush() | |
275 | # insert non pk rows first |
|
286 | # insert non pk rows first | |
276 | if entry['primary_key'] is None:
287 | if entry["primary_key"] is None: |
277 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
288 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) | |
278 |
|
289 | |||
279 | # 2nd pass to delete all log entries from db for the same pk/ns pair
290 | # 2nd pass to delete all log entries from db for the same pk/ns pair |
280 | if ns_pairs: |
|
291 | if ns_pairs: | |
281 | ids_to_delete = [] |
|
292 | ids_to_delete = [] | |
282 | es_docs = collections.defaultdict(list) |
|
293 | es_docs = collections.defaultdict(list) | |
283 | es_docs_to_delete = collections.defaultdict(list) |
|
294 | es_docs_to_delete = collections.defaultdict(list) | |
284 | found_pkey_logs = LogService.query_by_primary_key_and_namespace( |
|
295 | found_pkey_logs = LogService.query_by_primary_key_and_namespace( | |
285 |
list_of_pairs=ns_pairs |
|
296 | list_of_pairs=ns_pairs | |
|
297 | ) | |||
286 | log_dict = {} |
|
298 | log_dict = {} | |
287 | for log_entry in found_pkey_logs: |
|
299 | for log_entry in found_pkey_logs: | |
288 | log_key = (log_entry.primary_key, log_entry.namespace) |
|
300 | log_key = (log_entry.primary_key, log_entry.namespace) | |
289 | if log_key not in log_dict: |
|
301 | if log_key not in log_dict: | |
290 | log_dict[log_key] = [] |
|
302 | log_dict[log_key] = [] | |
291 | log_dict[log_key].append(log_entry) |
|
303 | log_dict[log_key].append(log_entry) | |
292 |
|
304 | |||
293 | for ns, entry_list in log_dict.items(): |
|
305 | for ns, entry_list in log_dict.items(): | |
294 | entry_list = sorted(entry_list, key=lambda x: x.timestamp) |
|
306 | entry_list = sorted(entry_list, key=lambda x: x.timestamp) | |
295 | # newest row needs to be indexed in es |
|
307 | # newest row needs to be indexed in es | |
296 | log_entry = entry_list[-1] |
|
308 | log_entry = entry_list[-1] | |
297 | # delete everything from pg and ES, leave the last row in pg |
|
309 | # delete everything from pg and ES, leave the last row in pg | |
298 | for e in entry_list[:-1]: |
|
310 | for e in entry_list[:-1]: | |
299 | ids_to_delete.append(e.log_id) |
|
311 | ids_to_delete.append(e.log_id) | |
300 | es_docs_to_delete[e.partition_id].append(e.delete_hash) |
|
312 | es_docs_to_delete[e.partition_id].append(e.delete_hash) | |
301 |
|
313 | |||
302 | es_docs_to_delete[log_entry.partition_id].append( |
|
314 | es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash) | |
303 | log_entry.delete_hash) |
|
|||
304 |
|
315 | |||
305 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
316 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) | |
306 |
|
317 | |||
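The second pass above keeps only the newest row per (primary_key, namespace) pair and queues everything older for deletion from both PostgreSQL and ES. The core of that selection, as a standalone sketch over plain dicts:

import collections

def split_newest(entries):
    grouped = collections.defaultdict(list)
    for e in entries:
        grouped[(e["pk"], e["ns"])].append(e)
    keep, drop = [], []
    for group in grouped.values():
        group.sort(key=lambda e: e["timestamp"])
        keep.append(group[-1])    # newest version survives
        drop.extend(group[:-1])   # older duplicates get deleted
    return keep, drop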
307 | if ids_to_delete: |
|
318 | if ids_to_delete: | |
308 | query = DBSession.query(Log).filter( |
|
319 | query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete)) | |
309 | Log.log_id.in_(ids_to_delete)) |
|
|||
310 | query.delete(synchronize_session=False) |
|
320 | query.delete(synchronize_session=False) | |
311 | if es_docs_to_delete: |
|
321 | if es_docs_to_delete: | |
312 | # batch this to avoid problems with default ES bulk limits |
|
322 | # batch this to avoid problems with default ES bulk limits | |
313 | for es_index in es_docs_to_delete.keys(): |
|
323 | for es_index in es_docs_to_delete.keys(): | |
314 | for batch in in_batches(es_docs_to_delete[es_index], 20): |
|
324 | for batch in in_batches(es_docs_to_delete[es_index], 20): | |
315 | query = {"query": {'terms': {'delete_hash': batch}}}
325 | query = {"query": {"terms": {"delete_hash": batch}}} |
316 |
|
326 | |||
317 | try: |
|
327 | try: | |
318 | Datastores.es.transport.perform_request( |
|
328 | Datastores.es.transport.perform_request( | |
319 | "DELETE", '/{}/{}/_query'.format(es_index, 'log'), body=query)
329 | "DELETE", |
330 | "/{}/{}/_query".format(es_index, "log"), |
331 | body=query, |
332 | ) |
320 | except elasticsearch.exceptions.NotFoundError as exc: |
|
333 | except elasticsearch.exceptions.NotFoundError as exc: | |
321 |
msg = |
|
334 | msg = "skipping index {}".format(es_index) | |
322 | log.info(msg) |
|
335 | log.info(msg) | |
323 |
|
336 | |||
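in_batches() above caps each delete-by-query at 20 hashes to stay under default ES bulk limits. A plausible implementation of such a helper (assumed semantics; the real one lives elsewhere in the codebase):

def in_batches(items, size):
    # yield consecutive fixed-size slices; the last one may be shorter
    for start in range(0, len(items), size):
        yield items[start:start + size]

assert list(in_batches([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]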
324 | total_logs = len(dataset) |
|
337 | total_logs = len(dataset) | |
325 |
|
338 | |||
326 | log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
339 | log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % ( |
327 | str(resource), |
|
340 | str(resource), | |
328 | total_logs, |
|
341 | total_logs, | |
329 | proto_version)
342 | proto_version, |
343 | ) |
330 | log.info(log_msg) |
|
344 | log.info(log_msg) | |
331 | # mark_changed(session) |
|
345 | # mark_changed(session) | |
332 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
346 | redis_pipeline = Datastores.redis.pipeline(transaction=False) | |
333 |
key = REDIS_KEYS[ |
|
347 | key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time) | |
334 | redis_pipeline.incr(key, total_logs) |
|
348 | redis_pipeline.incr(key, total_logs) | |
335 | redis_pipeline.expire(key, 3600 * 24) |
|
349 | redis_pipeline.expire(key, 3600 * 24) | |
336 |
key = REDIS_KEYS[ |
|
350 | key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format( | |
337 |
resource.owner_user_id, current_time |
|
351 | resource.owner_user_id, current_time | |
|
352 | ) | |||
338 | redis_pipeline.incr(key, total_logs) |
|
353 | redis_pipeline.incr(key, total_logs) | |
339 | redis_pipeline.expire(key, 3600) |
|
354 | redis_pipeline.expire(key, 3600) | |
340 |
key = REDIS_KEYS[ |
|
355 | key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format( | |
341 |
resource_id, current_time.replace(minute=0) |
|
356 | resource_id, current_time.replace(minute=0) | |
|
357 | ) | |||
342 | redis_pipeline.incr(key, total_logs) |
|
358 | redis_pipeline.incr(key, total_logs) | |
343 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
359 | redis_pipeline.expire(key, 3600 * 24 * 7) | |
344 | redis_pipeline.sadd( |
|
360 | redis_pipeline.sadd( | |
345 |
REDIS_KEYS[ |
|
361 | REDIS_KEYS["apps_that_got_new_data_per_hour"].format( | |
346 |
current_time.replace(minute=0) |
|
362 | current_time.replace(minute=0) | |
|
363 | ), | |||
|
364 | resource_id, | |||
|
365 | ) | |||
347 | redis_pipeline.execute() |
|
366 | redis_pipeline.execute() | |
348 | add_logs_es(es_docs) |
|
367 | add_logs_es(es_docs) | |
349 | return True |
|
368 | return True | |
350 | except Exception as exc: |
|
369 | except Exception as exc: | |
351 | print_traceback(log) |
|
370 | print_traceback(log) | |
352 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
371 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: | |
353 | raise |
|
372 | raise | |
354 | add_logs.retry(exc=exc) |
|
373 | add_logs.retry(exc=exc) | |
355 |
|
374 | |||
356 |
|
375 | |||
357 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
376 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
358 | def add_logs_es(es_docs): |
|
377 | def add_logs_es(es_docs): | |
359 | for k, v in es_docs.items(): |
|
378 | for k, v in es_docs.items(): | |
360 |
to_update = { |
|
379 | to_update = {"_index": k, "_type": "log"} | |
361 | [i.update(to_update) for i in v] |
|
380 | [i.update(to_update) for i in v] | |
362 | elasticsearch.helpers.bulk(Datastores.es, v) |
|
381 | elasticsearch.helpers.bulk(Datastores.es, v) | |
363 |
|
382 | |||
364 |
|
383 | |||
365 | @celery.task(queue="metrics", default_retry_delay=600, max_retries=144) |
|
384 | @celery.task(queue="metrics", default_retry_delay=600, max_retries=144) | |
366 | def add_metrics(resource_id, request_params, dataset, proto_version): |
|
385 | def add_metrics(resource_id, request_params, dataset, proto_version): | |
367 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
386 | current_time = datetime.utcnow().replace(second=0, microsecond=0) | |
368 | try: |
|
387 | try: | |
369 | resource = ApplicationService.by_id_cached()(resource_id) |
|
388 | resource = ApplicationService.by_id_cached()(resource_id) | |
370 | resource = DBSession.merge(resource, load=False) |
|
389 | resource = DBSession.merge(resource, load=False) | |
371 | es_docs = [] |
|
390 | es_docs = [] | |
372 | rows = [] |
|
391 | rows = [] | |
373 | for metric in dataset: |
|
392 | for metric in dataset: | |
374 | tags = dict(metric['tags'])
393 | tags = dict(metric["tags"]) |
375 | server_n = tags.get('server_name', metric['server_name']).lower()
394 | server_n = tags.get("server_name", metric["server_name"]).lower() |
376 | tags['server_name'] = server_n or 'unknown'
395 | tags["server_name"] = server_n or "unknown" |
377 | new_metric = Metric( |
|
396 | new_metric = Metric( | |
378 | timestamp=metric['timestamp'],
397 | timestamp=metric["timestamp"], |
379 | resource_id=resource.resource_id,
398 | resource_id=resource.resource_id, |
380 | namespace=metric['namespace'],
399 | namespace=metric["namespace"], |
381 | tags=tags)
400 | tags=tags, |
401 | ) |
382 | rows.append(new_metric) |
|
402 | rows.append(new_metric) | |
383 | es_docs.append(new_metric.es_doc()) |
|
403 | es_docs.append(new_metric.es_doc()) | |
384 | session = DBSession() |
|
404 | session = DBSession() | |
385 | session.bulk_save_objects(rows) |
|
405 | session.bulk_save_objects(rows) | |
386 | session.flush() |
|
406 | session.flush() | |
387 |
|
407 | |||
388 |
action = |
|
408 | action = "METRICS" | |
389 |
metrics_msg = |
|
409 | metrics_msg = "%s: %s, metrics: %s, proto:%s" % ( | |
390 | action, |
|
410 | action, | |
391 | str(resource), |
|
411 | str(resource), | |
392 | len(dataset), |
|
412 | len(dataset), | |
393 | proto_version |
|
413 | proto_version, | |
394 | ) |
|
414 | ) | |
395 | log.info(metrics_msg) |
|
415 | log.info(metrics_msg) | |
396 |
|
416 | |||
397 | mark_changed(session) |
|
417 | mark_changed(session) | |
398 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
418 | redis_pipeline = Datastores.redis.pipeline(transaction=False) | |
399 |
key = REDIS_KEYS[ |
|
419 | key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time) | |
400 | redis_pipeline.incr(key, len(rows)) |
|
420 | redis_pipeline.incr(key, len(rows)) | |
401 | redis_pipeline.expire(key, 3600 * 24) |
|
421 | redis_pipeline.expire(key, 3600 * 24) | |
402 |
key = REDIS_KEYS[ |
|
422 | key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format( | |
403 |
resource.owner_user_id, current_time |
|
423 | resource.owner_user_id, current_time | |
|
424 | ) | |||
404 | redis_pipeline.incr(key, len(rows)) |
|
425 | redis_pipeline.incr(key, len(rows)) | |
405 | redis_pipeline.expire(key, 3600) |
|
426 | redis_pipeline.expire(key, 3600) | |
406 |
key = REDIS_KEYS[ |
|
427 | key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format( | |
407 |
resource_id, current_time.replace(minute=0) |
|
428 | resource_id, current_time.replace(minute=0) | |
|
429 | ) | |||
408 | redis_pipeline.incr(key, len(rows)) |
|
430 | redis_pipeline.incr(key, len(rows)) | |
409 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
431 | redis_pipeline.expire(key, 3600 * 24 * 7) | |
410 | redis_pipeline.sadd( |
|
432 | redis_pipeline.sadd( | |
411 |
REDIS_KEYS[ |
|
433 | REDIS_KEYS["apps_that_got_new_data_per_hour"].format( | |
412 |
current_time.replace(minute=0) |
|
434 | current_time.replace(minute=0) | |
|
435 | ), | |||
|
436 | resource_id, | |||
|
437 | ) | |||
413 | redis_pipeline.execute() |
|
438 | redis_pipeline.execute() | |
414 | add_metrics_es(es_docs) |
|
439 | add_metrics_es(es_docs) | |
415 | return True |
|
440 | return True | |
416 | except Exception as exc: |
|
441 | except Exception as exc: | |
417 | print_traceback(log) |
|
442 | print_traceback(log) | |
418 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
443 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: | |
419 | raise |
|
444 | raise | |
420 | add_metrics.retry(exc=exc) |
|
445 | add_metrics.retry(exc=exc) | |
421 |
|
446 | |||
422 |
|
447 | |||
423 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
448 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
424 | def add_metrics_es(es_docs): |
|
449 | def add_metrics_es(es_docs): | |
425 | for doc in es_docs: |
|
450 | for doc in es_docs: | |
426 |
partition = |
|
451 | partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d") | |
427 |
Datastores.es.index(partition, |
|
452 | Datastores.es.index(partition, "log", doc) | |
428 |
|
453 | |||
429 |
|
454 | |||
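add_metrics_es derives the target index from each document's own timestamp, giving one rcae_m_* index per calendar day. The naming scheme, extracted into a standalone helper for illustration:

from datetime import datetime

def metric_partition(ts):
    # one index per calendar day, e.g. rcae_m_2017_05_01
    return "rcae_m_%s" % ts.strftime("%Y_%m_%d")

assert metric_partition(datetime(2017, 5, 1)) == "rcae_m_2017_05_01"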
430 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
455 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) | |
431 | def check_user_report_notifications(resource_id): |
|
456 | def check_user_report_notifications(resource_id): | |
432 | since_when = datetime.utcnow() |
|
457 | since_when = datetime.utcnow() | |
433 | try: |
|
458 | try: | |
434 | request = get_current_request() |
|
459 | request = get_current_request() | |
435 | application = ApplicationService.by_id(resource_id) |
|
460 | application = ApplicationService.by_id(resource_id) | |
436 | if not application: |
|
461 | if not application: | |
437 | return |
|
462 | return | |
438 |
error_key = REDIS_KEYS[ |
|
463 | error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format( | |
439 |
ReportType.error, resource_id |
|
464 | ReportType.error, resource_id | |
440 | slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
465 | ) | |
441 | ReportType.slow, resource_id) |
|
466 | slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format( | |
|
467 | ReportType.slow, resource_id | |||
|
468 | ) | |||
442 | error_group_ids = Datastores.redis.smembers(error_key) |
|
469 | error_group_ids = Datastores.redis.smembers(error_key) | |
443 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
470 | slow_group_ids = Datastores.redis.smembers(slow_key) | |
444 | Datastores.redis.delete(error_key) |
|
471 | Datastores.redis.delete(error_key) | |
445 | Datastores.redis.delete(slow_key) |
|
472 | Datastores.redis.delete(slow_key) | |
446 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
473 | err_gids = [int(g_id) for g_id in error_group_ids] | |
447 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
474 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] | |
448 | group_ids = err_gids + slow_gids |
|
475 | group_ids = err_gids + slow_gids | |
449 | occurence_dict = {} |
|
476 | occurence_dict = {} | |
450 | for g_id in group_ids: |
|
477 | for g_id in group_ids: | |
451 |
key = REDIS_KEYS[ |
|
478 | key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id) | |
452 | g_id) |
|
|||
453 | val = Datastores.redis.get(key) |
|
479 | val = Datastores.redis.get(key) | |
454 | Datastores.redis.delete(key) |
|
480 | Datastores.redis.delete(key) | |
455 | if val: |
|
481 | if val: | |
456 | occurence_dict[g_id] = int(val) |
|
482 | occurence_dict[g_id] = int(val) | |
457 | else: |
|
483 | else: | |
458 | occurence_dict[g_id] = 1 |
|
484 | occurence_dict[g_id] = 1 | |
459 | report_groups = ReportGroupService.by_ids(group_ids) |
|
485 | report_groups = ReportGroupService.by_ids(group_ids) | |
460 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
486 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) | |
461 |
|
487 | |||
462 | ApplicationService.check_for_groups_alert( |
|
488 | ApplicationService.check_for_groups_alert( | |
463 | application, 'alert', report_groups=report_groups, |
|
489 | application, | |
464 | occurence_dict=occurence_dict) |
|
490 | "alert", | |
465 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) |
|
491 | report_groups=report_groups, | |
|
492 | occurence_dict=occurence_dict, | |||
|
493 | ) | |||
|
494 | users = set( | |||
|
495 | [p.user for p in ResourceService.users_for_perm(application, "view")] | |||
|
496 | ) | |||
466 | report_groups = report_groups.all() |
|
497 | report_groups = report_groups.all() | |
467 | for user in users: |
|
498 | for user in users: | |
468 |
UserService.report_notify( |
|
499 | UserService.report_notify( | |
469 | report_groups=report_groups, |
|
500 | user, | |
470 | occurence_dict=occurence_dict) |
|
501 | request, | |
|
502 | application, | |||
|
503 | report_groups=report_groups, | |||
|
504 | occurence_dict=occurence_dict, | |||
|
505 | ) | |||
471 | for group in report_groups: |
|
506 | for group in report_groups: | |
472 | # marks report_groups as notified |
|
507 | # marks report_groups as notified | |
473 | if not group.notified: |
|
508 | if not group.notified: | |
474 | group.notified = True |
|
509 | group.notified = True | |
475 | except Exception as exc: |
|
510 | except Exception as exc: | |
476 | print_traceback(log) |
|
511 | print_traceback(log) | |
477 | raise |
|
512 | raise | |
478 |
|
513 | |||
479 |
|
514 | |||
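check_user_report_notifications above and check_alerts below drain their pending-report sets with the same read-then-delete idiom: fetch the accumulated group ids, clear the key so the next run starts empty, then decode the ids. A sketch with a hypothetical key name (note the two calls are not atomic, so a member added between them can be dropped):

import redis

r = redis.StrictRedis()
key = "reports_to_notify:error:123"          # hypothetical key name

pending = r.smembers(key)                    # ids accumulated since last run
r.delete(key)                                # clear for the next cycle
group_ids = [int(g_id) for g_id in pending]  # smembers returns bytes in py3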
480 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
515 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) | |
481 | def check_alerts(resource_id): |
|
516 | def check_alerts(resource_id): | |
482 | since_when = datetime.utcnow() |
|
517 | since_when = datetime.utcnow() | |
483 | try: |
|
518 | try: | |
484 | request = get_current_request() |
|
519 | request = get_current_request() | |
485 | application = ApplicationService.by_id(resource_id) |
|
520 | application = ApplicationService.by_id(resource_id) | |
486 | if not application: |
|
521 | if not application: | |
487 | return |
|
522 | return | |
488 | error_key = REDIS_KEYS[ |
|
523 | error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format( | |
489 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
524 | ReportType.error, resource_id | |
490 | ReportType.error, resource_id) |
|
525 | ) | |
491 | slow_key = REDIS_KEYS[ |
|
526 | slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format( | |
492 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
527 | ReportType.slow, resource_id | |
493 | ReportType.slow, resource_id) |
|
528 | ) | |
494 | error_group_ids = Datastores.redis.smembers(error_key) |
|
529 | error_group_ids = Datastores.redis.smembers(error_key) | |
495 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
530 | slow_group_ids = Datastores.redis.smembers(slow_key) | |
496 | Datastores.redis.delete(error_key) |
|
531 | Datastores.redis.delete(error_key) | |
497 | Datastores.redis.delete(slow_key) |
|
532 | Datastores.redis.delete(slow_key) | |
498 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
533 | err_gids = [int(g_id) for g_id in error_group_ids] | |
499 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
534 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] | |
500 | group_ids = err_gids + slow_gids |
|
535 | group_ids = err_gids + slow_gids | |
501 | occurence_dict = {} |
|
536 | occurence_dict = {} | |
502 | for g_id in group_ids: |
|
537 | for g_id in group_ids: | |
503 |
key = REDIS_KEYS[ |
|
538 | key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format( | |
504 | 'report_group_occurences_alerting'].format( |
|
539 | g_id | |
505 |
|
|
540 | ) | |
506 | val = Datastores.redis.get(key) |
|
541 | val = Datastores.redis.get(key) | |
507 | Datastores.redis.delete(key) |
|
542 | Datastores.redis.delete(key) | |
508 | if val: |
|
543 | if val: | |
509 | occurence_dict[g_id] = int(val) |
|
544 | occurence_dict[g_id] = int(val) | |
510 | else: |
|
545 | else: | |
511 | occurence_dict[g_id] = 1 |
|
546 | occurence_dict[g_id] = 1 | |
512 | report_groups = ReportGroupService.by_ids(group_ids) |
|
547 | report_groups = ReportGroupService.by_ids(group_ids) | |
513 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
548 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) | |
514 |
|
549 | |||
515 | ApplicationService.check_for_groups_alert( |
|
550 | ApplicationService.check_for_groups_alert( | |
516 | application, 'alert', report_groups=report_groups, |
|
551 | application, | |
517 | occurence_dict=occurence_dict, since_when=since_when) |
|
552 | "alert", | |
|
553 | report_groups=report_groups, | |||
|
554 | occurence_dict=occurence_dict, | |||
|
555 | since_when=since_when, | |||
|
556 | ) | |||
518 | except Exception as exc: |
|
557 | except Exception as exc: | |
519 | print_traceback(log) |
|
558 | print_traceback(log) | |
520 | raise |
|
559 | raise | |
521 |
|
560 | |||
522 |
|
561 | |||
523 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
562 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) | |
524 | def close_alerts(): |
|
563 | def close_alerts(): | |
525 |
log.warning( |
|
564 | log.warning("Checking alerts") | |
526 | since_when = datetime.utcnow() |
|
565 | since_when = datetime.utcnow() | |
527 | try: |
|
566 | try: | |
528 | event_types = [Event.types['error_report_alert'], |
|
567 | event_types = [ | |
529 |
|
|
568 | Event.types["error_report_alert"], | |
530 | statuses = [Event.statuses['active']] |
|
569 | Event.types["slow_report_alert"], | |
|
570 | ] | |||
|
571 | statuses = [Event.statuses["active"]] | |||
531 | # get events older than 5 min |
|
572 | # get events older than 5 min | |
532 | events = EventService.by_type_and_status( |
|
573 | events = EventService.by_type_and_status( | |
533 | event_types, |
|
574 | event_types, statuses, older_than=(since_when - timedelta(minutes=5)) | |
534 | statuses, |
|
575 | ) | |
535 | older_than=(since_when - timedelta(minutes=5))) |
|
|||
536 | for event in events: |
|
576 | for event in events: | |
537 | # see if we can close them |
|
577 | # see if we can close them | |
538 | event.validate_or_close( |
|
578 | event.validate_or_close(since_when=(since_when - timedelta(minutes=1))) | |
539 | since_when=(since_when - timedelta(minutes=1))) |
|
|||
540 | except Exception as exc: |
|
579 | except Exception as exc: | |
541 | print_traceback(log) |
|
580 | print_traceback(log) | |
542 | raise |
|
581 | raise | |
543 |
|
582 | |||
544 |
|
583 | |||
545 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) |
|
584 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) | |
546 | def update_tag_counter(tag_name, tag_value, count): |
|
585 | def update_tag_counter(tag_name, tag_value, count): | |
547 | try: |
|
586 | try: | |
548 | query = DBSession.query(Tag).filter(Tag.name == tag_name).filter( |
|
587 | query = ( | |
549 | sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value), |
|
588 | DBSession.query(Tag) | |
550 | sa.types.TEXT)) |
|
589 | .filter(Tag.name == tag_name) | |
551 | query.update({'times_seen': Tag.times_seen + count, |
|
590 | .filter( | |
552 | 'last_timestamp': datetime.utcnow()}, |
|
591 | sa.cast(Tag.value, sa.types.TEXT) | |
553 | synchronize_session=False) |
|
592 | == sa.cast(json.dumps(tag_value), sa.types.TEXT) | |
|
593 | ) | |||
|
594 | ) | |||
|
595 | query.update( | |||
|
596 | {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()}, | |||
|
597 | synchronize_session=False, | |||
|
598 | ) | |||
554 | session = DBSession() |
|
599 | session = DBSession() | |
555 | mark_changed(session) |
|
600 | mark_changed(session) | |
556 | return True |
|
601 | return True | |
557 | except Exception as exc: |
|
602 | except Exception as exc: | |
558 | print_traceback(log) |
|
603 | print_traceback(log) | |
559 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: |
|
604 | if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]: | |
560 | raise |
|
605 | raise | |
561 | update_tag_counter.retry(exc=exc) |
|
606 | update_tag_counter.retry(exc=exc) | |
562 |
|
607 | |||
563 |
|
608 | |||
564 | @celery.task(queue="default") |
|
609 | @celery.task(queue="default") | |
565 | def update_tag_counters(): |
|
610 | def update_tag_counters(): | |
566 | """ |
|
611 | """ | |
567 | Sets task to update counters for application tags |
|
612 | Sets task to update counters for application tags | |
568 | """ |
|
613 | """ | |
569 |
tags = Datastores.redis.lrange(REDIS_KEYS[ |
|
614 | tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1) | |
570 |
Datastores.redis.delete(REDIS_KEYS[ |
|
615 | Datastores.redis.delete(REDIS_KEYS["seen_tag_list"]) | |
571 | c = collections.Counter(tags) |
|
616 | c = collections.Counter(tags) | |
572 | for t_json, count in c.items(): |
|
617 | for t_json, count in c.items(): | |
573 | tag_info = json.loads(t_json) |
|
618 | tag_info = json.loads(t_json) | |
574 | update_tag_counter.delay(tag_info[0], tag_info[1], count) |
|
619 | update_tag_counter.delay(tag_info[0], tag_info[1], count) | |
575 |
|
620 | |||
576 |
|
621 | |||
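update_tag_counters collapses the raw Redis list into one delayed task per distinct (name, value) pair, so N sightings of a tag become a single times_seen + N update. The aggregation step on its own, with inline sample data:

import collections
import json

raw = ['["type", "error"]', '["type", "error"]', '["server", "web01"]']
for t_json, count in collections.Counter(raw).items():
    name, value = json.loads(t_json)
    # the real task would call update_tag_counter.delay(name, value, count)
    print(name, value, count)   # -> type error 2 / server web01 1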
577 | @celery.task(queue="default") |
|
622 | @celery.task(queue="default") | |
578 | def daily_digest(): |
|
623 | def daily_digest(): | |
579 | """ |
|
624 | """ | |
580 | Sends daily digest with top 50 error reports |
|
625 | Sends daily digest with top 50 error reports | |
581 | """ |
|
626 | """ | |
582 | request = get_current_request() |
|
627 | request = get_current_request() | |
583 |
apps = Datastores.redis.smembers(REDIS_KEYS[ |
|
628 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"]) | |
584 |
Datastores.redis.delete(REDIS_KEYS[ |
|
629 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"]) | |
585 | since_when = datetime.utcnow() - timedelta(hours=8) |
|
630 | since_when = datetime.utcnow() - timedelta(hours=8) | |
586 |
log.warning( |
|
631 | log.warning("Generating daily digests") | |
587 | for resource_id in apps: |
|
632 | for resource_id in apps: | |
588 |
resource_id = resource_id.decode( |
|
633 | resource_id = resource_id.decode("utf8") | |
589 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
634 | end_date = datetime.utcnow().replace(microsecond=0, second=0) | |
590 |
filter_settings = { |
|
635 | filter_settings = { | |
591 | 'tags': [{'name': 'type', |
|
636 | "resource": [resource_id], | |
592 | 'value': ['error'], 'op': None}], |
|
637 | "tags": [{"name": "type", "value": ["error"], "op": None}], | |
593 | 'type': 'error', 'start_date': since_when, |
|
638 | "type": "error", | |
594 | 'end_date': end_date} |
|
639 | "start_date": since_when, | |
|
640 | "end_date": end_date, | |||
|
641 | } | |||
595 |
|
642 | |||
596 | reports = ReportGroupService.get_trending( |
|
643 | reports = ReportGroupService.get_trending( | |
597 |
request, filter_settings=filter_settings, limit=50 |
|
644 | request, filter_settings=filter_settings, limit=50 | |
|
645 | ) | |||
598 |
|
646 | |||
599 | application = ApplicationService.by_id(resource_id) |
|
647 | application = ApplicationService.by_id(resource_id) | |
600 | if application: |
|
648 | if application: | |
601 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) |
|
649 | users = set( | |
|
650 | [p.user for p in ResourceService.users_for_perm(application, "view")] | |||
|
651 | ) | |||
602 | for user in users: |
|
652 | for user in users: | |
603 |
user.send_digest( |
|
653 | user.send_digest( | |
604 |
|
|
654 | request, application, reports=reports, since_when=since_when | |
|
655 | ) | |||
605 |
|
656 | |||
606 |
|
657 | |||
607 | @celery.task(queue="default") |
|
658 | @celery.task(queue="default") | |
608 | def notifications_reports(): |
|
659 | def notifications_reports(): | |
609 | """ |
|
660 | """ | |
610 | Loop that checks redis for info and then issues new tasks to celery to |
|
661 | Loop that checks redis for info and then issues new tasks to celery to | |
611 | issue notifications |
|
662 | issue notifications | |
612 | """ |
|
663 | """ | |
613 |
apps = Datastores.redis.smembers(REDIS_KEYS[ |
|
664 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"]) | |
614 |
Datastores.redis.delete(REDIS_KEYS[ |
|
665 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"]) | |
615 | for app in apps: |
|
666 | for app in apps: | |
616 |
log.warning( |
|
667 | log.warning("Notify for app: %s" % app) | |
617 |
check_user_report_notifications.delay(app.decode( |
|
668 | check_user_report_notifications.delay(app.decode("utf8")) | |
|
669 | ||||
618 |
|
670 | |||
619 | @celery.task(queue="default") |
|
671 | @celery.task(queue="default") | |
620 | def alerting_reports(): |
|
672 | def alerting_reports(): | |
621 | """ |
|
673 | """ | |
622 | Loop that checks redis for info and then issues new tasks to celery to |
|
674 | Loop that checks redis for info and then issues new tasks to celery to | |
623 | perform the following: |
|
675 | perform the following: | |
624 | - which applications should have new alerts opened |
|
676 | - which applications should have new alerts opened | |
625 | """ |
|
677 | """ | |
626 |
|
678 | |||
627 |
apps = Datastores.redis.smembers(REDIS_KEYS[ |
|
679 | apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"]) | |
628 |
Datastores.redis.delete(REDIS_KEYS[ |
|
680 | Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"]) | |
629 | for app in apps: |
|
681 | for app in apps: | |
630 |
log.warning( |
|
682 | log.warning("Notify for app: %s" % app) | |
631 |
check_alerts.delay(app.decode( |
|
683 | check_alerts.delay(app.decode("utf8")) | |
632 |
|
684 | |||
633 |
|
685 | |||
634 | @celery.task(queue="default", soft_time_limit=3600 * 4, |
|
686 | @celery.task( | |
635 |
|
|
687 | queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144 | |
|
688 | ) | |||
636 | def logs_cleanup(resource_id, filter_settings): |
|
689 | def logs_cleanup(resource_id, filter_settings): | |
637 | request = get_current_request() |
|
690 | request = get_current_request() | |
638 | request.tm.begin() |
|
691 | request.tm.begin() | |
639 | es_query = { |
|
692 | es_query = { | |
640 | "query": { |
|
693 | "query": { | |
641 | "filtered": { |
|
694 | "filtered": {"filter": {"and": [{"term": {"resource_id": resource_id}}]}} | |
642 | "filter": { |
|
|||
643 | "and": [{"term": {"resource_id": resource_id}}] |
|
|||
644 | } |
|
|||
645 | } |
|
|||
646 | } |
|
695 | } | |
647 | } |
|
696 | } | |
648 |
|
697 | |||
649 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) |
|
698 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) | |
650 |
if filter_settings[ |
|
699 | if filter_settings["namespace"]: | |
651 |
query = query.filter(Log.namespace == filter_settings[ |
|
700 | query = query.filter(Log.namespace == filter_settings["namespace"][0]) | |
652 |
es_query[ |
|
701 | es_query["query"]["filtered"]["filter"]["and"].append( | |
653 |
{"term": {"namespace": filter_settings[ |
|
702 | {"term": {"namespace": filter_settings["namespace"][0]}} | |
654 | ) |
|
703 | ) | |
655 | query.delete(synchronize_session=False) |
|
704 | query.delete(synchronize_session=False) | |
656 | request.tm.commit() |
|
705 | request.tm.commit() | |
657 | Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format('rcae_l_*', 'log'), body=es_query) |
|
706 | Datastores.es.transport.perform_request( | |
|
707 | "DELETE", "/{}/{}/_query".format("rcae_l_*", "log"), body=es_query | |||
|
708 | ) |
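The cleanup filter is built incrementally: always scope by resource_id, and append a namespace term only when the caller asked for one. The same construction as a standalone function (assuming the ES 2.x "filtered" query syntax this diff targets):

def build_cleanup_query(resource_id, namespace=None):
    terms = [{"term": {"resource_id": resource_id}}]
    if namespace:
        terms.append({"term": {"namespace": namespace}})
    return {"query": {"filtered": {"filter": {"and": terms}}}}

build_cleanup_query(5, namespace="requests")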
@@ -1,19 +1,20 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
|
17 | ||||
17 | def filter_callable(structure, section=None): |
|
18 | def filter_callable(structure, section=None): | |
18 |
structure[ |
|
19 | structure["SOMEVAL"] = "***REMOVED***" | |
19 | return structure |
|
20 | return structure |
This diff has been collapsed as it changes many lines (863 lines changed).
@@ -1,896 +1,981 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import wtforms |
|
17 | import wtforms | |
18 | import formencode |
|
18 | import formencode | |
19 | import re |
|
19 | import re | |
20 | import pyramid.threadlocal |
|
20 | import pyramid.threadlocal | |
21 | import datetime |
|
21 | import datetime | |
22 | import appenlight.lib.helpers as h |
|
22 | import appenlight.lib.helpers as h | |
23 |
|
23 | |||
24 | from ziggurat_foundations.models.services.user import UserService |
|
24 | from ziggurat_foundations.models.services.user import UserService | |
25 | from ziggurat_foundations.models.services.group import GroupService |
|
25 | from ziggurat_foundations.models.services.group import GroupService | |
26 | from appenlight.models import DBSession |
|
26 | from appenlight.models import DBSession | |
27 | from appenlight.models.alert_channel import AlertChannel |
|
27 | from appenlight.models.alert_channel import AlertChannel | |
28 | from appenlight.models.integrations import IntegrationException |
|
28 | from appenlight.models.integrations import IntegrationException | |
29 | from appenlight.models.integrations.campfire import CampfireIntegration |
|
29 | from appenlight.models.integrations.campfire import CampfireIntegration | |
30 | from appenlight.models.integrations.bitbucket import BitbucketIntegration |
|
30 | from appenlight.models.integrations.bitbucket import BitbucketIntegration | |
31 | from appenlight.models.integrations.github import GithubIntegration |
|
31 | from appenlight.models.integrations.github import GithubIntegration | |
32 | from appenlight.models.integrations.flowdock import FlowdockIntegration |
|
32 | from appenlight.models.integrations.flowdock import FlowdockIntegration | |
33 | from appenlight.models.integrations.hipchat import HipchatIntegration |
|
33 | from appenlight.models.integrations.hipchat import HipchatIntegration | |
34 | from appenlight.models.integrations.jira import JiraClient |
|
34 | from appenlight.models.integrations.jira import JiraClient | |
35 | from appenlight.models.integrations.slack import SlackIntegration |
|
35 | from appenlight.models.integrations.slack import SlackIntegration | |
36 | from appenlight.lib.ext_json import json |
|
36 | from appenlight.lib.ext_json import json | |
37 | from wtforms.ext.csrf.form import SecureForm |
|
37 | from wtforms.ext.csrf.form import SecureForm | |
38 | from wtforms.compat import iteritems |
|
38 | from wtforms.compat import iteritems | |
39 | from collections import defaultdict |
|
39 | from collections import defaultdict | |
40 |
|
40 | |||
41 | _ = str |
|
41 | _ = str | |
42 |
|
42 | |||
43 | strip_filter = lambda x: x.strip() if x else None |
|
43 | strip_filter = lambda x: x.strip() if x else None | |
44 | uppercase_filter = lambda x: x.upper() if x else None |
|
44 | uppercase_filter = lambda x: x.upper() if x else None | |
45 |
|
45 | |||
46 |
FALSE_VALUES = ( |
|
46 | FALSE_VALUES = ("false", "", False, None) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | class CSRFException(Exception): |
|
49 | class CSRFException(Exception): | |
50 | pass |
|
50 | pass | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | class ReactorForm(SecureForm): |
|
53 | class ReactorForm(SecureForm): | |
54 | def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None,
54 | def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs): |
55 | **kwargs):
|
55 | super(ReactorForm, self).__init__( | |
56 | super(ReactorForm, self).__init__(formdata=formdata, obj=obj, |
|
56 | formdata=formdata, | |
57 | prefix=prefix, |
|
57 | obj=obj, | |
58 | csrf_context=csrf_context, **kwargs) |
|
58 | prefix=prefix, | |
|
59 | csrf_context=csrf_context, | |||
|
60 | **kwargs | |||
|
61 | ) | |||
59 | self._csrf_context = csrf_context |
|
62 | self._csrf_context = csrf_context | |
60 |
|
63 | |||
61 | def generate_csrf_token(self, csrf_context): |
|
64 | def generate_csrf_token(self, csrf_context): | |
62 | return csrf_context.session.get_csrf_token() |
|
65 | return csrf_context.session.get_csrf_token() | |
63 |
|
66 | |||
64 | def validate_csrf_token(self, field): |
|
67 | def validate_csrf_token(self, field): | |
65 | request = self._csrf_context or pyramid.threadlocal.get_current_request() |
|
68 | request = self._csrf_context or pyramid.threadlocal.get_current_request() | |
66 | is_from_auth_token = 'auth:auth_token' in request.effective_principals
69 | is_from_auth_token = "auth:auth_token" in request.effective_principals |
67 | if is_from_auth_token: |
|
70 | if is_from_auth_token: | |
68 | return True |
|
71 | return True | |
69 |
|
72 | |||
70 | if field.data != field.current_token: |
|
73 | if field.data != field.current_token: | |
71 | # try to save the day by using token from angular |
|
74 | # try to save the day by using token from angular | |
72 | if request.headers.get('X-XSRF-TOKEN') != field.current_token:
75 | if request.headers.get("X-XSRF-TOKEN") != field.current_token: |
73 | raise CSRFException('Invalid CSRF token')
76 | raise CSRFException("Invalid CSRF token") |
74 |
|
77 | |||
75 | @property |
|
78 | @property | |
76 | def errors_dict(self): |
|
79 | def errors_dict(self): | |
77 | r_dict = defaultdict(list) |
|
80 | r_dict = defaultdict(list) | |
78 | for k, errors in self.errors.items(): |
|
81 | for k, errors in self.errors.items(): | |
79 | r_dict[k].extend([str(e) for e in errors]) |
|
82 | r_dict[k].extend([str(e) for e in errors]) | |
80 | return r_dict |
|
83 | return r_dict | |
81 |
|
84 | |||
82 | @property |
|
85 | @property | |
83 | def errors_json(self): |
|
86 | def errors_json(self): | |
84 | return json.dumps(self.errors_dict) |
|
87 | return json.dumps(self.errors_dict) | |
85 |
|
88 | |||
86 | def populate_obj(self, obj, ignore_none=False): |
|
89 | def populate_obj(self, obj, ignore_none=False): | |
87 | """ |
|
90 | """ | |
88 | Populates the attributes of the passed `obj` with data from the form's |
|
91 | Populates the attributes of the passed `obj` with data from the form's | |
89 | fields. |
|
92 | fields. | |
90 |
|
93 | |||
91 | :note: This is a destructive operation; Any attribute with the same name |
|
94 | :note: This is a destructive operation; Any attribute with the same name | |
92 | as a field will be overridden. Use with caution. |
|
95 | as a field will be overridden. Use with caution. | |
93 | """ |
|
96 | """ | |
94 | if ignore_none: |
|
97 | if ignore_none: | |
95 | for name, field in iteritems(self._fields): |
|
98 | for name, field in iteritems(self._fields): | |
96 | if field.data is not None: |
|
99 | if field.data is not None: | |
97 | field.populate_obj(obj, name) |
|
100 | field.populate_obj(obj, name) | |
98 | else: |
|
101 | else: | |
99 | for name, field in iteritems(self._fields): |
|
102 | for name, field in iteritems(self._fields): | |
100 | field.populate_obj(obj, name) |
|
103 | field.populate_obj(obj, name) | |
101 |
|
104 | |||
102 | css_classes = {} |
|
105 | css_classes = {} | |
103 | ignore_labels = {} |
|
106 | ignore_labels = {} | |
104 |
|
107 | |||
105 |
|
108 | |||
106 | class SignInForm(ReactorForm): |
|
109 | class SignInForm(ReactorForm): | |
107 | came_from = wtforms.HiddenField() |
|
110 | came_from = wtforms.HiddenField() | |
108 |
sign_in_user_name = wtforms.StringField(_( |
|
111 | sign_in_user_name = wtforms.StringField(_("User Name")) | |
109 |
sign_in_user_password = wtforms.PasswordField(_( |
|
112 | sign_in_user_password = wtforms.PasswordField(_("Password")) | |
110 |
|
113 | |||
111 |
ignore_labels = [ |
|
114 | ignore_labels = ["submit"] | |
112 |
css_classes = { |
|
115 | css_classes = {"submit": "btn btn-primary"} | |
113 |
|
116 | |||
114 | html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'}, |
|
117 | html_attrs = { | |
115 | 'sign_in_user_password': { |
|
118 | "sign_in_user_name": {"placeholder": "Your login"}, | |
116 |
|
|
119 | "sign_in_user_password": {"placeholder": "Your password"}, | |
|
120 | } | |||
117 |
|
121 | |||
118 |
|
122 | |||
119 | from wtforms.widgets import html_params, HTMLString |
|
123 | from wtforms.widgets import html_params, HTMLString | |
120 |
|
124 | |||
121 |
|
125 | |||
122 |
def select_multi_checkbox(field, ul_class= |
|
126 | def select_multi_checkbox(field, ul_class="set", **kwargs): | |
123 | """Render a multi-checkbox widget""" |
|
127 | """Render a multi-checkbox widget""" | |
124 |
kwargs.setdefault( |
|
128 | kwargs.setdefault("type", "checkbox") | |
125 |
field_id = kwargs.pop( |
|
129 | field_id = kwargs.pop("id", field.id) | |
126 |
html = [ |
|
130 | html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)] | |
127 | for value, label, checked in field.iter_choices(): |
|
131 | for value, label, checked in field.iter_choices(): | |
128 |
choice_id = |
|
132 | choice_id = "%s-%s" % (field_id, value) | |
129 | options = dict(kwargs, name=field.name, value=value, id=choice_id) |
|
133 | options = dict(kwargs, name=field.name, value=value, id=choice_id) | |
130 | if checked: |
|
134 | if checked: | |
131 |
options[ |
|
135 | options["checked"] = "checked" | |
132 |
html.append( |
|
136 | html.append("<li><input %s /> " % html_params(**options)) | |
133 | html.append('<label for="%s">%s</label></li>' % (choice_id, label)) |
|
137 | html.append('<label for="%s">%s</label></li>' % (choice_id, label)) | |
134 |
html.append( |
|
138 | html.append("</ul>") | |
135 |
return HTMLString( |
|
139 | return HTMLString("".join(html)) | |
136 |
|
140 | |||
137 |
|
141 | |||
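select_multi_checkbox above walks field.iter_choices() and emits one checkbox-plus-label <li> per choice inside a <ul>. Stripped of wtforms, the markup it builds looks like this (a simplified standalone re-implementation for illustration; the input name is collapsed into field_id for brevity):

def render_choices(field_id, choices):
    # choices: iterable of (value, label, checked), like field.iter_choices()
    html = ['<ul id="%s" class="set">' % field_id]
    for value, label, checked in choices:
        checked_attr = ' checked="checked"' if checked else ""
        html.append(
            '<li><input type="checkbox" name="%s" value="%s" id="%s-%s"%s /> '
            '<label for="%s-%s">%s</label></li>'
            % (field_id, value, field_id, value, checked_attr, field_id, value, label)
        )
    html.append("</ul>")
    return "".join(html)

render_choices("channels", [("1", "Email", True), ("2", "Slack", False)])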
138 | def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs):
142 | def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs): |
139 | """Render a button widget""" |
|
143 | """Render a button widget""" | |
140 |
kwargs.setdefault( |
|
144 | kwargs.setdefault("type", "button") | |
141 |
field_id = kwargs.pop( |
|
145 | field_id = kwargs.pop("id", field.id) | |
142 |
kwargs.setdefault( |
|
146 | kwargs.setdefault("value", field.label.text) | |
143 | html = ['<button %s>%s</button>' % (html_params(id=field_id, |
|
147 | html = [ | |
144 | class_=button_cls), |
|
148 | "<button %s>%s</button>" | |
145 | kwargs['value'],)] |
|
149 | % (html_params(id=field_id, class_=button_cls), kwargs["value"]) | |
146 | return HTMLString(''.join(html)) |
|
150 | ] | |
|
151 | return HTMLString("".join(html)) | |||

def clean_whitespace(value):
    if value:
        return value.strip()
    return value


def found_username_validator(form, field):
    user = UserService.by_user_name(field.data)
    # sets user to recover in email validator
    form.field_user = user
    if not user:
        raise wtforms.ValidationError("This username does not exist")


def found_username_email_validator(form, field):
    user = UserService.by_email(field.data)
    if not user:
        raise wtforms.ValidationError("Email is incorrect")


def unique_username_validator(form, field):
    user = UserService.by_user_name(field.data)
    if user:
        raise wtforms.ValidationError("This username already exists in system")


def unique_groupname_validator(form, field):
    group = GroupService.by_group_name(field.data)
    mod_group = getattr(form, "_modified_group", None)
    if group and (not mod_group or mod_group.id != group.id):
        raise wtforms.ValidationError("This group name already exists in system")


def unique_email_validator(form, field):
    user = UserService.by_email(field.data)
    if user:
        raise wtforms.ValidationError("This email already exists in system")


def email_validator(form, field):
    validator = formencode.validators.Email()
    try:
        validator.to_python(field.data)
    except formencode.Invalid as e:
        raise wtforms.ValidationError(e)


def unique_alert_email_validator(form, field):
    q = DBSession.query(AlertChannel)
    q = q.filter(AlertChannel.channel_name == "email")
    q = q.filter(AlertChannel.channel_value == field.data)
    email = q.first()
    if email:
        raise wtforms.ValidationError("This email already exists in alert system")


def blocked_email_validator(form, field):
    blocked_emails = [
        "goood-mail.org",
        "shoeonlineblog.com",
        "louboutinemart.com",
        "guccibagshere.com",
        "nikeshoesoutletforsale.com",
    ]
    data = field.data or ""
    domain = data.split("@")[-1]
    if domain in blocked_emails:
        raise wtforms.ValidationError("Don't spam")
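
# Worked example of the domain check above, with a hypothetical address:
#
#   >>> "someone@guccibagshere.com".split("@")[-1]
#   'guccibagshere.com'
#
# That domain is in blocked_emails, so the validator raises; an address at
# example.com yields "example.com" and passes.
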

def old_password_validator(form, field):
    if not UserService.check_password(field.user, field.data or ""):
        raise wtforms.ValidationError("You need to enter correct password")


class UserRegisterForm(ReactorForm):
    user_name = wtforms.StringField(
        _("User Name"),
        filters=[strip_filter],
        validators=[
            wtforms.validators.Length(min=2, max=30),
            wtforms.validators.Regexp(
                re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used"
            ),
            unique_username_validator,
            wtforms.validators.DataRequired(),
        ],
    )

    user_password = wtforms.PasswordField(
        _("User Password"),
        filters=[strip_filter],
        validators=[
            wtforms.validators.Length(min=4),
            wtforms.validators.DataRequired(),
        ],
    )

    email = wtforms.StringField(
        _("Email Address"),
        filters=[strip_filter],
        validators=[
            email_validator,
            unique_email_validator,
            blocked_email_validator,
            wtforms.validators.DataRequired(),
        ],
    )
    first_name = wtforms.HiddenField(_("First Name"))
    last_name = wtforms.HiddenField(_("Last Name"))

    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}

    html_attrs = {
        "user_name": {"placeholder": "Your login"},
        "user_password": {"placeholder": "Your password"},
        "email": {"placeholder": "Your email"},
    }
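
# Usage sketch, assuming the usual WTForms request cycle (the csrf_context
# argument is an assumption about how ReactorForm wires CSRF protection):
#
#   form = UserRegisterForm(request.POST, csrf_context=request)
#   if form.validate():
#       ...  # create the user
#   else:
#       errors = form.errors  # field name -> messages from the validators above
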

class UserCreateForm(UserRegisterForm):
    status = wtforms.BooleanField("User status", false_values=FALSE_VALUES)


class UserUpdateForm(UserCreateForm):
    user_name = None
    user_password = wtforms.PasswordField(
        _("User Password"),
        filters=[strip_filter],
        validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()],
    )
    email = wtforms.StringField(
        _("Email Address"),
        filters=[strip_filter],
        validators=[email_validator, wtforms.validators.DataRequired()],
    )


class LostPasswordForm(ReactorForm):
    email = wtforms.StringField(
        _("Email Address"),
        filters=[strip_filter],
        validators=[
            email_validator,
            found_username_email_validator,
            wtforms.validators.DataRequired(),
        ],
    )

    submit = wtforms.SubmitField(_("Reset password"))
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


class ChangePasswordForm(ReactorForm):
    old_password = wtforms.PasswordField(
        "Old Password",
        filters=[strip_filter],
        validators=[old_password_validator, wtforms.validators.DataRequired()],
    )

    new_password = wtforms.PasswordField(
        "New Password",
        filters=[strip_filter],
        validators=[
            wtforms.validators.Length(min=4),
            wtforms.validators.DataRequired(),
        ],
    )
    new_password_confirm = wtforms.PasswordField(
        "Confirm Password",
        filters=[strip_filter],
        validators=[
            wtforms.validators.EqualTo("new_password"),
            wtforms.validators.DataRequired(),
        ],
    )
    submit = wtforms.SubmitField("Change Password")
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


class CheckPasswordForm(ReactorForm):
    password = wtforms.PasswordField(
        "Password",
        filters=[strip_filter],
        validators=[old_password_validator, wtforms.validators.DataRequired()],
    )


class NewPasswordForm(ReactorForm):
    new_password = wtforms.PasswordField(
        "New Password",
        filters=[strip_filter],
        validators=[
            wtforms.validators.Length(min=4),
            wtforms.validators.DataRequired(),
        ],
    )
    new_password_confirm = wtforms.PasswordField(
        "Confirm Password",
        filters=[strip_filter],
        validators=[
            wtforms.validators.EqualTo("new_password"),
            wtforms.validators.DataRequired(),
        ],
    )
    submit = wtforms.SubmitField("Set Password")
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


class CORSTextAreaField(wtforms.StringField):
    """
    This field represents an HTML ``<textarea>`` and can be used to take
    multi-line input.
    """

    widget = wtforms.widgets.TextArea()

    def process_formdata(self, valuelist):
        self.data = []
        if valuelist:
            data = [x.strip() for x in valuelist[0].split("\n")]
            for d in data:
                if not d:
                    continue
                if d.startswith("www."):
                    d = d[4:]
                if data:
                    self.data.append(d)
        else:
            self.data = []
        self.data = "\n".join(self.data)
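
# Example of the normalization done by process_formdata above, for a
# hypothetical textarea submission:
#
#   input:  "www.example.com\n\nbugs.example.org\n"
#   output: "example.com\nbugs.example.org"
#
# The "www." prefix is stripped, blank lines are dropped, and the surviving
# domains are re-joined with newlines for storage.
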

class ApplicationCreateForm(ReactorForm):
    resource_name = wtforms.StringField(
        _("Application name"),
        filters=[strip_filter],
        validators=[
            wtforms.validators.Length(min=1),
            wtforms.validators.DataRequired(),
        ],
    )

    domains = CORSTextAreaField(
        _("Domain names for CORS headers "),
        validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()],
        description="Required for Javascript error "
        "tracking (one line one domain, skip http:// part)",
    )

    submit = wtforms.SubmitField(_("Create Application"))

    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}
    html_attrs = {
        "resource_name": {"placeholder": "Application Name"},
        "uptime_url": {"placeholder": "http://somedomain.com"},
    }


class ApplicationUpdateForm(ApplicationCreateForm):
    default_grouping = wtforms.SelectField(
        _("Default grouping for errors"),
        choices=[
            ("url_type", "Error Type + location"),
            ("url_traceback", "Traceback + location"),
            ("traceback_server", "Traceback + Server"),
        ],
        default="url_traceback",
    )

    error_report_threshold = wtforms.IntegerField(
        _("Alert on error reports"),
        validators=[
            wtforms.validators.NumberRange(min=1),
            wtforms.validators.DataRequired(),
        ],
        description="Application requires to send at least this amount of "
        "error reports per minute to open alert",
    )

    slow_report_threshold = wtforms.IntegerField(
        _("Alert on slow reports"),
        validators=[
            wtforms.validators.NumberRange(min=1),
            wtforms.validators.DataRequired(),
        ],
        description="Application requires to send at least this amount of "
        "slow reports per minute to open alert",
    )

    allow_permanent_storage = wtforms.BooleanField(
        _("Permanent logs"),
        false_values=FALSE_VALUES,
        description=_("Allow permanent storage of logs in separate DB partitions"),
    )

    submit = wtforms.SubmitField(_("Create Application"))


class UserSearchSchemaForm(ReactorForm):
    user_name = wtforms.StringField("User Name", filters=[strip_filter])

    submit = wtforms.SubmitField(_("Search User"))
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}

    '<li class="user_exists"><span></span></li>'


class YesNoForm(ReactorForm):
    no = wtforms.SubmitField("No", default="")
    yes = wtforms.SubmitField("Yes", default="")
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


status_codes = [("", "All"), ("500", "500"), ("404", "404")]

priorities = [("", "All")]
for i in range(1, 11):
    priorities.append((str(i), str(i)))

report_status_choices = [
    ("", "All"),
    ("never_reviewed", "Never reviewed"),
    ("reviewed", "Reviewed"),
    ("public", "Public"),
    ("fixed", "Fixed"),
]


class ReportBrowserForm(ReactorForm):
    applications = wtforms.SelectMultipleField(
        "Applications", widget=select_multi_checkbox
    )
    http_status = wtforms.SelectField("HTTP Status", choices=status_codes)
    priority = wtforms.SelectField("Priority", choices=priorities, default="")
    start_date = wtforms.DateField("Start Date")
    end_date = wtforms.DateField("End Date")
    error = wtforms.StringField("Error")
    url_path = wtforms.StringField("URL Path")
    url_domain = wtforms.StringField("URL Domain")
    report_status = wtforms.SelectField(
        "Report status", choices=report_status_choices, default=""
    )
    submit = wtforms.SubmitField(
        '<span class="glyphicon glyphicon-search"></span> Filter results',
        widget=button_widget,
    )

    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


slow_report_status_choices = [
    ("", "All"),
    ("never_reviewed", "Never reviewed"),
    ("reviewed", "Reviewed"),
    ("public", "Public"),
]


class BulkOperationForm(ReactorForm):
    applications = wtforms.SelectField("Applications")
    start_date = wtforms.DateField(
        "Start Date",
        default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90),
    )
    end_date = wtforms.DateField("End Date")
    confirm = wtforms.BooleanField(
        "Confirm operation", validators=[wtforms.validators.DataRequired()]
    )


class LogBrowserForm(ReactorForm):
    applications = wtforms.SelectMultipleField(
        "Applications", widget=select_multi_checkbox
    )
    start_date = wtforms.DateField("Start Date")
    log_level = wtforms.StringField("Log level")
    message = wtforms.StringField("Message")
    namespace = wtforms.StringField("Namespace")
    submit = wtforms.SubmitField(
        '<span class="glyphicon glyphicon-search"></span> Filter results',
        widget=button_widget,
    )
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


class CommentForm(ReactorForm):
    body = wtforms.TextAreaField(
        "Comment",
        validators=[
            wtforms.validators.Length(min=1),
            wtforms.validators.DataRequired(),
        ],
    )
    submit = wtforms.SubmitField("Comment")
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


class EmailChannelCreateForm(ReactorForm):
    email = wtforms.StringField(
        _("Email Address"),
        filters=[strip_filter],
        validators=[
            email_validator,
            unique_alert_email_validator,
            wtforms.validators.DataRequired(),
        ],
    )
    submit = wtforms.SubmitField("Add email channel")
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


def gen_user_profile_form():
    class UserProfileForm(ReactorForm):
        email = wtforms.StringField(
            _("Email Address"),
            validators=[email_validator, wtforms.validators.DataRequired()],
        )
        first_name = wtforms.StringField(_("First Name"))
        last_name = wtforms.StringField(_("Last Name"))
        company_name = wtforms.StringField(_("Company Name"))
        company_address = wtforms.TextAreaField(_("Company Address"))
        zip_code = wtforms.StringField(_("ZIP code"))
        city = wtforms.StringField(_("City"))
        notifications = wtforms.BooleanField(
            "Account notifications", false_values=FALSE_VALUES
        )
        submit = wtforms.SubmitField(_("Update Account"))
        ignore_labels = ["submit"]
        css_classes = {"submit": "btn btn-primary"}

    return UserProfileForm


class PurgeAppForm(ReactorForm):
    resource_id = wtforms.HiddenField(
        "App Id", validators=[wtforms.validators.DataRequired()]
    )
    days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()])
    password = wtforms.PasswordField(
        "Admin Password",
        validators=[old_password_validator, wtforms.validators.DataRequired()],
    )
    submit = wtforms.SubmitField(_("Purge Data"))
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


class IntegrationRepoForm(ReactorForm):
    host_name = wtforms.StringField("Service Host", default="")
    user_name = wtforms.StringField(
        "User Name",
        filters=[strip_filter],
        validators=[
            wtforms.validators.DataRequired(),
            wtforms.validators.Length(min=1),
        ],
    )
    repo_name = wtforms.StringField(
        "Repo Name",
        filters=[strip_filter],
        validators=[
            wtforms.validators.DataRequired(),
            wtforms.validators.Length(min=1),
        ],
    )


class IntegrationBitbucketForm(IntegrationRepoForm):
    host_name = wtforms.StringField("Service Host", default="https://bitbucket.org")

    def validate_user_name(self, field):
        try:
            request = pyramid.threadlocal.get_current_request()
            client = BitbucketIntegration.create_client(
                request, self.user_name.data, self.repo_name.data
            )
            client.get_assignees()
        except IntegrationException as e:
            raise wtforms.validators.ValidationError(str(e))


class IntegrationGithubForm(IntegrationRepoForm):
    host_name = wtforms.StringField("Service Host", default="https://github.com")

    def validate_user_name(self, field):
        try:
            request = pyramid.threadlocal.get_current_request()
            client = GithubIntegration.create_client(
                request, self.user_name.data, self.repo_name.data
            )
            client.get_assignees()
        except IntegrationException as e:
            raise wtforms.validators.ValidationError(str(e))


def filter_rooms(data):
    if data is not None:
        rooms = data.split(",")
        return ",".join([r.strip() for r in rooms])
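
# Example of the filter above (None passes through unchanged, since the
# function implicitly returns None for it):
#
#   >>> filter_rooms(" 101 , 102 ,103")
#   '101,102,103'
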

class IntegrationCampfireForm(ReactorForm):
    account = wtforms.StringField(
        "Account",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    api_token = wtforms.StringField(
        "Api Token",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    rooms = wtforms.StringField("Room ID list", filters=[filter_rooms])

    def validate_api_token(self, field):
        try:
            client = CampfireIntegration.create_client(
                self.api_token.data, self.account.data
            )
            client.get_account()
        except IntegrationException as e:
            raise wtforms.validators.ValidationError(str(e))

    def validate_rooms(self, field):
        if not field.data:
            return
        client = CampfireIntegration.create_client(
            self.api_token.data, self.account.data
        )

        try:
            room_list = [r["id"] for r in client.get_rooms()]
        except IntegrationException as e:
            raise wtforms.validators.ValidationError(str(e))

        rooms = field.data.split(",")
        if len(rooms) > 3:
            msg = "You can use up to 3 room ids"
            raise wtforms.validators.ValidationError(msg)
        if rooms:
            for room_id in rooms:
                # check for digits first so int() below cannot raise ValueError
                if not room_id.strip().isdigit():
                    msg = "You must use only integers for room ids"
                    raise wtforms.validators.ValidationError(msg)
                if int(room_id) not in room_list:
                    msg = "Room %s doesn't exist"
                    raise wtforms.validators.ValidationError(msg % room_id)

    submit = wtforms.SubmitField(_("Connect to Campfire"))
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


def filter_rooms(data):
    if data is not None:
        rooms = data.split(",")
        return ",".join([r.strip() for r in rooms])


class IntegrationHipchatForm(ReactorForm):
    api_token = wtforms.StringField(
        "Api Token",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    rooms = wtforms.StringField(
        "Room ID list",
        filters=[filter_rooms],
        validators=[wtforms.validators.DataRequired()],
    )

    def validate_rooms(self, field):
        if not field.data:
            return
        client = HipchatIntegration.create_client(self.api_token.data)
        rooms = field.data.split(",")
        if len(rooms) > 3:
            msg = "You can use up to 3 room ids"
            raise wtforms.validators.ValidationError(msg)
        if rooms:
            for room_id in rooms:
                if not room_id.strip().isdigit():
                    msg = "You must use only integers for room ids"
                    raise wtforms.validators.ValidationError(msg)
                try:
                    client.send(
                        {
                            "message_format": "text",
                            "message": "testing for room existence",
                            "from": "AppEnlight",
                            "room_id": room_id,
                            "color": "green",
                        }
                    )
                except IntegrationException as exc:
                    msg = "Room id: %s exception: %s"
                    raise wtforms.validators.ValidationError(msg % (room_id, exc))


class IntegrationFlowdockForm(ReactorForm):
    api_token = wtforms.StringField(
        "API Token",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )

    def validate_api_token(self, field):
        try:
            client = FlowdockIntegration.create_client(self.api_token.data)
            registry = pyramid.threadlocal.get_current_registry()
            payload = {
                "source": registry.settings["mailing.from_name"],
                "from_address": registry.settings["mailing.from_email"],
                "subject": "Integration test",
                "content": "If you can see this it was successful",
                "tags": ["appenlight"],
                "link": registry.settings["mailing.app_url"],
            }
            client.send_to_inbox(payload)
        except IntegrationException as e:
            raise wtforms.validators.ValidationError(str(e))


class IntegrationSlackForm(ReactorForm):
    webhook_url = wtforms.StringField(
        "Reports webhook",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )

    def validate_webhook_url(self, field):
        registry = pyramid.threadlocal.get_current_registry()
        client = SlackIntegration.create_client(field.data)
        link = "<%s|%s>" % (
            registry.settings["mailing.app_url"],
            registry.settings["mailing.from_name"],
        )
        test_data = {
            "username": "AppEnlight",
            "icon_emoji": ":fire:",
            "attachments": [
                {
                    "fallback": "Testing integration channel: %s" % link,
                    "pretext": "Testing integration channel: %s" % link,
                    "color": "good",
                    "fields": [
                        {
                            "title": "Status",
                            "value": "Integration is working fine",
                            "short": False,
                        }
                    ],
                }
            ],
        }
        try:
            client.make_request(data=test_data)
        except IntegrationException as exc:
            raise wtforms.validators.ValidationError(str(exc))
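
    # The validator above tests the webhook end to end. Assuming make_request
    # posts the payload as JSON (the transport is internal to SlackIntegration),
    # the equivalent check with plain `requests` would be:
    #
    #   import requests
    #   resp = requests.post(field.data, json=test_data, timeout=10)
    #   resp.raise_for_status()
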

class IntegrationWebhooksForm(ReactorForm):
    reports_webhook = wtforms.StringField(
        "Reports webhook",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    alerts_webhook = wtforms.StringField(
        "Alerts webhook",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    submit = wtforms.SubmitField(_("Setup webhooks"))
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-primary"}


class IntegrationJiraForm(ReactorForm):
    host_name = wtforms.StringField(
        "Server URL",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    user_name = wtforms.StringField(
        "Username",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    password = wtforms.PasswordField(
        "Password",
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )
    project = wtforms.StringField(
        "Project key",
        filters=[uppercase_filter, strip_filter],
        validators=[wtforms.validators.DataRequired()],
    )

    def validate_project(self, field):
        if not field.data:
            return
        try:
            client = JiraClient(
                self.user_name.data,
                self.password.data,
                self.host_name.data,
                self.project.data,
            )
        except Exception as exc:
            raise wtforms.validators.ValidationError(str(exc))

        room_list = [r.key.upper() for r in client.get_projects()]
        if field.data.upper() not in room_list:
            msg = "Project %s doesn't exist in your Jira Instance"
            raise wtforms.validators.ValidationError(msg % field.data)


def get_deletion_form(resource):
    class F(ReactorForm):
        application_name = wtforms.StringField(
            "Application Name",
            filters=[strip_filter],
            validators=[wtforms.validators.AnyOf([resource.resource_name])],
        )
        resource_id = wtforms.HiddenField(default=resource.resource_id)
        submit = wtforms.SubmitField(_("Delete my application"))
        ignore_labels = ["submit"]
        css_classes = {"submit": "btn btn-danger"}

    return F
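
# Usage sketch for the factory above (resource is assumed to be an application
# model instance exposing resource_name and resource_id):
#
#   DeletionForm = get_deletion_form(resource)
#   form = DeletionForm(request.POST)
#
# AnyOf then accepts only the exact application name, which is why the form
# class must be built per resource.
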

class ChangeApplicationOwnerForm(ReactorForm):
    password = wtforms.PasswordField(
        "Password",
        filters=[strip_filter],
        validators=[old_password_validator, wtforms.validators.DataRequired()],
    )

    user_name = wtforms.StringField(
        "New owner's username",
        filters=[strip_filter],
        validators=[found_username_validator, wtforms.validators.DataRequired()],
    )
    submit = wtforms.SubmitField(_("Transfer ownership of application"))
    ignore_labels = ["submit"]
    css_classes = {"submit": "btn btn-danger"}


def default_filename():
    return "Invoice %s" % datetime.datetime.utcnow().strftime("%Y/%m")


class FileUploadForm(ReactorForm):
    title = wtforms.StringField(
        "File Title",
        default=default_filename,
        validators=[wtforms.validators.DataRequired()],
    )
    file = wtforms.FileField("File")

    def validate_file(self, field):
        if not hasattr(field.data, "file"):
            raise wtforms.ValidationError("File is missing")

    submit = wtforms.SubmitField(_("Upload"))


def get_partition_deletion_form(es_indices, pg_indices):
    class F(ReactorForm):
        es_index = wtforms.SelectMultipleField(
            "Elasticsearch", choices=[(ix, "") for ix in es_indices]
        )
        pg_index = wtforms.SelectMultipleField(
            "pg", choices=[(ix, "") for ix in pg_indices]
        )
        confirm = wtforms.TextField(
            "Confirm",
            filters=[uppercase_filter, strip_filter],
            validators=[
                wtforms.validators.AnyOf(["CONFIRM"]),
                wtforms.validators.DataRequired(),
            ],
        )
        ignore_labels = ["submit"]
        css_classes = {"submit": "btn btn-danger"}

    return F


class GroupCreateForm(ReactorForm):
    group_name = wtforms.StringField(
        _("Group Name"),
        filters=[strip_filter],
        validators=[
            wtforms.validators.Length(min=2, max=50),
            unique_groupname_validator,
            wtforms.validators.DataRequired(),
        ],
    )
    description = wtforms.StringField(_("Group description"))


time_choices = [(k, v["label"]) for k, v in h.time_deltas.items()]


class AuthTokenCreateForm(ReactorForm):
    description = wtforms.StringField(_("Token description"))
    expires = wtforms.SelectField(
        "Expires",
        coerce=lambda x: x,
        choices=time_choices,
        validators=[wtforms.validators.Optional()],
    )
@@ -1,50 +1,49 @@

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Miscellaneous support packages for {{project}}.
"""
import random
import string
import importlib

from appenlight_client.exceptions import get_current_traceback


def generate_random_string(chars=10):
    return "".join(random.sample(string.ascii_letters * 2 + string.digits, chars))


def to_integer_safe(input):
    try:
        return int(input)
    except (TypeError, ValueError):
        return None


def print_traceback(log):
    traceback = get_current_traceback(
        skip=1, show_hidden_frames=True, ignore_system_exceptions=True
    )
    exception_text = traceback.exception
    log.error(exception_text)
    log.error(traceback.plaintext)
    del traceback


def get_callable(import_string):
    import_module, indexer_callable = import_string.split(":")
    return getattr(importlib.import_module(import_module), indexer_callable)
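
# Examples for the helpers above (illustrative values):
#
#   >>> to_integer_safe("42")
#   42
#   >>> to_integer_safe("not a number") is None
#   True
#   >>> get_callable("os.path:join") is __import__("os").path.join
#   True
#
# get_callable expects "dotted.module:attribute" strings, mirroring the
# setuptools entry-point syntax.
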
@@ -1,80 +1,80 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import logging

from pyramid.httpexceptions import HTTPForbidden, HTTPTooManyRequests

from appenlight.models.services.config import ConfigService
from appenlight.lib.redis_keys import REDIS_KEYS

log = logging.getLogger(__name__)


def rate_limiting(request, resource, section, to_increment=1):
    tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
    key = REDIS_KEYS["rate_limits"][section].format(tsample, resource.resource_id)
    redis_pipeline = request.registry.redis_conn.pipeline()
    redis_pipeline.incr(key, to_increment)
    redis_pipeline.expire(key, 3600 * 24)
    results = redis_pipeline.execute()
    current_count = results[0]
    config = ConfigService.by_key_and_section(section, "global")
    limit = config.value if config else 1000
    if current_count > int(limit):
        log.info("RATE LIMITING: {}: {}, {}".format(section, resource, current_count))
        abort_msg = "Rate limits are in effect for this application"
        raise HTTPTooManyRequests(abort_msg, headers={"X-AppEnlight": abort_msg})


def check_cors(request, application, should_return=True):
    """
    Performs a check and validation if request comes from authorized domain for
    application, otherwise return 403
    """
    origin_found = False
    origin = request.headers.get("Origin")
    if should_return:
        log.info("CORS for %s" % origin)
    if not origin:
        return False
    for domain in application.domains.split("\n"):
        if domain in origin:
            origin_found = True
    if origin_found:
        request.response.headers.add("Access-Control-Allow-Origin", origin)
        request.response.headers.add("XDomainRequestAllowed", "1")
        request.response.headers.add(
            "Access-Control-Allow-Methods", "GET, POST, OPTIONS"
        )
        request.response.headers.add(
            "Access-Control-Allow-Headers",
            "Accept-Encoding, Accept-Language, "
            "Content-Type, "
            "Depth, User-Agent, X-File-Size, "
            "X-Requested-With, If-Modified-Since, "
            "X-File-Name, "
            "Cache-Control, Host, Pragma, Accept, "
            "Origin, Connection, "
            "Referer, Cookie, "
            "X-appenlight-public-api-key, "
            "x-appenlight-public-api-key",
        )
        request.response.headers.add("Access-Control-Max-Age", "86400")
        return request.response
    else:
        return HTTPForbidden()
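
In effect, `rate_limiting` keeps one Redis counter per resource per UTC minute and rejects traffic once the configured per-minute limit is exceeded. A rough sketch of the bucketing (the key template below is an assumption for illustration; the real one comes from `REDIS_KEYS["rate_limits"]`):

    import datetime

    def minute_bucket_key(section, resource_id, now=None):
        # Requests within the same UTC minute share one counter; the
        # counter expires after 24 h (see redis_pipeline.expire above).
        now = now or datetime.datetime.utcnow()
        tsample = now.replace(second=0, microsecond=0)
        # Assumed key shape, for illustration only.
        return "rate_limits:{}:{}:{}".format(section, tsample, resource_id)
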
@@ -1,184 +1,169 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import hashlib
import inspect

from dogpile.cache import make_region
from dogpile.cache.util import compat

regions = None


def key_mangler(key):
    return "appenlight:dogpile:{}".format(key)


def hashgen(namespace, fn, to_str=compat.string_type):
    """Return a function that generates a string
    key, based on a given function as well as
    arguments to the returned function itself.

    This is used by :meth:`.CacheRegion.cache_on_arguments`
    to generate a cache key from a decorated function.

    It can be replaced using the ``function_key_generator``
    argument passed to :func:`.make_region`.

    """

    if namespace is None:
        namespace = "%s:%s" % (fn.__module__, fn.__name__)
    else:
        namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)

    args = inspect.getargspec(fn)
    has_self = args[0] and args[0][0] in ("self", "cls")

    def generate_key(*args, **kw):
        if kw:
            raise ValueError(
                "dogpile.cache's default key creation "
                "function does not accept keyword arguments."
            )
        if has_self:
            args = args[1:]

        return (
            namespace
            + "|"
            + hashlib.sha1(" ".join(map(to_str, args)).encode("utf8")).hexdigest()
        )

    return generate_key


class CacheRegions(object):
    def __init__(self, settings):
        config_redis = {"arguments": settings}

        self.redis_min_1 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
        )
        self.redis_min_5 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=300, **copy.deepcopy(config_redis)
        )

        self.redis_min_10 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
        )

        self.redis_min_60 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=3600, **copy.deepcopy(config_redis)
        )

        self.redis_sec_1 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=1, **copy.deepcopy(config_redis)
        )

        self.redis_sec_5 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=5, **copy.deepcopy(config_redis)
        )

        self.redis_sec_30 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=30, **copy.deepcopy(config_redis)
        )

        self.redis_day_1 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis", expiration_time=86400, **copy.deepcopy(config_redis)
        )

        self.redis_day_7 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis",
            expiration_time=86400 * 7,
            **copy.deepcopy(config_redis)
        )

        self.redis_day_30 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.redis",
            expiration_time=86400 * 30,
            **copy.deepcopy(config_redis)
        )

        self.memory_day_1 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure(
            "dogpile.cache.memory", expiration_time=86400, **copy.deepcopy(config_redis)
        )

        self.memory_sec_1 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure("dogpile.cache.memory", expiration_time=1)

        self.memory_sec_5 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure("dogpile.cache.memory", expiration_time=5)

        self.memory_min_1 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure("dogpile.cache.memory", expiration_time=60)

        self.memory_min_5 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure("dogpile.cache.memory", expiration_time=300)

        self.memory_min_10 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure("dogpile.cache.memory", expiration_time=600)

        self.memory_min_60 = make_region(
            function_key_generator=hashgen, key_mangler=key_mangler
        ).configure("dogpile.cache.memory", expiration_time=3600)


def get_region(region):
    return getattr(regions, region)
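
One quirk worth noting: `redis_min_10` is configured with `expiration_time=60`, the same as `redis_min_1`, on both sides of this diff. A minimal wiring sketch, assuming `settings` carries the dogpile Redis backend arguments and that the application assigns the module-level `regions` at startup (import path and URL are illustrative):

    from appenlight.lib import cache_regions as cache  # assumed path

    cache.regions = cache.CacheRegions({"url": "redis://localhost:6379/0"})
    region = cache.get_region("redis_min_5")

    @region.cache_on_arguments()
    def expensive_lookup(item_id):
        # Recomputed at most once per 300 s for each distinct item_id.
        return {"id": item_id}
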
@@ -1,92 +1,92 @@
import inspect
import logging

from pyramid.config import Configurator

log = logging.getLogger(__name__)


class InspectProxy(object):
    """
    Proxy to the `inspect` module that allows us to use the pyramid include
    mechanism for cythonized modules without source file.
    """

    def _get_cyfunction_func_code(self, cyfunction):
        """
        Unpack the `func_code` attribute of a cython function.
        """
        if inspect.ismethod(cyfunction):
            cyfunction = cyfunction.im_func
        return getattr(cyfunction, "func_code")

    def getmodule(self, *args, **kwds):
        """
        Simple proxy to `inspect.getmodule`.
        """
        return inspect.getmodule(*args, **kwds)

    def getsourcefile(self, obj):
        """
        Proxy to `inspect.getsourcefile` or `inspect.getfile` depending on if
        it's called to look up the source file that contains the magic pyramid
        `includeme` callable.

        For cythonized modules the source file may be deleted. Therefore we
        return the result of `inspect.getfile` instead. In the case of the
        `configurator.include` method this is OK, because the result is passed
        to `os.path.dirname` which strips the file name. So it doesn't matter
        if we return the path to the source file or another file in the same
        directory.
        """
        # Check if it's called to look up the source file that contains the
        # magic pyramid `includeme` callable.
        if getattr(obj, "__name__") == "includeme":
            try:
                return inspect.getfile(obj)
            except TypeError as e:
                # Cython functions are not recognized as functions by the
                # inspect module. We have to unpack the func_code attribute
                # ourself.
                if "cyfunction" in e.message:
                    obj = self._get_cyfunction_func_code(obj)
                    return inspect.getfile(obj)
                raise
        else:
            return inspect.getsourcefile(obj)


class CythonCompatConfigurator(Configurator):
    """
    Customized configurator to replace the inspect class attribute with
    a custom one that is cython compatible.
    """

    inspect = InspectProxy()


def register_appenlight_plugin(config, plugin_name, plugin_config):
    def register():
        log.warning("Registering plugin: {}".format(plugin_name))
        if plugin_name not in config.registry.appenlight_plugins:
            config.registry.appenlight_plugins[plugin_name] = {
                "javascript": None,
                "static": None,
                "css": None,
                "celery_tasks": None,
                "celery_beats": None,
                "fulltext_indexer": None,
                "sqlalchemy_migrations": None,
                "default_values_setter": None,
                "header_html": None,
                "resource_types": [],
                "url_gen": None,
            }
        config.registry.appenlight_plugins[plugin_name].update(plugin_config)
        # inform AE what kind of resource types we have available
        # so we can avoid failing when a plugin is removed but data
        # is still present in the db
        if plugin_config.get("resource_types"):
            config.registry.resource_types.extend(plugin_config["resource_types"])

    config.action("appenlight_plugin={}".format(plugin_name), register)
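
A sketch of how a plugin's `includeme` might call the registration above (plugin name and dotted paths are hypothetical; the keys mirror the defaults dict):

    def includeme(config):
        plugin_config = {
            "celery_tasks": "ae_demo_plugin.celery_tasks",  # hypothetical module
            "resource_types": ["demo_resource"],
        }
        register_appenlight_plugin(config, "ae_demo_plugin", plugin_config)
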
@@ -1,58 +1,58 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# this gets set on runtime
from cryptography.fernet import Fernet

ENCRYPTION_SECRET = None


def encrypt_fernet(value):
    # avoid double encryption
    # not sure if this is needed but it won't hurt too much to have this
    if value.startswith("enc$fernet$"):
        return value
    f = Fernet(ENCRYPTION_SECRET)
    return "enc$fernet${}".format(f.encrypt(value.encode("utf8")).decode("utf8"))


def decrypt_fernet(value):
    parts = value.split("$", 3)
    if not len(parts) == 3:
        # not encrypted values
        return value
    else:
        f = Fernet(ENCRYPTION_SECRET)
        decrypted_data = f.decrypt(parts[2].encode("utf8")).decode("utf8")
        return decrypted_data


def encrypt_dictionary_keys(_dict, exclude_keys=None):
    if not exclude_keys:
        exclude_keys = []
    keys = [k for k in _dict.keys() if k not in exclude_keys]
    for k in keys:
        _dict[k] = encrypt_fernet(_dict[k])
    return _dict


def decrypt_dictionary_keys(_dict, exclude_keys=None):
    if not exclude_keys:
        exclude_keys = []
    keys = [k for k in _dict.keys() if k not in exclude_keys]
    for k in keys:
        _dict[k] = decrypt_fernet(_dict[k])
    return _dict
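
A round-trip sketch, assuming `ENCRYPTION_SECRET` is assigned a valid Fernet key at startup (the import path is an assumption):

    from cryptography.fernet import Fernet
    import appenlight.lib.encryption as encryption  # assumed path

    encryption.ENCRYPTION_SECRET = Fernet.generate_key()
    token = encryption.encrypt_fernet("secret-value")     # "enc$fernet$gAAAA..."
    assert encryption.encrypt_fernet(token) == token      # no double encryption
    assert encryption.decrypt_fernet(token) == "secret-value"
    assert encryption.decrypt_fernet("plain") == "plain"  # passthrough
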
@@ -1,88 +1,90 @@
import collections

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


class StupidEnum(object):
    @classmethod
    def set_inverse(cls):
        cls._inverse_values = dict(
            (y, x)
            for x, y in vars(cls).items()
            if not x.startswith("_") and not callable(y)
        )

    @classmethod
    def key_from_value(cls, value):
        if not hasattr(cls, "_inverse_values"):
            cls.set_inverse()
        return cls._inverse_values.get(value)


class ReportType(StupidEnum):
    unknown = 0
    error = 1
    not_found = 2
    slow = 3


class Language(StupidEnum):
    unknown = 0
    python = 1
    javascript = 2
    java = 3
    objectivec = 4
    swift = 5
    cpp = 6
    basic = 7
    csharp = 8
    php = 9
    perl = 10
    vb = 11
    vbnet = 12
    ruby = 13
    fsharp = 14
    actionscript = 15
    go = 16
    scala = 17
    haskell = 18
    erlang = 19
    haxe = 20
    scheme = 21


class LogLevel(StupidEnum):
    UNKNOWN = 0
    DEBUG = 2
    TRACE = 4
    INFO = 6
    WARNING = 8
    ERROR = 10
    CRITICAL = 12
    FATAL = 14


class LogLevelPython(StupidEnum):
    CRITICAL = 50
    ERROR = 40
    WARNING = 30
    INFO = 20
    DEBUG = 10
    NOTSET = 0


class ParsedSentryEventType(StupidEnum):
    ERROR_REPORT = 1
    LOG = 2
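
The inverse map these classes build on first use makes value-to-name lookups reversible, e.g.:

    assert ReportType.key_from_value(3) == "slow"
    assert LogLevel.key_from_value(10) == "ERROR"
    assert Language.key_from_value(99) is None  # unmapped values return None
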
@@ -1,148 +1,143 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
ex-json borrowed from Marcin Kuzminski

source: https://secure.rhodecode.org/ext-json

"""
import datetime
import functools
import decimal
import imp

__all__ = ["json", "simplejson", "stdlibjson"]


def _is_aware(value):
    """
    Determines if a given datetime.time is aware.

    The logic is described in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """
    return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None


def _obj_dump(obj):
    """
    Custom function for dumping objects to JSON, if obj has __json__ attribute
    or method defined it will be used for serialization

    :param obj:
    """

    if isinstance(obj, complex):
        return [obj.real, obj.imag]
    # See "Date Time String Format" in the ECMA-262 specification.
    # some code borrowed from django 1.4
    elif isinstance(obj, datetime.datetime):
        r = obj.isoformat()
        # if obj.microsecond:
        #     r = r[:23] + r[26:]
        if r.endswith("+00:00"):
            r = r[:-6] + "Z"
        return r
    elif isinstance(obj, datetime.date):
        return obj.isoformat()
    elif isinstance(obj, decimal.Decimal):
        return str(obj)
    elif isinstance(obj, datetime.time):
        if _is_aware(obj):
            raise ValueError("JSON can't represent timezone-aware times.")
        r = obj.isoformat()
        if obj.microsecond:
            r = r[:12]
        return r
    elif isinstance(obj, set):
        return list(obj)
    elif hasattr(obj, "__json__"):
        if callable(obj.__json__):
            return obj.__json__()
        else:
            return obj.__json__
    else:
        raise NotImplementedError


# Import simplejson
try:
    # import simplejson initially
    _sj = imp.load_module("_sj", *imp.find_module("simplejson"))

    def extended_encode(obj):
        try:
            return _obj_dump(obj)
        except NotImplementedError:
            pass
        raise TypeError("%r is not JSON serializable" % (obj,))

    # we handle decimals our own it makes unified behavior of json vs
    # simplejson
    sj_version = [int(x) for x in _sj.__version__.split(".")]
    major, minor = sj_version[0], sj_version[1]
    if major < 2 or (major == 2 and minor < 1):
        # simplejson < 2.1 doesnt support use_decimal
        _sj.dumps = functools.partial(_sj.dumps, default=extended_encode)
        _sj.dump = functools.partial(_sj.dump, default=extended_encode)
    else:
        _sj.dumps = functools.partial(
            _sj.dumps, default=extended_encode, use_decimal=False
        )
        _sj.dump = functools.partial(
            _sj.dump, default=extended_encode, use_decimal=False
        )
    simplejson = _sj

except ImportError:
    # no simplejson set it to None
    simplejson = None

try:
    # simplejson not found try out regular json module
    _json = imp.load_module("_json", *imp.find_module("json"))

    # extended JSON encoder for json
    class ExtendedEncoder(_json.JSONEncoder):
        def default(self, obj):
            try:
                return _obj_dump(obj)
            except NotImplementedError:
                pass
            raise TypeError("%r is not JSON serializable" % (obj,))

    # monkey-patch JSON encoder to use extended version
    _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
    _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)

except ImportError:
    json = None

stdlibjson = _json

# set all available json modules
if simplejson:
    json = _sj
elif _json:
    json = _json
else:
    raise ImportError("Could not find any json modules")
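
The practical effect of `_obj_dump` is a `__json__` protocol layered on the stock encoders; a small sketch (the class is hypothetical):

    from appenlight.lib.ext_json import json  # assumed path

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __json__(self):
            return {"x": self.x, "y": self.y}

    json.dumps({"p": Point(1, 2)})  # -> '{"p": {"x": 1, "y": 2}}'
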
@@ -1,119 +1,160 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Helper functions
"""
import copy
import datetime

from collections import namedtuple, OrderedDict

_ = lambda x: x

time_deltas = OrderedDict()

time_deltas["1m"] = {
    "delta": datetime.timedelta(minutes=1),
    "label": "1 minute",
    "minutes": 1,
}

time_deltas["5m"] = {
    "delta": datetime.timedelta(minutes=5),
    "label": "5 minutes",
    "minutes": 5,
}
time_deltas["30m"] = {
    "delta": datetime.timedelta(minutes=30),
    "label": "30 minutes",
    "minutes": 30,
}
time_deltas["1h"] = {
    "delta": datetime.timedelta(hours=1),
    "label": "60 minutes",
    "minutes": 60,
}
time_deltas["4h"] = {
    "delta": datetime.timedelta(hours=4),
    "label": "4 hours",
    "minutes": 60 * 4,
}
time_deltas["12h"] = {
    "delta": datetime.timedelta(hours=12),
    "label": "12 hours",
    "minutes": 60 * 12,
}
time_deltas["24h"] = {
    "delta": datetime.timedelta(hours=24),
    "label": "24 hours",
    "minutes": 60 * 24,
}
time_deltas["3d"] = {
    "delta": datetime.timedelta(days=3),
    "label": "3 days",
    "minutes": 60 * 24 * 3,
}
time_deltas["1w"] = {
    "delta": datetime.timedelta(days=7),
    "label": "7 days",
    "minutes": 60 * 24 * 7,
}
time_deltas["2w"] = {
    "delta": datetime.timedelta(days=14),
    "label": "14 days",
    "minutes": 60 * 24 * 14,
}
time_deltas["1M"] = {
    "delta": datetime.timedelta(days=31),
    "label": "31 days",
    "minutes": 60 * 24 * 31,
}
time_deltas["3M"] = {
    "delta": datetime.timedelta(days=31 * 3),
    "label": "3 months",
    "minutes": 60 * 24 * 31 * 3,
}
time_deltas["6M"] = {
    "delta": datetime.timedelta(days=31 * 6),
    "label": "6 months",
    "minutes": 60 * 24 * 31 * 6,
}
time_deltas["12M"] = {
    "delta": datetime.timedelta(days=31 * 12),
    "label": "12 months",
    "minutes": 60 * 24 * 31 * 12,
}

# used in json representation
time_options = dict(
    [
        (k, {"label": v["label"], "minutes": v["minutes"]})
        for k, v in time_deltas.items()
    ]
)
FlashMsg = namedtuple("FlashMsg", ["msg", "level"])


def get_flash(request):
    messages = []
    messages.extend(
        [FlashMsg(msg, "error") for msg in request.session.peek_flash("error")]
    )
    messages.extend(
        [FlashMsg(msg, "warning") for msg in request.session.peek_flash("warning")]
    )
    messages.extend([FlashMsg(msg, "notice") for msg in request.session.peek_flash()])
    return messages


def clear_flash(request):
    request.session.pop_flash("error")
    request.session.pop_flash("warning")
    request.session.pop_flash()


def get_type_formatted_flash(request):
    return [
        {"msg": message.msg, "type": message.level} for message in get_flash(request)
    ]


def gen_pagination_headers(request, paginator):
    headers = {
        "x-total-count": str(paginator.item_count),
        "x-current-page": str(paginator.page),
        "x-items-per-page": str(paginator.items_per_page),
    }
    params_dict = request.GET.dict_of_lists()
    last_page_params = copy.deepcopy(params_dict)
    last_page_params["page"] = paginator.last_page or 1
    first_page_params = copy.deepcopy(params_dict)
    first_page_params.pop("page", None)
    next_page_params = copy.deepcopy(params_dict)
    next_page_params["page"] = paginator.next_page or paginator.last_page or 1
    prev_page_params = copy.deepcopy(params_dict)
    prev_page_params["page"] = paginator.previous_page or 1
    lp_url = request.current_route_url(_query=last_page_params)
    fp_url = request.current_route_url(_query=first_page_params)
    links = ['rel="last", <{}>'.format(lp_url), 'rel="first", <{}>'.format(fp_url)]
    if first_page_params != prev_page_params:
        prev_url = request.current_route_url(_query=prev_page_params)
        links.append('rel="prev", <{}>'.format(prev_url))
    if last_page_params != next_page_params:
        next_url = request.current_route_url(_query=next_page_params)
        links.append('rel="next", <{}>'.format(next_url))
    headers["link"] = "; ".join(links)
    return headers
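
With illustrative values, `gen_pagination_headers` yields headers of roughly this shape (note the code writes `rel` before the URL, the reverse of the usual Link-header order):

    # {
    #     "x-total-count": "250",
    #     "x-current-page": "2",
    #     "x-items-per-page": "25",
    #     "link": 'rel="last", <https://host/logs?page=10>; '
    #             'rel="first", <https://host/logs>; '
    #             'rel="prev", <https://host/logs?page=1>; '
    #             'rel="next", <https://host/logs?page=3>',
    # }
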
@@ -1,46 +1,53 b''

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
from appenlight.lib.ext_json import json
from jinja2 import Markup, escape, evalcontextfilter

_paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")


@evalcontextfilter
def nl2br(eval_ctx, value):
    if eval_ctx.autoescape:
        result = "\n\n".join(
            "<p>%s</p>" % p.replace("\n", Markup("<br>\n"))
            for p in _paragraph_re.split(escape(value))
        )
    else:
        result = "\n\n".join(
            "<p>%s</p>" % p.replace("\n", "<br>\n")
            for p in _paragraph_re.split(escape(value))
        )
    if eval_ctx.autoescape:
        result = Markup(result)
    return result


@evalcontextfilter
def toJSONUnsafe(eval_ctx, value):
    encoded = (
        json.dumps(value)
        .replace("&", "\\u0026")
        .replace("<", "\\u003c")
        .replace(">", "\\u003e")
        .replace(">", "\\u003e")
        .replace('"', "\\u0022")
        .replace("'", "\\u0027")
        .replace(r"\n", "/\\\n")
    )
    return Markup("'%s'" % encoded)
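
# Usage sketch (assumes the jinja2 2.x API imported above, where
# evalcontextfilter and Markup still live in the jinja2 package): register
# the filters on an environment and render a multi-paragraph value.
from jinja2 import Environment

env = Environment(autoescape=True)
env.filters["nl2br"] = nl2br
env.filters["toJSONUnsafe"] = toJSONUnsafe
rendered = env.from_string("{{ text|nl2br }}").render(text="one\ntwo\n\nthree")
# rendered == "<p>one<br>\ntwo</p>\n\n<p>three</p>"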
@@ -1,64 +1,83 b''

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import logging

ignored_keys = [
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "message",
    "module",
    "msecs",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "stack_info",
    "thread",
    "threadName",
]


class JSONFormatter(logging.Formatter):
    def format(self, record):
        """
        Format the specified record as text.

        The record's attribute dictionary is used as the operand to a
        string formatting operation which yields the returned string.
        Before formatting the dictionary, a couple of preparatory steps
        are carried out. The message attribute of the record is computed
        using LogRecord.getMessage(). If the formatting string uses the
        time (as determined by a call to usesTime()), formatTime() is
        called to format the event time. If there is exception information,
        it is formatted using formatException() and appended to the message.
        """
        record.message = record.getMessage()
        log_dict = vars(record)
        keys = [k for k in log_dict.keys() if k not in ignored_keys]
        payload = {"message": record.message}
        payload.update({k: log_dict[k] for k in keys})
        record.message = json.dumps(payload, default=lambda x: str(x))

        if self.usesTime():
            record.asctime = self.formatTime(record, self.datefmt)
        s = self.formatMessage(record)
        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway)
            if not record.exc_text:
                record.exc_text = self.formatException(record.exc_info)
        if record.exc_text:
            if s[-1:] != "\n":
                s = s + "\n"
            s = s + record.exc_text
        if record.stack_info:
            if s[-1:] != "\n":
                s = s + "\n"
            s = s + self.formatStack(record.stack_info)
        return s
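
# Wiring sketch (assumed setup, not part of the diff): attach the formatter
# to a handler so the message slot of each record renders as a JSON payload
# that also carries any extra attributes not in ignored_keys.
import logging

handler = logging.StreamHandler()
handler.setFormatter(JSONFormatter("%(asctime)s %(message)s"))
logger = logging.getLogger("appenlight.demo")  # hypothetical logger name
logger.addHandler(handler)
logger.warning("queue lag", extra={"lag_seconds": 12})
# emits e.g.: 2017-01-01 12:00:00,000 {"message": "queue lag", "lag_seconds": 12}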
@@ -1,65 +1,69 b''

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

BASE = "appenlight:data:{}"

REDIS_KEYS = {
    "tasks": {
        "add_reports_lock": BASE.format("add_reports_lock:{}"),
        "add_logs_lock": BASE.format("add_logs_lock:{}"),
    },
    "counters": {
        "events_per_minute_per_user": BASE.format("events_per_minute_per_user:{}:{}"),
        "reports_per_minute": BASE.format("reports_per_minute:{}"),
        "reports_per_hour_per_app": BASE.format("reports_per_hour_per_app:{}:{}"),
        "reports_per_type": BASE.format("reports_per_type:{}"),
        "logs_per_minute": BASE.format("logs_per_minute:{}"),
        "logs_per_hour_per_app": BASE.format("logs_per_hour_per_app:{}:{}"),
        "metrics_per_minute": BASE.format("metrics_per_minute:{}"),
        "metrics_per_hour_per_app": BASE.format("metrics_per_hour_per_app:{}:{}"),
        "report_group_occurences": BASE.format("report_group_occurences:{}"),
        "report_group_occurences_alerting": BASE.format(
            "report_group_occurences_alerting:{}"
        ),
        "report_group_occurences_10th": BASE.format("report_group_occurences_10th:{}"),
        "report_group_occurences_100th": BASE.format(
            "report_group_occurences_100th:{}"
        ),
    },
    "rate_limits": {
        "per_application_reports_rate_limit": BASE.format(
            "per_application_reports_limit:{}:{}"
        ),
        "per_application_logs_rate_limit": BASE.format(
            "per_application_logs_rate_limit:{}:{}"
        ),
        "per_application_metrics_rate_limit": BASE.format(
            "per_application_metrics_rate_limit:{}:{}"
        ),
    },
    "apps_that_got_new_data_per_hour": BASE.format(
        "apps_that_got_new_data_per_hour:{}"
    ),
    "apps_that_had_reports": BASE.format("apps_that_had_reports"),
    "apps_that_had_error_reports": BASE.format("apps_that_had_error_reports"),
    "apps_that_had_reports_alerting": BASE.format("apps_that_had_reports_alerting"),
    "apps_that_had_error_reports_alerting": BASE.format(
        "apps_that_had_error_reports_alerting"
    ),
    "reports_to_notify_per_type_per_app": BASE.format(
        "reports_to_notify_per_type_per_app:{}:{}"
    ),
    "reports_to_notify_per_type_per_app_alerting": BASE.format(
        "reports_to_notify_per_type_per_app_alerting:{}:{}"
    ),
    "seen_tag_list": BASE.format("seen_tag_list"),
}
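
# Usage sketch (illustrative values): every entry is a key template whose
# remaining placeholders are filled at call sites, e.g. an app id and an
# hour bucket.
key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(42, "2017-01-01T10")
# key == "appenlight:data:reports_per_hour_per_app:42:2017-01-01T10"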
@@ -1,135 +1,131 b''

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

from pyramid.security import unauthenticated_userid

import appenlight.lib.helpers as helpers

from authomatic.providers import oauth2, oauth1
from authomatic import Authomatic
from ziggurat_foundations.models.services.user import UserService


class CSRFException(Exception):
    pass


class JSONException(Exception):
    pass


def get_csrf_token(request):
    return request.session.get_csrf_token()


def safe_json_body(request):
    """
    Returns None if json body is missing or erroneous
    """
    try:
        return request.json_body
    except ValueError:
        return None


def unsafe_json_body(request):
    """
    Throws JSONException if json can't deserialize
    """
    try:
        return request.json_body
    except ValueError:
        raise JSONException("Incorrect JSON")


def get_user(request):
    if not request.path_info.startswith("/static"):
        user_id = unauthenticated_userid(request)
        try:
            user_id = int(user_id)
        except Exception:
            return None

        if user_id:
            user = UserService.by_id(user_id)
            if user:
                request.environ["appenlight.username"] = "%d:%s" % (
                    user_id,
                    user.user_name,
                )
                return user
            else:
                return None


def es_conn(request):
    return request.registry.es_conn


def add_flash_to_headers(request, clear=True):
    """
    Adds pending flash messages to response, if clear is true clears out the
    flash queue
    """
    flash_msgs = helpers.get_type_formatted_flash(request)
    request.response.headers["x-flash-messages"] = json.dumps(flash_msgs)
    helpers.clear_flash(request)


def get_authomatic(request):
    settings = request.registry.settings
    # authomatic social auth
    authomatic_conf = {
        # callback http://yourapp.com/social_auth/twitter
        "twitter": {
            "class_": oauth1.Twitter,
            "consumer_key": settings.get("authomatic.pr.twitter.key", ""),
            "consumer_secret": settings.get("authomatic.pr.twitter.secret", ""),
        },
        # callback http://yourapp.com/social_auth/facebook
        "facebook": {
            "class_": oauth2.Facebook,
            "consumer_key": settings.get("authomatic.pr.facebook.app_id", ""),
            "consumer_secret": settings.get("authomatic.pr.facebook.secret", ""),
            "scope": ["email"],
        },
        # callback http://yourapp.com/social_auth/google
        "google": {
            "class_": oauth2.Google,
            "consumer_key": settings.get("authomatic.pr.google.key", ""),
            "consumer_secret": settings.get("authomatic.pr.google.secret", ""),
            "scope": ["profile", "email"],
        },
        "github": {
            "class_": oauth2.GitHub,
            "consumer_key": settings.get("authomatic.pr.github.key", ""),
            "consumer_secret": settings.get("authomatic.pr.github.secret", ""),
            "scope": ["repo", "public_repo", "user:email"],
            "access_headers": {"User-Agent": "AppEnlight"},
        },
        "bitbucket": {
            "class_": oauth1.Bitbucket,
            "consumer_key": settings.get("authomatic.pr.bitbucket.key", ""),
            "consumer_secret": settings.get("authomatic.pr.bitbucket.secret", ""),
        },
    }
    return Authomatic(config=authomatic_conf, secret=settings["authomatic.secret"])
@@ -1,298 +1,312 b''

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import operator

log = logging.getLogger(__name__)


class RuleException(Exception):
    pass


class KeyNotFoundException(RuleException):
    pass


class UnknownTypeException(RuleException):
    pass


class BadConfigException(RuleException):
    pass


class InvalidValueException(RuleException):
    pass


class RuleBase(object):
    @classmethod
    def default_dict_struct_getter(cls, struct, field_name):
        """
        returns a key from dictionary based on field_name, if the name contains
        `:` then it means additional nesting levels should be checked for the
        key so `a:b:c` means return struct['a']['b']['c']

        :param struct:
        :param field_name:
        :return:
        """
        parts = field_name.split(":") if field_name else []
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = found.get(current_key)
            if not found and parts:
                raise KeyNotFoundException("Key not found in structure")
        return found

    @classmethod
    def default_obj_struct_getter(cls, struct, field_name):
        """
        returns a key from instance based on field_name, if the name contains
        `:` then it means additional nesting levels should be checked for the
        key so `a:b:c` means return struct.a.b.c

        :param struct:
        :param field_name:
        :return:
        """
        parts = field_name.split(":")
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = getattr(found, current_key, None)
            if not found and parts:
                raise KeyNotFoundException("Key not found in structure")
        return found

    def normalized_type(self, field, value):
        """
        Converts text values from self.conf_value based on the type_matrix;
        the matrix defines what kind of cast can be performed on a field
        value based on field name
        """
        f_type = self.type_matrix.get(field)
        if f_type:
            cast_to = f_type["type"]
        else:
            raise UnknownTypeException("Unknown type")

        if value is None:
            return None

        try:
            if cast_to == "int":
                return int(value)
            elif cast_to == "float":
                return float(value)
            elif cast_to == "unicode":
                return str(value)
        except ValueError as exc:
            raise InvalidValueException(exc)


class Rule(RuleBase):
    def __init__(
        self,
        config,
        type_matrix,
        struct_getter=RuleBase.default_dict_struct_getter,
        config_manipulator=None,
    ):
        """

        :param config: dict - contains rule configuration
            example::
                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "field": "__AND__",
                            "rules": [
                                {
                                    "op": "ge",
                                    "field": "occurences",
                                    "value": "10"
                                },
                                {
                                    "op": "ge",
                                    "field": "priority",
                                    "value": "4"
                                }
                            ]
                        },
                        {
                            "op": "eq",
                            "field": "http_status",
                            "value": "500"
                        }
                    ]
                }
        :param type_matrix: dict - contains map of type casts
            example::
                {
                    'http_status': {'type': 'int'},
                    'priority': {'type': 'unicode'},
                }
        :param struct_getter: callable - used to grab the value of field from
            the structure passed to match() based on key; defaults to
            default_dict_struct_getter

        """
        self.type_matrix = type_matrix
        self.config = config
        self.struct_getter = struct_getter
        self.config_manipulator = config_manipulator
        if config_manipulator:
            config_manipulator(self)

    def subrule_check(self, rule_config, struct):
        rule = Rule(
            rule_config, self.type_matrix, config_manipulator=self.config_manipulator
        )
        return rule.match(struct)

    def match(self, struct):
        """
        Check if rule matched for this specific report
        First tries report value, then tests tags if not found, then finally
        report group
        """
        field_name = self.config.get("field")
        test_value = self.config.get("value")

        if not field_name:
            return False

        if field_name == "__AND__":
            rule = AND(
                self.config["rules"],
                self.type_matrix,
                config_manipulator=self.config_manipulator,
            )
            return rule.match(struct)
        elif field_name == "__OR__":
            rule = OR(
                self.config["rules"],
                self.type_matrix,
                config_manipulator=self.config_manipulator,
            )
            return rule.match(struct)
        elif field_name == "__NOT__":
            rule = NOT(
                self.config["rules"],
                self.type_matrix,
                config_manipulator=self.config_manipulator,
            )
            return rule.match(struct)

        if test_value is None:
            return False

        try:
            struct_value = self.normalized_type(
                field_name, self.struct_getter(struct, field_name)
            )
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False

        try:
            test_value = self.normalized_type(field_name, test_value)
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False

        if self.config["op"] not in ("startswith", "endswith", "contains"):
            try:
                return getattr(operator, self.config["op"])(struct_value, test_value)
            except TypeError:
                return False
        elif self.config["op"] == "startswith":
            return struct_value.startswith(test_value)
        elif self.config["op"] == "endswith":
            return struct_value.endswith(test_value)
        elif self.config["op"] == "contains":
            return test_value in struct_value
        raise BadConfigException(
            "Invalid configuration, unknown operator: {}".format(self.config)
        )

    def __repr__(self):
        return "<Rule {} {}>".format(self.config.get("field"), self.config.get("value"))


class AND(Rule):
    def __init__(self, rules, *args, **kwargs):
        super(AND, self).__init__({}, *args, **kwargs)
        self.rules = rules

    def match(self, struct):
        return all([self.subrule_check(r_conf, struct) for r_conf in self.rules])


class NOT(Rule):
    def __init__(self, rules, *args, **kwargs):
        super(NOT, self).__init__({}, *args, **kwargs)
        self.rules = rules

    def match(self, struct):
        return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules])


class OR(Rule):
    def __init__(self, rules, *args, **kwargs):
        super(OR, self).__init__({}, *args, **kwargs)
        self.rules = rules

    def match(self, struct):
        return any([self.subrule_check(r_conf, struct) for r_conf in self.rules])


class RuleService(object):
    @staticmethod
    def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None):
        """
        Returns modified rule with manipulator function
        By default manipulator function replaces field id from labels_dict
        with current field id proper for the rule from fields_mappings

        because label X_X id might be pointing to a different value on next
        request when a new term is returned from elasticsearch - this ensures
        things are kept 1:1 all the time
        """
        rev_map = {}
        for k, v in labels_dict.items():
            rev_map[(v["agg"], v["key"])] = k

        if manipulator_func is None:

            def label_rewriter_func(rule):
                field = rule.config.get("field")
                if not field or rule.config["field"] in [
                    "__OR__",
                    "__AND__",
                    "__NOT__",
                ]:
                    return

                to_map = field_mappings.get(rule.config["field"])

                # we need to replace series field with _AE_NOT_FOUND_ to not match
                # accidentally some other field which happens to have the series that
                # was used when the alert was created
                if to_map:
                    to_replace = rev_map.get(
                        (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_"
                    )
                else:
                    to_replace = "_AE_NOT_FOUND_"

                rule.config["field"] = to_replace
                rule.type_matrix[to_replace] = {"type": "float"}

            manipulator_func = label_rewriter_func

        return Rule(config, {}, config_manipulator=manipulator_func)
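
# Usage sketch (illustrative data): build a rule like the one in the Rule
# docstring and evaluate it against a report-like dict; string values are
# cast through the type_matrix before comparison.
type_matrix = {
    "http_status": {"type": "int"},
    "occurences": {"type": "int"},
}
rule = Rule(
    {
        "field": "__OR__",
        "rules": [
            {"op": "eq", "field": "http_status", "value": "500"},
            {"op": "ge", "field": "occurences", "value": "10"},
        ],
    },
    type_matrix,
)
rule.match({"http_status": "500", "occurences": "3"})  # True via the eq branch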
@@ -1,60 +1,62 b''

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ziggurat_foundations.models.services.external_identity import (
    ExternalIdentityService,
)
from appenlight.models.external_identity import ExternalIdentity


def handle_social_data(request, user, social_data):
    social_data = social_data
    update_identity = False

    extng_id = ExternalIdentityService.by_external_id_and_provider(
        social_data["user"]["id"], social_data["credentials"].provider_name
    )

    # fix legacy accounts with wrong google ID
    if not extng_id and social_data["credentials"].provider_name == "google":
        extng_id = ExternalIdentityService.by_external_id_and_provider(
            social_data["user"]["email"], social_data["credentials"].provider_name
        )

    if extng_id:
        extng_id.delete()
        update_identity = True

    if not social_data["user"]["id"]:
        request.session.flash(
            "No external user id found? Perhaps permissions for "
            "authentication are set incorrectly",
            "error",
        )
        return False

    if not extng_id or update_identity:
        if not update_identity:
            request.session.flash(
                "Your external identity is now connected with your account"
            )
        ex_identity = ExternalIdentity()
        ex_identity.external_id = social_data["user"]["id"]
        ex_identity.external_user_name = social_data["user"]["user_name"]
        ex_identity.provider_name = social_data["credentials"].provider_name
        ex_identity.access_token = social_data["credentials"].token
        ex_identity.token_secret = social_data["credentials"].token_secret
        ex_identity.alt_token = social_data["credentials"].refresh_token
        user.external_identities.append(ex_identity)
        request.session.pop("zigg.social_auth", None)
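
# Shape of the payload the function reads, reconstructed from the accesses
# above (values are illustrative; "credentials" stands in for an authomatic
# credentials object exposing .provider_name, .token, .token_secret and
# .refresh_token):
social_data = {
    "user": {"id": "12345", "user_name": "jdoe", "email": "jdoe@example.com"},
    "credentials": None,  # placeholder for the authomatic credentials object
}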
@@ -1,491 +1,548 b''

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Utility functions.
"""
import logging
import requests
import hashlib
import json
import copy
import uuid
import appenlight.lib.helpers as h
from collections import namedtuple
from datetime import timedelta, datetime, date
from dogpile.cache.api import NO_VALUE
from appenlight.models import Datastores
from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params
from itsdangerous import TimestampSigner
from ziggurat_foundations.permissions import ALL_PERMISSIONS
from ziggurat_foundations.models.services.user import UserService
from dateutil.relativedelta import relativedelta
from dateutil.rrule import rrule, MONTHLY, DAILY

log = logging.getLogger(__name__)


Stat = namedtuple("Stat", "start_interval value")


def default_extractor(item):
    """
    :param item - item to extract date from
    """
    if hasattr(item, "start_interval"):
        return item.start_interval
    return item["start_interval"]


# fast gap generator
def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None):
    """ generates a list of time/value items based on step and itemiterator
    if there are entries missing from iterator time/None will be returned
    instead
    :param start - datetime - what time should we start generating our values
    :param step - timedelta - stepsize
    :param itemiterator - iterable - we will check this iterable for values
        corresponding to generated steps
    :param end_time - datetime - when last step is >= end_time stop iterating
    :param iv_extractor - extracts current step from iterable items
    """

    if not iv_extractor:
        iv_extractor = default_extractor

    next_step = start
    minutes = step.total_seconds() / 60.0
    while next_step.minute % minutes != 0:
        next_step = next_step.replace(minute=next_step.minute - 1)
    for item in itemiterator:
        item_start_interval = iv_extractor(item)
        # do we have a match for current time step in our data?
        # if not, generate a new tuple with a None value
        while next_step < item_start_interval:
            yield Stat(next_step, None)
            next_step = next_step + step
        if next_step == item_start_interval:
            yield Stat(item_start_interval, item)
            next_step = next_step + step
    if end_time:
        while next_step < end_time:
            yield Stat(next_step, None)
            next_step = next_step + step
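
# Worked example (illustrative timestamps): with a one-minute step and a
# single datapoint at 10:01, the generator pads the missing minutes with
# None-valued Stat tuples.
from datetime import datetime, timedelta

items = [{"start_interval": datetime(2017, 1, 1, 10, 1), "value": 5}]
stats = list(
    gap_gen_default(
        datetime(2017, 1, 1, 10, 0),
        timedelta(minutes=1),
        items,
        end_time=datetime(2017, 1, 1, 10, 3),
    )
)
# stats[0].value is None (10:00), stats[1].value is the item (10:01),
# stats[2].value is None (10:02)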
class DateTimeEncoder(json.JSONEncoder):
    """ Simple datetime to ISO encoder for json serialization"""

    def default(self, obj):
        if isinstance(obj, date):
            return obj.isoformat()
        if isinstance(obj, datetime):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)
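
# Usage sketch: the encoder lets json.dumps handle datetime/date values by
# serializing them as ISO 8601 strings.
import json
from datetime import datetime

json.dumps({"ts": datetime(2017, 1, 1, 12, 30)}, cls=DateTimeEncoder)
# -> '{"ts": "2017-01-01T12:30:00"}'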
def channelstream_request(
    secret, endpoint, payload, throw_exceptions=False, servers=None
):
    responses = []
    if not servers:
        servers = []

    signer = TimestampSigner(secret)
    sig_for_server = signer.sign(endpoint)
    for secret, server in [(s["secret"], s["server"]) for s in servers]:
        response = {}
        secret_headers = {
            "x-channelstream-secret": sig_for_server,
            "x-channelstream-endpoint": endpoint,
            "Content-Type": "application/json",
        }
        url = "%s%s" % (server, endpoint)
        try:
            response = requests.post(
                url,
                data=json.dumps(payload, cls=DateTimeEncoder),
                headers=secret_headers,
                verify=False,
                timeout=2,
            ).json()
        except requests.exceptions.RequestException as e:
            if throw_exceptions:
                raise
        responses.append(response)
    return responses
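
# Shape of the `servers` argument, reconstructed from the loop above
# (host and secret are placeholder values):
servers = [{"secret": "channelstream-secret", "server": "http://127.0.0.1:8000"}]
# channelstream_request("channelstream-secret", "/message", {"type": "message"},
#                       servers=servers)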
131 | def add_cors_headers(response): |
|
132 | def add_cors_headers(response): | |
132 | # allow CORS |
|
133 | # allow CORS | |
133 |
response.headers.add( |
|
134 | response.headers.add("Access-Control-Allow-Origin", "*") | |
134 |
response.headers.add( |
|
135 | response.headers.add("XDomainRequestAllowed", "1") | |
135 |
response.headers.add( |
|
136 | response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS") | |
136 | # response.headers.add('Access-Control-Allow-Credentials', 'true') |
|
137 | # response.headers.add('Access-Control-Allow-Credentials', 'true') | |
137 |
response.headers.add( |
|
138 | response.headers.add( | |
138 | 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie') |
|
139 | "Access-Control-Allow-Headers", | |
139 | response.headers.add('Access-Control-Max-Age', '86400') |
|
140 | "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie", | |
|
141 | ) | |||
|
142 | response.headers.add("Access-Control-Max-Age", "86400") | |||
140 |
|
143 | |||
141 |
|
144 | |||
142 | from sqlalchemy.sql import compiler |
|
145 | from sqlalchemy.sql import compiler | |
143 | from psycopg2.extensions import adapt as sqlescape |
|
146 | from psycopg2.extensions import adapt as sqlescape | |
144 |
|
147 | |||
145 |
|
148 | |||
146 | # or use the appropiate escape function from your db driver |
|
149 | # or use the appropiate escape function from your db driver | |
147 |
|
150 | |||
|
151 | ||||
def compile_query(query):
    dialect = query.session.bind.dialect
    statement = query.statement
    comp = compiler.SQLCompiler(dialect, statement)
    comp.compile()
    enc = dialect.encoding
    params = {}
    for k, v in comp.params.items():
        if isinstance(v, str):
            v = v.encode(enc)
        params[k] = sqlescape(v)
    return (comp.string.encode(enc) % params).decode(enc)

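# Hypothetical usage (DBSession and User are illustrative names, not part of
# this module): the compiled string is handy for logging a query with its
# parameters already inlined and escaped by the driver:
#
#     query = DBSession.query(User).filter(User.id == 5)
#     log.debug(compile_query(query))
#     # e.g. SELECT ... FROM users WHERE users.id = 5
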
def convert_es_type(input_data):
    """
    This might need to convert some text or other types to corresponding ES types
    """
    return str(input_data)


ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])

def parse_proto(input_data):
    try:
        parts = [int(x) for x in input_data.split(".")]
        while len(parts) < 3:
            parts.append(0)
        return ProtoVersion(*parts)
    except Exception as e:
        log.info("Unknown protocol version: %s" % e)
        return ProtoVersion(99, 99, 99)

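# The parsing and fallback behaviour above, exercised directly:
#
#     parse_proto("0.5")      # ProtoVersion(major=0, minor=5, patch=0) - missing parts default to 0
#     parse_proto("1.2.3")    # ProtoVersion(major=1, minor=2, patch=3)
#     parse_proto("garbage")  # logged, then ProtoVersion(major=99, minor=99, patch=99)
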
def es_index_name_limiter(
    start_date=None, end_date=None, months_in_past=6, ixtypes=None
):
    """
    This function limits the search to 6 months by default so we don't have to
    query 300 elasticsearch indices for 20 years of historical data for example
    """

    # should be cached later
    def get_possible_names():
        return list(Datastores.es.indices.get_alias("*"))

    possible_names = get_possible_names()
    es_index_types = []
    if not ixtypes:
        ixtypes = ["reports", "metrics", "logs"]
    for t in ixtypes:
        if t == "reports":
            es_index_types.append("rcae_r_%s")
        elif t == "logs":
            es_index_types.append("rcae_l_%s")
        elif t == "metrics":
            es_index_types.append("rcae_m_%s")
        elif t == "uptime":
            es_index_types.append("rcae_u_%s")
        elif t == "slow_calls":
            es_index_types.append("rcae_sc_%s")

    if start_date:
        start_date = copy.copy(start_date)
    else:
        if not end_date:
            end_date = datetime.utcnow()
        start_date = end_date + relativedelta(months=months_in_past * -1)

    if not end_date:
        end_date = start_date + relativedelta(months=months_in_past)

    index_dates = list(
        rrule(
            MONTHLY,
            dtstart=start_date.date().replace(day=1),
            until=end_date.date(),
            count=36,
        )
    )
    index_names = []
    for ix_type in es_index_types:
        to_extend = [
            ix_type % d.strftime("%Y_%m")
            for d in index_dates
            if ix_type % d.strftime("%Y_%m") in possible_names
        ]
        index_names.extend(to_extend)
        for day in list(
            rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366)
        ):
            ix_name = ix_type % day.strftime("%Y_%m_%d")
            if ix_name in possible_names:
                index_names.append(ix_name)
    return index_names

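# To make the naming scheme concrete: with ixtypes=["reports"] and a range
# covering May 2017, the candidate names take the forms
#
#     "rcae_r_2017_05"     # monthly index, from "%Y_%m"
#     "rcae_r_2017_05_02"  # daily index, from "%Y_%m_%d"
#
# and only names that actually exist in the cluster's alias list are returned.
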
def build_filter_settings_from_query_dict(
    request, params=None, override_app_ids=None, resource_permissions=None
):
    """
    Builds a list of normalized search terms for ES from query params,
    ensuring the application list is restricted to only applications the
    user has access to

    :param params (dictionary)
    :param override_app_ids - list of application id's to use instead of
    applications user normally has access to
    """
    params = copy.deepcopy(params)
    applications = []
    if not resource_permissions:
        resource_permissions = ["view"]

    if request.user:
        applications = UserService.resources_with_perms(
            request.user, resource_permissions, resource_types=["application"]
        )

    # CRITICAL - this ensures our resultset is limited to only the ones
    # the user has view permissions for
    all_possible_app_ids = set([app.resource_id for app in applications])

    # if an override is present we force permission for the app to be present
    # this allows users to see dashboards and applications they would
    # normally not be able to

    if override_app_ids:
        all_possible_app_ids = set(override_app_ids)

    schema = LogSearchSchema().bind(resources=all_possible_app_ids)
    tag_schema = TagListSchema()
    filter_settings = schema.deserialize(params)
    tag_list = []
    for k, v in list(filter_settings.items()):
        if k in accepted_search_params:
            continue
        tag_list.append({"name": k, "value": v, "op": "eq"})
        # remove the key from filter_settings
        filter_settings.pop(k, None)
    tags = tag_schema.deserialize(tag_list)
    filter_settings["tags"] = tags
    return filter_settings

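# Illustratively: any query param that is not one of the accepted search
# params is folded into an equality tag, so a request carrying custom_tag=foo
# (assuming "custom_tag" is not in accepted_search_params) deserializes to
#
#     filter_settings["tags"] == [{"name": "custom_tag", "value": "foo", "op": "eq"}]
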
def gen_uuid():
    return str(uuid.uuid4())


def gen_uuid4_sha_hex():
    return hashlib.sha1(uuid.uuid4().bytes).hexdigest()

def permission_tuple_to_dict(data):
    out = {
        "user_name": None,
        "perm_name": data.perm_name,
        "owner": data.owner,
        "type": data.type,
        "resource_name": None,
        "resource_type": None,
        "resource_id": None,
        "group_name": None,
        "group_id": None,
    }
    if data.user:
        out["user_name"] = data.user.user_name
    if data.perm_name == ALL_PERMISSIONS:
        out["perm_name"] = "__all_permissions__"
    if data.resource:
        out["resource_name"] = data.resource.resource_name
        out["resource_type"] = data.resource.resource_type
        out["resource_id"] = data.resource.resource_id
    if data.group:
        out["group_name"] = data.group.group_name
        out["group_id"] = data.group.id
    return out

def get_cached_buckets(
    request,
    stats_since,
    end_time,
    fn,
    cache_key,
    gap_gen=None,
    db_session=None,
    step_interval=None,
    iv_extractor=None,
    rerange=False,
    *args,
    **kwargs
):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into daily buckets - if the stats_since and end time give a
    delta bigger than 24 hours, then only "today's" data is computed on the fly

    :param request: (request) request object
    :param stats_since: (datetime) start date of buckets range
    :param end_time: (datetime) end date of buckets range - utcnow() if None
    :param fn: (callable) callable to use to populate buckets should have
    following signature:
        def get_data(request, since_when, until, *args, **kwargs):

    :param cache_key: (string) cache key that will be used to build bucket
    caches
    :param gap_gen: (callable) gap generator - should return step intervals
    to use with our `fn` callable
    :param db_session: (Session) sqlalchemy session
    :param step_interval: (timedelta) optional step interval if we want to
    override the default determined from total start/end time delta
    :param iv_extractor: (callable) used to get step intervals from data
    returned by `fn` callable
    :param rerange: (bool) handy if we want to change ranges from hours to
    days when cached data is missing - will shorten execution time if `fn`
    callable supports that and we are working with multiple rows - like metrics
    :param args:
    :param kwargs:

    :return: iterable
    """
    if not end_time:
        end_time = datetime.utcnow().replace(second=0, microsecond=0)
    delta = end_time - stats_since
    # if smaller than 3 days we want to group by 5min else by 1h,
    # for 60 min group by min
    if not gap_gen:
        gap_gen = gap_gen_default
    if not iv_extractor:
        iv_extractor = default_extractor

    # do not use custom interval if total time range with new iv would exceed
    # end time
    if not step_interval or stats_since + step_interval >= end_time:
        if delta < h.time_deltas.get("12h")["delta"]:
            step_interval = timedelta(seconds=60)
        elif delta < h.time_deltas.get("3d")["delta"]:
            step_interval = timedelta(seconds=60 * 5)
        elif delta > h.time_deltas.get("2w")["delta"]:
            step_interval = timedelta(days=1)
        else:
            step_interval = timedelta(minutes=60)

    if step_interval >= timedelta(minutes=60):
        log.info(
            "cached_buckets:{}: adjusting start time "
            "for hourly or daily intervals".format(cache_key)
        )
        stats_since = stats_since.replace(hour=0, minute=0)

    ranges = [
        i.start_interval
        for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time))
    ]
    buckets = {}
    storage_key = "buckets:" + cache_key + "{}|{}"
    # this means we basically cache per hour in 3-14 day intervals but i think
    # its fine at this point - will be faster than db access anyways

    if len(ranges) >= 1:
        last_ranges = [ranges[-1]]
    else:
        last_ranges = []
    if step_interval >= timedelta(minutes=60):
        for r in ranges:
            k = storage_key.format(step_interval.total_seconds(), r)
            value = request.registry.cache_regions.redis_day_30.get(k)
            # last buckets are never loaded from cache
            is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges
            if value is not NO_VALUE and not is_last_result:
                log.info(
                    "cached_buckets:{}: "
                    "loading range {} from cache".format(cache_key, r)
                )
                buckets[r] = value
            else:
                log.info(
                    "cached_buckets:{}: "
                    "loading range {} from storage".format(cache_key, r)
                )
                range_size = step_interval
                if (
                    step_interval == timedelta(minutes=60)
                    and not is_last_result
                    and rerange
                ):
                    range_size = timedelta(days=1)
                    r = r.replace(hour=0, minute=0)
                    log.info(
                        "cached_buckets:{}: "
                        "loading collapsed "
                        "range {} {}".format(cache_key, r, r + range_size)
                    )
                bucket_data = fn(
                    request,
                    r,
                    r + range_size,
                    step_interval,
                    gap_gen,
                    bucket_count=len(ranges),
                    *args,
                    **kwargs
                )
                for b in bucket_data:
                    b_iv = iv_extractor(b)
                    buckets[b_iv] = b
                    k2 = storage_key.format(step_interval.total_seconds(), b_iv)
                    request.registry.cache_regions.redis_day_30.set(k2, b)
        log.info("cached_buckets:{}: saving cache".format(cache_key))
    else:
        # bucket count is 1 for short time ranges <= 24h from now
        bucket_data = fn(
            request,
            stats_since,
            end_time,
            step_interval,
            gap_gen,
            bucket_count=1,
            *args,
            **kwargs
        )
        for b in bucket_data:
            buckets[iv_extractor(b)] = b
    return buckets

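# A minimal sketch of a callable compatible with how `fn` is invoked above
# (the docstring lists a shorter signature, but the call sites also pass the
# step interval and gap generator positionally); get_data is an illustrative
# name, not part of this module:


def get_data(request, since_when, until, step_interval, gap_gen,
             bucket_count=1, *args, **kwargs):
    # should return one row per interval; the paired iv_extractor must be
    # able to recover the interval datetime back out of each returned row
    return []
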
def get_cached_split_data(
    request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs
):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into 2 buckets - cached "since_from" bucket and "today"
    bucket - then the data can be reduced into single value

    Data is cached if the stats_since and end time give a delta bigger
    than 24 hours - then only 24h is computed on the fly
    """
    if not end_time:
        end_time = datetime.utcnow().replace(second=0, microsecond=0)
    delta = end_time - stats_since

    if delta >= timedelta(minutes=60):
        log.info(
            "cached_split_data:{}: adjusting start time "
            "for hourly or daily intervals".format(cache_key)
        )
        stats_since = stats_since.replace(hour=0, minute=0)

    storage_key = "buckets_split_data:" + cache_key + ":{}|{}"
    old_end_time = end_time.replace(hour=0, minute=0)

    final_storage_key = storage_key.format(delta.total_seconds(), old_end_time)
    older_data = None

    cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key)

    if cdata:
        log.info("cached_split_data:{}: found old bucket data".format(cache_key))
        older_data = cdata

    if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata:
        log.info(
            "cached_split_data:{}: didn't find the "
            "start bucket in cache so load older data".format(cache_key)
        )
        recent_stats_since = old_end_time
        older_data = fn(
            request,
            stats_since,
            recent_stats_since,
            db_session=db_session,
            *args,
            **kwargs
        )
        request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data)
    elif stats_since < end_time - h.time_deltas.get("24h")["delta"]:
        recent_stats_since = old_end_time
    else:
        recent_stats_since = stats_since

    log.info(
        "cached_split_data:{}: loading fresh "
        "data buckets from last 24h".format(cache_key)
    )
    todays_data = fn(
        request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs
    )
    return older_data, todays_data

def in_batches(seq, size):
    """
    Splits a sequence (anything sliceable with a len()) into batches of the
    specified size
    :param seq (sequence)
    :param size integer
    """
    return (seq[pos : pos + size] for pos in range(0, len(seq), size))
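
# For example:
#
#     list(in_batches([1, 2, 3, 4, 5], 2))  # [[1, 2], [3, 4], [5]]
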
@@ -1,142 +1,161 b'' | |||||
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import uuid

from datetime import datetime

log = logging.getLogger(__name__)

def parse_airbrake_xml(request):
    root = request.context.airbrake_xml_etree
    error = root.find("error")
    notifier = root.find("notifier")
    server_env = root.find("server-environment")
    request_data = root.find("request")
    user = root.find("current-user")
    if request_data is not None:
        cgi_data = request_data.find("cgi-data")
        if cgi_data is None:
            cgi_data = []

    error_dict = {
        "class_name": error.findtext("class") or "",
        "error": error.findtext("message") or "",
        "occurences": 1,
        "http_status": 500,
        "priority": 5,
        "server": "unknown",
        "url": "unknown",
        "request": {},
    }
    if user is not None:
        error_dict["username"] = user.findtext("username") or user.findtext("id")
    if notifier is not None:
        error_dict["client"] = notifier.findtext("name")

    if server_env is not None:
        error_dict["server"] = server_env.findtext("hostname", "unknown")

    whitelist_environ = [
        "REMOTE_USER",
        "REMOTE_ADDR",
        "SERVER_NAME",
        "CONTENT_TYPE",
        "HTTP_REFERER",
    ]

    if request_data is not None:
        error_dict["url"] = request_data.findtext("url", "unknown")
        component = request_data.findtext("component")
        action = request_data.findtext("action")
        if component and action:
            error_dict["view_name"] = "%s:%s" % (component, action)
        for node in cgi_data:
            key = node.get("key")
            if key.startswith("HTTP") or key in whitelist_environ:
                error_dict["request"][key] = node.text
            elif "query_parameters" in key:
                error_dict["request"]["GET"] = {}
                for x in node:
                    error_dict["request"]["GET"][x.get("key")] = x.text
            elif "request_parameters" in key:
                error_dict["request"]["POST"] = {}
                for x in node:
                    error_dict["request"]["POST"][x.get("key")] = x.text
            elif key.endswith("cookie"):
                error_dict["request"]["COOKIE"] = {}
                for x in node:
                    error_dict["request"]["COOKIE"][x.get("key")] = x.text
            elif key.endswith("request_id"):
                error_dict["request_id"] = node.text
            elif key.endswith("session"):
                error_dict["request"]["SESSION"] = {}
                for x in node:
                    error_dict["request"]["SESSION"][x.get("key")] = x.text
            else:
                if key in ["rack.session.options"]:
                    # skip secret configs
                    continue
                try:
                    if len(node):
                        error_dict["request"][key] = dict(
                            [(x.get("key"), x.text) for x in node]
                        )
                    else:
                        error_dict["request"][key] = node.text
                except Exception as e:
                    log.warning("Airbrake integration exception: %s" % e)

        error_dict["request"].pop("HTTP_COOKIE", "")

        error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "")
        error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "")
    if "request_id" not in error_dict:
        error_dict["request_id"] = str(uuid.uuid4())
    if request.context.possibly_public:
        # set ip for reports that come from airbrake js client
        error_dict["timestamp"] = datetime.utcnow()
        if request.environ.get("HTTP_X_FORWARDED_FOR"):
            ip = request.environ.get("HTTP_X_FORWARDED_FOR", "")
            first_ip = ip.split(",")[0]
            remote_addr = first_ip.strip()
        else:
            remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get(
                "REMOTE_ADDR"
            )
        error_dict["ip"] = remote_addr

    blacklist = [
        "password",
        "passwd",
        "pwd",
        "auth_tkt",
        "secret",
        "csrf",
        "session",
        "test",
    ]

    lines = []
    for l in error.find("backtrace"):
        lines.append(
            {
                "file": l.get("file", ""),
                "line": l.get("number", ""),
                "fn": l.get("method", ""),
                "module": l.get("module", ""),
                "cline": l.get("method", ""),
                "vars": {},
            }
        )
    error_dict["traceback"] = list(reversed(lines))
    # filtering is not provided by airbrake
    keys_to_check = (
        error_dict["request"].get("COOKIE"),
        error_dict["request"].get("COOKIES"),
        error_dict["request"].get("POST"),
        error_dict["request"].get("SESSION"),
    )
    for source in [_f for _f in keys_to_check if _f]:
        for k in source.keys():
            for bad_key in blacklist:
                if bad_key in k.lower():
                    source[k] = "***"

    return error_dict
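
# The blacklist scrub above masks any COOKIE/POST/SESSION key whose lowercased
# name contains a blacklisted substring, e.g. a POST body of
#
#     {"password": "hunter2", "user": "bob"}
#
# comes back in the parsed report as {"password": "***", "user": "bob"}.
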
@@ -1,56 +1,56 b'' | |||||
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import tzinfo, timedelta, datetime
from dateutil.relativedelta import relativedelta
import logging

log = logging.getLogger(__name__)

def to_relativedelta(time_delta):
    return relativedelta(
        seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds
    )

def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):
    utcnow = datetime.utcnow()
    if isinstance(date_str, datetime):
        # get rid of tzinfo
        return date_str.replace(tzinfo=None)
    if not date_str and return_utcnow_if_wrong:
        return utcnow
    try:
        try:
            if "Z" in date_str:
                date_str = date_str[: date_str.index("Z")]
            if "." in date_str:
                date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f")
            else:
                date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
        except Exception:
            # bw compat with old client
            date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
    except Exception:
        if return_utcnow_if_wrong:
            date = utcnow
        else:
            date = None
    if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
        log.warning("time %s in future + 3 min, normalizing" % date)
        return utcnow
    return date
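
# Inputs accepted by the branches above:
#
#     convert_date("2017-05-02T10:51:32.911000")  # ISO timestamp with microseconds
#     convert_date("2017-05-02T10:51:32Z")        # trailing "Z" is stripped first
#     convert_date("2017-05-02 10:51:32,911")     # old-client fallback format
#     convert_date("not-a-date")                  # utcnow() when return_utcnow_if_wrong=True
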
@@ -1,296 +1,317 b'' | |||||
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import timedelta

from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType

EXCLUDED_LOG_VARS = [
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "message",
    "module",
    "msecs",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "thread",
    "threadName",
]

EXCLUDE_SENTRY_KEYS = [
    "csp",
    "culprit",
    "event_id",
    "exception",
    "extra",
    "level",
    "logentry",
    "logger",
    "message",
    "modules",
    "platform",
    "query",
    "release",
    "request",
    "sentry.interfaces.Csp",
    "sentry.interfaces.Exception",
    "sentry.interfaces.Http",
    "sentry.interfaces.Message",
    "sentry.interfaces.Query",
    "sentry.interfaces.Stacktrace",
    "sentry.interfaces.Template",
    "sentry.interfaces.User",
    "sentry.interfaces.csp.Csp",
    "sentry.interfaces.exception.Exception",
    "sentry.interfaces.http.Http",
    "sentry.interfaces.message.Message",
    "sentry.interfaces.query.Query",
    "sentry.interfaces.stacktrace.Stacktrace",
    "sentry.interfaces.template.Template",
    "sentry.interfaces.user.User",
    "server_name",
    "stacktrace",
    "tags",
    "template",
    "time_spent",
    "timestamp",
    "user",
]

def get_keys(list_of_keys, json_body):
    for k in list_of_keys:
        if k in json_body:
            return json_body[k]

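# First matching key wins, and a miss falls through to an implicit None:
#
#     get_keys(["logentry", "sentry.interfaces.Message"], {"logentry": {"message": "hi"}})
#     # -> {"message": "hi"}
#     get_keys(["logentry"], {})  # -> None
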
def get_logentry(json_body):
    key_names = [
        "logentry",
        "sentry.interfaces.message.Message",
        "sentry.interfaces.Message",
    ]
    logentry = get_keys(key_names, json_body)
    return logentry

def get_exception(json_body):
    parsed_exception = {}
    key_names = [
        "exception",
        "sentry.interfaces.exception.Exception",
        "sentry.interfaces.Exception",
    ]
    exception = get_keys(key_names, json_body) or {}
    if exception:
        if isinstance(exception, dict):
            exception = exception["values"][0]
        else:
            exception = exception[0]

        parsed_exception["type"] = exception.get("type")
        parsed_exception["value"] = exception.get("value")
        parsed_exception["module"] = exception.get("module")
    parsed_stacktrace = get_stacktrace(exception) or {}
    # note: the raw exception dict (when present) replaces the individually
    # parsed fields assembled above
    parsed_exception = exception or {}
    return parsed_exception, parsed_stacktrace

def get_stacktrace(json_body):
    parsed_stacktrace = []
    key_names = [
        "stacktrace",
        "sentry.interfaces.stacktrace.Stacktrace",
        "sentry.interfaces.Stacktrace",
    ]
    stacktrace = get_keys(key_names, json_body)
    if stacktrace:
        for frame in stacktrace["frames"]:
            parsed_stacktrace.append(
                {
                    "cline": frame.get("context_line", ""),
                    "file": frame.get("filename", ""),
                    "module": frame.get("module", ""),
                    "fn": frame.get("function", ""),
                    "line": frame.get("lineno", ""),
                    "vars": list(frame.get("vars", {}).items()),
                }
            )
    return parsed_stacktrace

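# Illustratively, a single Sentry frame is remapped onto AppEnlight's field
# names like so:
#
#     in:  {"filename": "app.py", "function": "view", "lineno": 10,
#           "context_line": "raise ValueError()", "vars": {"a": 1}}
#     out: {"file": "app.py", "fn": "view", "line": 10, "module": "",
#           "cline": "raise ValueError()", "vars": [("a", 1)]}
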
def get_template(json_body):
    parsed_template = []  # frames are appended below, so this needs to be a list
    key_names = [
        "template",
        "sentry.interfaces.template.Template",
        "sentry.interfaces.Template",
    ]
    template = get_keys(key_names, json_body)
    if template:
        for frame in template["frames"]:
            parsed_template.append(
                {
                    "cline": frame.get("context_line", ""),
                    "file": frame.get("filename", ""),
                    "fn": "",
                    "line": frame.get("lineno", ""),
                    "vars": [],
                }
            )

    return parsed_template

def get_request(json_body):
    parsed_http = {}
    key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"]
    http = get_keys(key_names, json_body) or {}
    for k, v in http.items():
        if k == "headers":
            parsed_http["headers"] = {}
            for sk, sv in http["headers"].items():
                parsed_http["headers"][sk.title()] = sv
        else:
            parsed_http[k.lower()] = v
    return parsed_http

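# Note the header-key normalization via str.title(), e.g.:
#
#     "user-agent".title()  # "User-Agent"
#     "X-REAL-IP".title()   # "X-Real-Ip"
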
def get_user(json_body):
    parsed_user = {}
    key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"]
    user = get_keys(key_names, json_body)
    if user:
        parsed_user["id"] = user.get("id")
        parsed_user["username"] = user.get("username")
        parsed_user["email"] = user.get("email")
        parsed_user["ip_address"] = user.get("ip_address")

    return parsed_user

def get_query(json_body):
    query = None
    key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"]
    query = get_keys(key_name, json_body)
    return query

def parse_sentry_event(json_body):
    request_id = json_body.get("event_id")

    # required
    message = json_body.get("message")
    log_timestamp = json_body.get("timestamp")
    level = json_body.get("level")
    if isinstance(level, int):
        level = LogLevelPython.key_from_value(level)

    namespace = json_body.get("logger")
    language = json_body.get("platform")

    # optional
    server_name = json_body.get("server_name")
    culprit = json_body.get("culprit")
    release = json_body.get("release")

    tags = json_body.get("tags", {})
    if hasattr(tags, "items"):
        tags = list(tags.items())
    extra = json_body.get("extra", {})
    if hasattr(extra, "items"):
        extra = list(extra.items())

    parsed_req = get_request(json_body)
    user = get_user(json_body)
    template = get_template(json_body)
    query = get_query(json_body)

    # other unidentified keys found
    other_keys = [
        (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS
    ]

    logentry = get_logentry(json_body)
    if logentry:
        message = logentry["message"]

    exception, stacktrace = get_exception(json_body)

    alt_stacktrace = get_stacktrace(json_body)
    event_type = None
    if not exception and not stacktrace and not alt_stacktrace and not template:
        event_type = ParsedSentryEventType.LOG

        event_dict = {
            "log_level": level,
            "message": message,
            "namespace": namespace,
            "request_id": request_id,
            "server": server_name,
            "date": log_timestamp,
            "tags": tags,
        }
        event_dict["tags"].extend(
            [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS]
        )

        # other keys can be various object types
        event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)])
        if culprit:
            event_dict["tags"].append(("sentry_culprit", culprit))
        if language:
            event_dict["tags"].append(("sentry_language", language))
        if release:
            event_dict["tags"].append(("sentry_release", release))

    if exception or stacktrace or alt_stacktrace or template:
        event_type = ParsedSentryEventType.ERROR_REPORT
        event_dict = {
            "client": "sentry",
            "error": message,
            "namespace": namespace,
            "request_id": request_id,
            "server": server_name,
            "start_time": log_timestamp,
            "end_time": None,
            "tags": tags,
            "extra": extra,
            "language": language,
            "view_name": json_body.get("culprit"),
            "http_status": None,
            "username": None,
            "url": parsed_req.get("url"),
            "ip": None,
            "user_agent": None,
            "request": None,
            "slow_calls": None,
            "request_stats": None,
            "traceback": None,
        }

        event_dict["extra"].extend(other_keys)
        if release:
            event_dict["tags"].append(("sentry_release", release))
        event_dict["request"] = parsed_req
        if "headers" in parsed_req:
            event_dict["user_agent"] = parsed_req["headers"].get("User-Agent")
        if "env" in parsed_req:
            event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR")
        ts_ms = int(json_body.get("time_spent") or 0)
        if ts_ms > 0:
            event_dict["end_time"] = event_dict["start_time"] + timedelta(
                milliseconds=ts_ms
            )
        if stacktrace or alt_stacktrace or template:
            event_dict["traceback"] = stacktrace or alt_stacktrace or template
290 | for k in list(event_dict.keys()): |
|
312 | for k in list(event_dict.keys()): | |
291 | if event_dict[k] is None: |
|
313 | if event_dict[k] is None: | |
292 | del event_dict[k] |
|
314 | del event_dict[k] | |
293 | if user: |
|
315 | if user: | |
294 |
event_dict[ |
|
316 | event_dict["username"] = user["username"] or user["id"] or user["email"] | |
295 | or user['email'] |
|
|||
296 | return event_dict, event_type |
|
317 | return event_dict, event_type |
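parse_sentry_event first assembles a log-style event_dict, then rebuilds it as an error report whenever any traceback source (exception, stacktrace, alt_stacktrace, or a template) is present, and finally drops keys whose value is None. A hedged usage sketch, with payload keys taken from the code above and the helper getters assumed to return falsy values for keys the payload lacks:

    payload = {
        "event_id": "abc123",
        "message": "something broke",
        "timestamp": "2017-01-01T00:00:00",
        "level": 40,  # numeric levels are mapped via LogLevelPython.key_from_value
        "logger": "root",
        "platform": "python",
    }
    event_dict, event_type = parse_sentry_event(payload)
    # With no exception, stacktrace, or template data the payload is
    # classified as a plain log entry rather than an error report.
    assert event_type == ParsedSentryEventType.LOG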
@@ -1,17 +1,15 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
||||
17 |
|
@@ -1,98 +1,102 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | from alembic import context |
|
17 | from alembic import context | |
18 | from sqlalchemy import engine_from_config, pool, MetaData |
|
18 | from sqlalchemy import engine_from_config, pool, MetaData | |
19 | from logging.config import fileConfig |
|
19 | from logging.config import fileConfig | |
20 | from appenlight.models import NAMING_CONVENTION |
|
20 | from appenlight.models import NAMING_CONVENTION | |
21 |
|
21 | |||
22 | # this is the Alembic Config object, which provides |
|
22 | # this is the Alembic Config object, which provides | |
23 | # access to the values within the .ini file in use. |
|
23 | # access to the values within the .ini file in use. | |
24 | config = context.config |
|
24 | config = context.config | |
25 |
|
25 | |||
26 | # Interpret the config file for Python logging. |
|
26 | # Interpret the config file for Python logging. | |
27 | # This line sets up loggers basically. |
|
27 | # This line sets up loggers basically. | |
28 | if config.config_file_name: |
|
28 | if config.config_file_name: | |
29 | fileConfig(config.config_file_name) |
|
29 | fileConfig(config.config_file_name) | |
30 |
|
30 | |||
31 | # add your model's MetaData object here |
|
31 | # add your model's MetaData object here | |
32 | # for 'autogenerate' support |
|
32 | # for 'autogenerate' support | |
33 | # from myapp import mymodel |
|
33 | # from myapp import mymodel | |
34 | # target_metadata = mymodel.Base.metadata |
|
34 | # target_metadata = mymodel.Base.metadata | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | target_metadata = MetaData(naming_convention=NAMING_CONVENTION) |
|
37 | target_metadata = MetaData(naming_convention=NAMING_CONVENTION) | |
38 |
|
38 | |||
39 | # other values from the config, defined by the needs of env.py, |
|
39 | # other values from the config, defined by the needs of env.py, | |
40 | # can be acquired: |
|
40 | # can be acquired: | |
41 | # my_important_option = config.get_main_option("my_important_option") |
|
41 | # my_important_option = config.get_main_option("my_important_option") | |
42 | # ... etc. |
|
42 | # ... etc. | |
43 |
|
43 | |||
44 |
VERSION_TABLE_NAME = |
|
44 | VERSION_TABLE_NAME = "alembic_appenlight_version" | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | def run_migrations_offline(): |
|
47 | def run_migrations_offline(): | |
48 | """Run migrations in 'offline' mode. |
|
48 | """Run migrations in 'offline' mode. | |
49 |
|
49 | |||
50 | This configures the context with just a URL |
|
50 | This configures the context with just a URL | |
51 | and not an Engine, though an Engine is acceptable |
|
51 | and not an Engine, though an Engine is acceptable | |
52 | here as well. By skipping the Engine creation |
|
52 | here as well. By skipping the Engine creation | |
53 | we don't even need a DBAPI to be available. |
|
53 | we don't even need a DBAPI to be available. | |
54 |
|
54 | |||
55 | Calls to context.execute() here emit the given string to the |
|
55 | Calls to context.execute() here emit the given string to the | |
56 | script output. |
|
56 | script output. | |
57 |
|
57 | |||
58 | """ |
|
58 | """ | |
59 | url = config.get_main_option("sqlalchemy.url") |
|
59 | url = config.get_main_option("sqlalchemy.url") | |
60 | context.configure(url=url, target_metadata=target_metadata, |
|
60 | context.configure( | |
61 | transaction_per_migration=True, |
|
61 | url=url, | |
62 | version_table=VERSION_TABLE_NAME) |
|
62 | target_metadata=target_metadata, | |
|
63 | transaction_per_migration=True, | |||
|
64 | version_table=VERSION_TABLE_NAME, | |||
|
65 | ) | |||
63 |
|
66 | |||
64 | with context.begin_transaction(): |
|
67 | with context.begin_transaction(): | |
65 | context.run_migrations() |
|
68 | context.run_migrations() | |
66 |
|
69 | |||
67 |
|
70 | |||
68 | def run_migrations_online(): |
|
71 | def run_migrations_online(): | |
69 | """Run migrations in 'online' mode. |
|
72 | """Run migrations in 'online' mode. | |
70 |
|
73 | |||
71 | In this scenario we need to create an Engine |
|
74 | In this scenario we need to create an Engine | |
72 | and associate a connection with the context. |
|
75 | and associate a connection with the context. | |
73 |
|
76 | |||
74 | """ |
|
77 | """ | |
75 | engine = engine_from_config( |
|
78 | engine = engine_from_config( | |
76 | config.get_section(config.config_ini_section), |
|
79 | config.get_section(config.config_ini_section), | |
77 |
prefix= |
|
80 | prefix="sqlalchemy.", | |
78 |
poolclass=pool.NullPool |
|
81 | poolclass=pool.NullPool, | |
|
82 | ) | |||
79 |
|
83 | |||
80 | connection = engine.connect() |
|
84 | connection = engine.connect() | |
81 | context.configure( |
|
85 | context.configure( | |
82 | connection=connection, |
|
86 | connection=connection, | |
83 | target_metadata=target_metadata, |
|
87 | target_metadata=target_metadata, | |
84 | transaction_per_migration=True, |
|
88 | transaction_per_migration=True, | |
85 | version_table=VERSION_TABLE_NAME |
|
89 | version_table=VERSION_TABLE_NAME, | |
86 | ) |
|
90 | ) | |
87 |
|
91 | |||
88 | try: |
|
92 | try: | |
89 | with context.begin_transaction(): |
|
93 | with context.begin_transaction(): | |
90 | context.run_migrations() |
|
94 | context.run_migrations() | |
91 | finally: |
|
95 | finally: | |
92 | connection.close() |
|
96 | connection.close() | |
93 |
|
97 | |||
94 |
|
98 | |||
95 | if context.is_offline_mode(): |
|
99 | if context.is_offline_mode(): | |
96 | run_migrations_offline() |
|
100 | run_migrations_offline() | |
97 | else: |
|
101 | else: | |
98 | run_migrations_online() |
|
102 | run_migrations_online() |
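The reformatted env.py keeps Alembic's standard dispatch: offline mode renders SQL without requiring a DBAPI, online mode runs against a live connection, and both share transaction_per_migration plus the custom version_table. As a usage sketch (the ini path is hypothetical; alembic.config.main is Alembic's documented programmatic entry point):

    from alembic.config import main

    # Online: applies migrations against the database named in the ini file.
    main(argv=["-c", "appenlight.ini", "upgrade", "head"])

    # Offline: --sql makes Alembic print the migration SQL instead of
    # executing it -- the path served by run_migrations_offline().
    main(argv=["-c", "appenlight.ini", "upgrade", "head", "--sql"])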
@@ -1,624 +1,813 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | """initial tables |
|
17 | """initial tables | |
18 |
|
18 | |||
19 | Revision ID: 55b6e612672f |
|
19 | Revision ID: 55b6e612672f | |
20 | Revises: None |
|
20 | Revises: None | |
21 | Create Date: 2014-10-13 23:47:38.295159 |
|
21 | Create Date: 2014-10-13 23:47:38.295159 | |
22 |
|
22 | |||
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | # revision identifiers, used by Alembic. |
|
25 | # revision identifiers, used by Alembic. | |
26 |
revision = |
|
26 | revision = "55b6e612672f" | |
27 | down_revision = None |
|
27 | down_revision = None | |
28 |
|
28 | |||
29 | from alembic import op |
|
29 | from alembic import op | |
30 | import sqlalchemy as sa |
|
30 | import sqlalchemy as sa | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | def upgrade(): |
|
33 | def upgrade(): | |
34 |
op.add_column( |
|
34 | op.add_column("users", sa.Column("first_name", sa.Unicode(25))) | |
35 |
op.add_column( |
|
35 | op.add_column("users", sa.Column("last_name", sa.Unicode(50))) | |
36 |
op.add_column( |
|
36 | op.add_column("users", sa.Column("company_name", sa.Unicode(255))) | |
37 |
op.add_column( |
|
37 | op.add_column("users", sa.Column("company_address", sa.Unicode(255))) | |
38 |
op.add_column( |
|
38 | op.add_column("users", sa.Column("phone1", sa.Unicode(25))) | |
39 |
op.add_column( |
|
39 | op.add_column("users", sa.Column("phone2", sa.Unicode(25))) | |
40 |
op.add_column( |
|
40 | op.add_column("users", sa.Column("zip_code", sa.Unicode(25))) | |
41 | op.add_column('users', sa.Column('default_report_sort', sa.Unicode(20), nullable=False, server_default="newest")) |
|
41 | op.add_column( | |
42 | op.add_column('users', sa.Column('city', sa.Unicode(128))) |
|
42 | "users", | |
43 | op.add_column('users', sa.Column('notes', sa.UnicodeText, server_default='')) |
|
43 | sa.Column( | |
44 | op.add_column('users', sa.Column('notifications', sa.Boolean(), nullable=False, server_default='true')) |
|
44 | "default_report_sort", | |
45 | op.add_column('users', sa.Column('registration_ip', sa.Unicode(40), nullable=False, server_default='')) |
|
45 | sa.Unicode(20), | |
|
46 | nullable=False, | |||
|
47 | server_default="newest", | |||
|
48 | ), | |||
|
49 | ) | |||
|
50 | op.add_column("users", sa.Column("city", sa.Unicode(128))) | |||
|
51 | op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default="")) | |||
|
52 | op.add_column( | |||
|
53 | "users", | |||
|
54 | sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"), | |||
|
55 | ) | |||
|
56 | op.add_column( | |||
|
57 | "users", | |||
|
58 | sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""), | |||
|
59 | ) | |||
46 |
|
60 | |||
47 | op.create_table( |
|
61 | op.create_table( | |
48 |
|
|
62 | "integrations", | |
49 |
sa.Column( |
|
63 | sa.Column("id", sa.Integer(), primary_key=True), | |
50 | sa.Column('resource_id', sa.Integer(), |
|
64 | sa.Column( | |
51 | sa.ForeignKey('resources.resource_id', onupdate='cascade', |
|
65 | "resource_id", | |
52 | ondelete='cascade')), |
|
66 | sa.Integer(), | |
53 | sa.Column('integration_name', sa.Unicode(64)), |
|
67 | sa.ForeignKey( | |
54 | sa.Column('config', sa.dialects.postgresql.JSON, nullable=False), |
|
68 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
55 | sa.Column('modified_date', sa.DateTime(), nullable=False, server_default=sa.func.now()), |
|
69 | ), | |
56 | sa.Column('external_id', sa.Unicode(255)), |
|
70 | ), | |
57 |
sa.Column( |
|
71 | sa.Column("integration_name", sa.Unicode(64)), | |
|
72 | sa.Column("config", sa.dialects.postgresql.JSON, nullable=False), | |||
|
73 | sa.Column( | |||
|
74 | "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now() | |||
|
75 | ), | |||
|
76 | sa.Column("external_id", sa.Unicode(255)), | |||
|
77 | sa.Column("external_id2", sa.Unicode(255)), | |||
58 | ) |
|
78 | ) | |
59 |
|
79 | |||
60 | op.create_table( |
|
80 | op.create_table( | |
61 |
|
|
81 | "alert_channels", | |
62 | sa.Column('owner_id', sa.Integer(), |
|
82 | sa.Column( | |
63 | sa.ForeignKey('users.id', onupdate='cascade', |
|
83 | "owner_id", | |
64 | ondelete='cascade'), nullable=False), |
|
84 | sa.Integer(), | |
65 | sa.Column('channel_name', sa.Unicode(25), nullable=False), |
|
85 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |
66 | sa.Column('channel_value', sa.Unicode(80), nullable=False), |
|
86 | nullable=False, | |
67 | sa.Column('channel_json_conf', sa.dialects.postgresql.JSON, nullable=False), |
|
87 | ), | |
68 |
sa.Column( |
|
88 | sa.Column("channel_name", sa.Unicode(25), nullable=False), | |
69 |
sa.Column( |
|
89 | sa.Column("channel_value", sa.Unicode(80), nullable=False), | |
70 | sa.Column('notify_only_first', sa.Boolean, nullable=False, server_default='False'), |
|
90 | sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False), | |
71 | sa.Column('daily_digest', sa.Boolean, nullable=False, server_default='True'), |
|
91 | sa.Column( | |
72 | sa.Column('pkey', sa.Integer(), primary_key=True), |
|
92 | "channel_validated", sa.Boolean, nullable=False, server_default="False" | |
73 | sa.Column('integration_id', sa.Integer, |
|
93 | ), | |
74 | sa.ForeignKey('integrations.id', onupdate='cascade', |
|
94 | sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"), | |
75 | ondelete='cascade')), |
|
95 | sa.Column( | |
76 | ) |
|
96 | "notify_only_first", sa.Boolean, nullable=False, server_default="False" | |
77 | op.create_unique_constraint('uq_alert_channels', 'alert_channels', |
|
97 | ), | |
78 | ["owner_id", "channel_name", "channel_value"]) |
|
98 | sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"), | |
|
99 | sa.Column("pkey", sa.Integer(), primary_key=True), | |||
|
100 | sa.Column( | |||
|
101 | "integration_id", | |||
|
102 | sa.Integer, | |||
|
103 | sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"), | |||
|
104 | ), | |||
|
105 | ) | |||
|
106 | op.create_unique_constraint( | |||
|
107 | "uq_alert_channels", | |||
|
108 | "alert_channels", | |||
|
109 | ["owner_id", "channel_name", "channel_value"], | |||
|
110 | ) | |||
79 |
|
111 | |||
80 | op.create_table( |
|
112 | op.create_table( | |
81 |
|
|
113 | "alert_channels_actions", | |
82 |
sa.Column( |
|
114 | sa.Column("owner_id", sa.Integer(), nullable=False), | |
83 | sa.Column('resource_id', sa.Integer(), |
|
115 | sa.Column( | |
84 | sa.ForeignKey('resources.resource_id', onupdate='cascade', |
|
116 | "resource_id", | |
85 | ondelete='cascade')), |
|
117 | sa.Integer(), | |
86 | sa.Column('pkey', sa.Integer(), primary_key=True), |
|
118 | sa.ForeignKey( | |
87 | sa.Column('action', sa.Unicode(10), nullable=False, server_default='always'), |
|
119 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
88 | sa.Column('rule', sa.dialects.postgresql.JSON), |
|
120 | ), | |
89 | sa.Column('type', sa.Unicode(10), index=True), |
|
121 | ), | |
90 |
sa.Column( |
|
122 | sa.Column("pkey", sa.Integer(), primary_key=True), | |
91 | sa.Column('config', sa.dialects.postgresql.JSON), |
|
123 | sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"), | |
92 | sa.Column('name', sa.Unicode(255), server_default='') |
|
124 | sa.Column("rule", sa.dialects.postgresql.JSON), | |
|
125 | sa.Column("type", sa.Unicode(10), index=True), | |||
|
126 | sa.Column("other_id", sa.Unicode(40), index=True), | |||
|
127 | sa.Column("config", sa.dialects.postgresql.JSON), | |||
|
128 | sa.Column("name", sa.Unicode(255), server_default=""), | |||
93 | ) |
|
129 | ) | |
94 |
|
130 | |||
95 |
|
||||
96 | op.create_table( |
|
131 | op.create_table( | |
97 |
|
|
132 | "application_postprocess_conf", | |
98 |
sa.Column( |
|
133 | sa.Column("pkey", sa.Integer(), primary_key=True), | |
99 |
sa.Column( |
|
134 | sa.Column("do", sa.Unicode(25), nullable=False), | |
100 |
sa.Column( |
|
135 | sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""), | |
101 | sa.Column('resource_id', sa.Integer(), |
|
136 | sa.Column( | |
102 | sa.ForeignKey('resources.resource_id', |
|
137 | "resource_id", | |
103 | onupdate='cascade', |
|
138 | sa.Integer(), | |
104 | ondelete='cascade'), nullable=False), |
|
139 | sa.ForeignKey( | |
105 | sa.Column('rule', sa.dialects.postgresql.JSON), |
|
140 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
|
141 | ), | |||
|
142 | nullable=False, | |||
|
143 | ), | |||
|
144 | sa.Column("rule", sa.dialects.postgresql.JSON), | |||
106 | ) |
|
145 | ) | |
107 |
|
146 | |||
108 | op.create_table( |
|
147 | op.create_table( | |
109 |
|
|
148 | "applications", | |
110 | sa.Column('resource_id', sa.Integer(), |
|
149 | sa.Column( | |
111 | sa.ForeignKey('resources.resource_id', onupdate='cascade', |
|
150 | "resource_id", | |
112 | ondelete='cascade'), nullable=False, |
|
151 | sa.Integer(), | |
113 | primary_key=True, autoincrement=False), |
|
152 | sa.ForeignKey( | |
114 | sa.Column('domains', sa.UnicodeText, nullable=False), |
|
153 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
115 | sa.Column('api_key', sa.Unicode(32), nullable=False, index=True), |
|
154 | ), | |
116 | sa.Column('default_grouping', sa.Unicode(20), nullable=False, server_default='url_type'), |
|
155 | nullable=False, | |
117 | sa.Column('public_key', sa.Unicode(32), nullable=False, index=True), |
|
156 | primary_key=True, | |
118 | sa.Column('error_report_threshold', sa.Integer(), server_default='10', nullable=False), |
|
157 | autoincrement=False, | |
119 | sa.Column('slow_report_threshold', sa.Integer(), server_default='10', nullable=False), |
|
158 | ), | |
120 |
sa.Column( |
|
159 | sa.Column("domains", sa.UnicodeText, nullable=False), | |
121 | sa.Column('allow_permanent_storage', sa.Boolean(), server_default="false", nullable=False), |
|
160 | sa.Column("api_key", sa.Unicode(32), nullable=False, index=True), | |
122 | ) |
|
161 | sa.Column( | |
123 | op.create_unique_constraint(None, 'applications', |
|
162 | "default_grouping", | |
124 | ["public_key"]) |
|
163 | sa.Unicode(20), | |
125 | op.create_unique_constraint(None, 'applications', |
|
164 | nullable=False, | |
126 | ["api_key"]) |
|
165 | server_default="url_type", | |
|
166 | ), | |||
|
167 | sa.Column("public_key", sa.Unicode(32), nullable=False, index=True), | |||
|
168 | sa.Column( | |||
|
169 | "error_report_threshold", sa.Integer(), server_default="10", nullable=False | |||
|
170 | ), | |||
|
171 | sa.Column( | |||
|
172 | "slow_report_threshold", sa.Integer(), server_default="10", nullable=False | |||
|
173 | ), | |||
|
174 | sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False), | |||
|
175 | sa.Column( | |||
|
176 | "allow_permanent_storage", | |||
|
177 | sa.Boolean(), | |||
|
178 | server_default="false", | |||
|
179 | nullable=False, | |||
|
180 | ), | |||
|
181 | ) | |||
|
182 | op.create_unique_constraint(None, "applications", ["public_key"]) | |||
|
183 | op.create_unique_constraint(None, "applications", ["api_key"]) | |||
127 |
|
184 | |||
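Passing None as the constraint name works because env.py attaches a naming_convention to target_metadata, letting SQLAlchemy derive names such as uq_applications_public_key on its own. The real NAMING_CONVENTION lives in appenlight.models and is not shown in this diff; an illustrative mapping built from SQLAlchemy's documented convention keys might look like:

    # Hypothetical sketch only -- the project's actual convention may differ.
    NAMING_CONVENTION = {
        "ix": "ix_%(column_0_label)s",
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s",
    }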
128 | op.create_table( |
|
185 | op.create_table( | |
129 |
|
|
186 | "metrics", | |
130 |
sa.Column( |
|
187 | sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True), | |
131 | sa.Column('resource_id', sa.Integer(), |
|
188 | sa.Column( | |
132 | sa.ForeignKey('resources.resource_id', |
|
189 | "resource_id", | |
133 | onupdate='cascade', |
|
190 | sa.Integer(), | |
134 | ondelete='cascade')), |
|
191 | sa.ForeignKey( | |
135 | sa.Column('timestamp', sa.DateTime), |
|
192 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
136 | sa.Column('namespace', sa.Unicode(255)), |
|
193 | ), | |
137 | sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}") |
|
194 | ), | |
|
195 | sa.Column("timestamp", sa.DateTime), | |||
|
196 | sa.Column("namespace", sa.Unicode(255)), | |||
|
197 | sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"), | |||
138 | ) |
|
198 | ) | |
139 |
|
199 | |||
140 | op.create_table( |
|
200 | op.create_table( | |
141 |
|
|
201 | "events", | |
142 |
sa.Column( |
|
202 | sa.Column("id", sa.Integer, nullable=False, primary_key=True), | |
143 |
sa.Column( |
|
203 | sa.Column("start_date", sa.DateTime, nullable=False, index=True), | |
144 |
sa.Column( |
|
204 | sa.Column("end_date", sa.DateTime), | |
145 |
sa.Column( |
|
205 | sa.Column("status", sa.Integer(), nullable=False, index=True), | |
146 |
sa.Column( |
|
206 | sa.Column("event_type", sa.Integer(), nullable=False, index=True), | |
147 |
sa.Column( |
|
207 | sa.Column("origin_user_id", sa.Integer()), | |
148 |
sa.Column( |
|
208 | sa.Column("target_user_id", sa.Integer()), | |
149 |
sa.Column( |
|
209 | sa.Column("resource_id", sa.Integer(), index=True), | |
150 |
sa.Column( |
|
210 | sa.Column("text", sa.UnicodeText, server_default=""), | |
151 |
sa.Column( |
|
211 | sa.Column("values", sa.dialects.postgresql.JSON), | |
152 |
sa.Column( |
|
212 | sa.Column("target_id", sa.Integer()), | |
153 |
sa.Column( |
|
213 | sa.Column("target_uuid", sa.Unicode(40), index=True), | |
154 | ) |
|
214 | ) | |
155 |
|
215 | |||
156 | op.create_table( |
|
216 | op.create_table( | |
157 |
|
|
217 | "logs", | |
158 |
sa.Column( |
|
218 | sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True), | |
159 | sa.Column('resource_id', sa.Integer(), |
|
219 | sa.Column( | |
160 | sa.ForeignKey('resources.resource_id', |
|
220 | "resource_id", | |
161 | onupdate='cascade', |
|
221 | sa.Integer(), | |
162 | ondelete='cascade')), |
|
222 | sa.ForeignKey( | |
163 | sa.Column('log_level', sa.SmallInteger(), nullable=False), |
|
223 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
164 | sa.Column('primary_key', sa.Unicode(128), nullable=True), |
|
224 | ), | |
165 | sa.Column('message', sa.UnicodeText, nullable=False, server_default=''), |
|
225 | ), | |
166 | sa.Column('timestamp', sa.DateTime), |
|
226 | sa.Column("log_level", sa.SmallInteger(), nullable=False), | |
167 |
sa.Column( |
|
227 | sa.Column("primary_key", sa.Unicode(128), nullable=True), | |
168 | sa.Column('request_id', sa.Unicode(40)), |
|
228 | sa.Column("message", sa.UnicodeText, nullable=False, server_default=""), | |
169 | sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}"), |
|
229 | sa.Column("timestamp", sa.DateTime), | |
170 | sa.Column('permanent', sa.Boolean(), server_default="false", |
|
230 | sa.Column("namespace", sa.Unicode(255)), | |
171 | nullable=False) |
|
231 | sa.Column("request_id", sa.Unicode(40)), | |
|
232 | sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"), | |||
|
233 | sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False), | |||
172 | ) |
|
234 | ) | |
173 |
|
235 | |||
174 | op.create_table( |
|
236 | op.create_table( | |
175 |
|
|
237 | "reports_groups", | |
176 |
sa.Column( |
|
238 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
177 | sa.Column('resource_id', sa.Integer, |
|
239 | sa.Column( | |
178 | sa.ForeignKey('resources.resource_id', onupdate='cascade', |
|
240 | "resource_id", | |
179 | ondelete='cascade'), nullable=False), |
|
241 | sa.Integer, | |
180 | sa.Column('priority', sa.Integer, nullable=False, server_default="5"), |
|
242 | sa.ForeignKey( | |
181 | sa.Column('first_timestamp', sa.DateTime(), nullable=False, server_default=sa.func.now()), |
|
243 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
182 | sa.Column('last_timestamp', sa.DateTime()), |
|
244 | ), | |
183 | sa.Column('error', sa.UnicodeText, nullable=False, server_default=""), |
|
245 | nullable=False, | |
184 | sa.Column('grouping_hash', sa.Unicode(40), nullable=False, server_default=""), |
|
246 | ), | |
185 |
sa.Column( |
|
247 | sa.Column("priority", sa.Integer, nullable=False, server_default="5"), | |
186 | sa.Column('report_type', sa.Integer, nullable=False, server_default="0"), |
|
248 | sa.Column( | |
187 | sa.Column('total_reports', sa.Integer, nullable=False, server_default="0"), |
|
249 | "first_timestamp", | |
188 | sa.Column('last_report', sa.Integer, nullable=False, server_default="0"), |
|
250 | sa.DateTime(), | |
189 | sa.Column('occurences', sa.Integer, nullable=False, server_default="1"), |
|
251 | nullable=False, | |
190 | sa.Column('average_duration', sa.Float(), nullable=False, server_default="0"), |
|
252 | server_default=sa.func.now(), | |
191 | sa.Column('summed_duration', sa.Float(), nullable=False, server_default="0"), |
|
253 | ), | |
192 | sa.Column('notified', sa.Boolean, nullable=False, server_default="False"), |
|
254 | sa.Column("last_timestamp", sa.DateTime()), | |
193 |
sa.Column( |
|
255 | sa.Column("error", sa.UnicodeText, nullable=False, server_default=""), | |
194 |
sa.Column( |
|
256 | sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""), | |
195 | sa.Column('read', sa.Boolean, nullable=False, server_default="False"), |
|
257 | sa.Column( | |
|
258 | "triggered_postprocesses_ids", | |||
|
259 | sa.dialects.postgresql.JSON, | |||
|
260 | nullable=False, | |||
|
261 | server_default="[]", | |||
|
262 | ), | |||
|
263 | sa.Column("report_type", sa.Integer, nullable=False, server_default="0"), | |||
|
264 | sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"), | |||
|
265 | sa.Column("last_report", sa.Integer, nullable=False, server_default="0"), | |||
|
266 | sa.Column("occurences", sa.Integer, nullable=False, server_default="1"), | |||
|
267 | sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"), | |||
|
268 | sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"), | |||
|
269 | sa.Column("notified", sa.Boolean, nullable=False, server_default="False"), | |||
|
270 | sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"), | |||
|
271 | sa.Column("public", sa.Boolean, nullable=False, server_default="False"), | |||
|
272 | sa.Column("read", sa.Boolean, nullable=False, server_default="False"), | |||
196 | ) |
|
273 | ) | |
197 |
|
274 | |||
198 | op.create_table( |
|
275 | op.create_table( | |
199 |
|
|
276 | "reports", | |
200 |
sa.Column( |
|
277 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
201 | sa.Column('group_id', sa.types.BigInteger, |
|
278 | sa.Column( | |
202 | sa.ForeignKey('reports_groups.id', onupdate='cascade', |
|
279 | "group_id", | |
203 | ondelete='cascade'), nullable=False, index=True), |
|
280 | sa.types.BigInteger, | |
204 | sa.Column('resource_id', sa.Integer, nullable=False, index=True), |
|
281 | sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"), | |
205 | sa.Column('report_type', sa.Integer, nullable=False, server_default="0"), |
|
282 | nullable=False, | |
206 | sa.Column('error', sa.UnicodeText, nullable=False, server_default=""), |
|
283 | index=True, | |
207 | sa.Column('extra', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"), |
|
284 | ), | |
208 |
sa.Column( |
|
285 | sa.Column("resource_id", sa.Integer, nullable=False, index=True), | |
209 |
sa.Column( |
|
286 | sa.Column("report_type", sa.Integer, nullable=False, server_default="0"), | |
210 |
sa.Column( |
|
287 | sa.Column("error", sa.UnicodeText, nullable=False, server_default=""), | |
211 | sa.Column('username', sa.Unicode(255), nullable=False, server_default=""), |
|
288 | sa.Column( | |
212 |
|
|
289 | "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}" | |
213 | sa.Column('url', sa.UnicodeText, nullable=False, server_default=""), |
|
290 | ), | |
214 | sa.Column('request_id', sa.Unicode(40), nullable=False, server_default=""), |
|
291 | sa.Column( | |
215 |
|
|
292 | "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}" | |
216 | sa.Column('traceback', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"), |
|
293 | ), | |
217 | sa.Column('traceback_hash', sa.Unicode(40), nullable=False, server_default=""), |
|
294 | sa.Column( | |
218 | sa.Column('start_time', sa.DateTime(), nullable=False, server_default=sa.func.now()), |
|
295 | "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}" | |
219 | sa.Column('end_time', sa.DateTime()), |
|
296 | ), | |
220 |
sa.Column( |
|
297 | sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""), | |
221 |
sa.Column( |
|
298 | sa.Column("username", sa.Unicode(255), nullable=False, server_default=""), | |
222 | sa.Column('http_status', sa.Integer, index=True), |
|
299 | sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""), | |
223 | sa.Column('url_domain', sa.Unicode(128)), |
|
300 | sa.Column("url", sa.UnicodeText, nullable=False, server_default=""), | |
224 | sa.Column('url_path', sa.UnicodeText), |
|
301 | sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""), | |
225 | sa.Column('language', sa.Integer, server_default="0"), |
|
302 | sa.Column( | |
226 | ) |
|
303 | "request_stats", | |
227 | op.create_index(None, 'reports', |
|
304 | sa.dialects.postgresql.JSON, | |
228 | [sa.text("(tags ->> 'server_name')")]) |
|
305 | nullable=False, | |
229 | op.create_index(None, 'reports', |
|
306 | server_default="{}", | |
230 | [sa.text("(tags ->> 'view_name')")]) |
|
307 | ), | |
|
308 | sa.Column( | |||
|
309 | "traceback", | |||
|
310 | sa.dialects.postgresql.JSON, | |||
|
311 | nullable=False, | |||
|
312 | server_default="{}", | |||
|
313 | ), | |||
|
314 | sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""), | |||
|
315 | sa.Column( | |||
|
316 | "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now() | |||
|
317 | ), | |||
|
318 | sa.Column("end_time", sa.DateTime()), | |||
|
319 | sa.Column( | |||
|
320 | "report_group_time", | |||
|
321 | sa.DateTime, | |||
|
322 | index=True, | |||
|
323 | nullable=False, | |||
|
324 | server_default=sa.func.now(), | |||
|
325 | ), | |||
|
326 | sa.Column("duration", sa.Float(), nullable=False, server_default="0"), | |||
|
327 | sa.Column("http_status", sa.Integer, index=True), | |||
|
328 | sa.Column("url_domain", sa.Unicode(128)), | |||
|
329 | sa.Column("url_path", sa.UnicodeText), | |||
|
330 | sa.Column("language", sa.Integer, server_default="0"), | |||
|
331 | ) | |||
|
332 | op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")]) | |||
|
333 | op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")]) | |||
231 |
|
334 | |||
232 | op.create_table( |
|
335 | op.create_table( | |
233 |
|
|
336 | "reports_assignments", | |
234 |
sa.Column( |
|
337 | sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True), | |
235 |
sa.Column( |
|
338 | sa.Column( | |
236 | sa.ForeignKey('users.id', onupdate='cascade',ondelete='cascade'), |
|
339 | "owner_id", | |
237 | nullable=False, primary_key=True), |
|
340 | sa.Integer, | |
238 | sa.Column('report_time', sa.DateTime, nullable=False) |
|
341 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |
239 | ) |
|
342 | nullable=False, | |
|
343 | primary_key=True, | |||
|
344 | ), | |||
|
345 | sa.Column("report_time", sa.DateTime, nullable=False), | |||
|
346 | ) | |||
240 |
|
347 | |||
241 | op.create_table( |
|
348 | op.create_table( | |
242 |
|
|
349 | "reports_comments", | |
243 |
sa.Column( |
|
350 | sa.Column("comment_id", sa.Integer, primary_key=True), | |
244 |
sa.Column( |
|
351 | sa.Column("body", sa.UnicodeText, nullable=False, server_default=""), | |
245 |
sa.Column( |
|
352 | sa.Column( | |
246 | sa.ForeignKey('users.id', onupdate='cascade', |
|
353 | "owner_id", | |
247 | ondelete='set null'), nullable=True), |
|
354 | sa.Integer, | |
248 | sa.Column('created_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()), |
|
355 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"), | |
249 | sa.Column('report_time', sa.DateTime, nullable=False), |
|
356 | nullable=True, | |
250 | sa.Column('group_id', sa.types.BigInteger, nullable=False) |
|
357 | ), | |
|
358 | sa.Column( | |||
|
359 | "created_timestamp", | |||
|
360 | sa.DateTime, | |||
|
361 | nullable=False, | |||
|
362 | server_default=sa.func.now(), | |||
|
363 | ), | |||
|
364 | sa.Column("report_time", sa.DateTime, nullable=False), | |||
|
365 | sa.Column("group_id", sa.types.BigInteger, nullable=False), | |||
251 | ) |
|
366 | ) | |
252 |
|
367 | |||
253 | op.create_table( |
|
368 | op.create_table( | |
254 |
|
|
369 | "reports_stats", | |
255 |
sa.Column( |
|
370 | sa.Column("resource_id", sa.Integer, nullable=False, index=True), | |
256 |
sa.Column( |
|
371 | sa.Column("start_interval", sa.DateTime, nullable=False, index=True), | |
257 |
sa.Column( |
|
372 | sa.Column("group_id", sa.types.BigInteger, index=True), | |
258 | sa.Column('occurences', sa.Integer, nullable=False, server_default='0', index=True), |
|
373 | sa.Column( | |
259 | sa.Column('owner_user_id', sa.Integer), |
|
374 | "occurences", sa.Integer, nullable=False, server_default="0", index=True | |
260 | sa.Column('type', sa.Integer, index=True, nullable=False), |
|
375 | ), | |
261 | sa.Column('duration', sa.Float(), server_default='0'), |
|
376 | sa.Column("owner_user_id", sa.Integer), | |
262 | sa.Column('server_name', sa.Unicode(128), |
|
377 | sa.Column("type", sa.Integer, index=True, nullable=False), | |
263 | server_default=''), |
|
378 | sa.Column("duration", sa.Float(), server_default="0"), | |
264 |
sa.Column( |
|
379 | sa.Column("server_name", sa.Unicode(128), server_default=""), | |
265 | server_default=''), |
|
380 | sa.Column("view_name", sa.Unicode(128), server_default=""), | |
266 |
sa.Column( |
|
381 | sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True), | |
267 | ) |
|
382 | ) | |
268 | op.create_index('ix_reports_stats_start_interval_group_id', 'reports_stats', |
|
383 | op.create_index( | |
269 |
|
|
384 | "ix_reports_stats_start_interval_group_id", | |
|
385 | "reports_stats", | |||
|
386 | ["start_interval", "group_id"], | |||
|
387 | ) | |||
270 |
|
388 | |||
271 | op.create_table( |
|
389 | op.create_table( | |
272 |
|
|
390 | "slow_calls", | |
273 |
sa.Column( |
|
391 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
274 | sa.Column('report_id', sa.types.BigInteger, sa.ForeignKey('reports.id', onupdate='cascade', ondelete='cascade'), |
|
392 | sa.Column( | |
275 | nullable=False, index=True), |
|
393 | "report_id", | |
276 | sa.Column('duration', sa.Float(), nullable=False, server_default="0", index=True), |
|
394 | sa.types.BigInteger, | |
277 | sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now(), index=True), |
|
395 | sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"), | |
278 | sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()), |
|
396 | nullable=False, | |
279 | sa.Column('type', sa.Unicode(16), nullable=False, index=True), |
|
397 | index=True, | |
280 | sa.Column('statement', sa.UnicodeText, nullable=False, server_default=''), |
|
398 | ), | |
281 | sa.Column('parameters', sa.dialects.postgresql.JSON, nullable=False), |
|
399 | sa.Column( | |
282 | sa.Column('location', sa.UnicodeText, server_default=''), |
|
400 | "duration", sa.Float(), nullable=False, server_default="0", index=True | |
283 | sa.Column('subtype', sa.Unicode(16), nullable=False, index=True), |
|
401 | ), | |
284 | sa.Column('resource_id', sa.Integer, nullable=False, index=True), |
|
402 | sa.Column( | |
285 | sa.Column('statement_hash', sa.Unicode(60), index=True) |
|
403 | "timestamp", | |
|
404 | sa.DateTime, | |||
|
405 | nullable=False, | |||
|
406 | server_default=sa.func.now(), | |||
|
407 | index=True, | |||
|
408 | ), | |||
|
409 | sa.Column( | |||
|
410 | "report_group_time", | |||
|
411 | sa.DateTime, | |||
|
412 | index=True, | |||
|
413 | nullable=False, | |||
|
414 | server_default=sa.func.now(), | |||
|
415 | ), | |||
|
416 | sa.Column("type", sa.Unicode(16), nullable=False, index=True), | |||
|
417 | sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""), | |||
|
418 | sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False), | |||
|
419 | sa.Column("location", sa.UnicodeText, server_default=""), | |||
|
420 | sa.Column("subtype", sa.Unicode(16), nullable=False, index=True), | |||
|
421 | sa.Column("resource_id", sa.Integer, nullable=False, index=True), | |||
|
422 | sa.Column("statement_hash", sa.Unicode(60), index=True), | |||
286 | ) |
|
423 | ) | |
287 |
|
424 | |||
288 | op.create_table( |
|
425 | op.create_table( | |
289 |
|
|
426 | "tags", | |
290 |
sa.Column( |
|
427 | sa.Column("id", sa.types.BigInteger, primary_key=True), | |
291 | sa.Column('resource_id', sa.Integer, |
|
428 | sa.Column( | |
292 | sa.ForeignKey('resources.resource_id', onupdate='cascade', |
|
429 | "resource_id", | |
293 | ondelete='cascade')), |
|
430 | sa.Integer, | |
294 | sa.Column('first_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()), |
|
431 | sa.ForeignKey( | |
295 | sa.Column('last_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()), |
|
432 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
296 | sa.Column('name', sa.Unicode(32), nullable=False), |
|
433 | ), | |
297 | sa.Column('value', sa.dialects.postgresql.JSON, nullable=False), |
|
434 | ), | |
298 | sa.Column('times_seen', sa.Integer, nullable=False, server_default='1') |
|
435 | sa.Column( | |
|
436 | "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now() | |||
|
437 | ), | |||
|
438 | sa.Column( | |||
|
439 | "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now() | |||
|
440 | ), | |||
|
441 | sa.Column("name", sa.Unicode(32), nullable=False), | |||
|
442 | sa.Column("value", sa.dialects.postgresql.JSON, nullable=False), | |||
|
443 | sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"), | |||
299 | ) |
|
444 | ) | |
300 |
|
445 | |||
301 | op.create_table( |
|
446 | op.create_table( | |
302 |
|
|
447 | "auth_tokens", | |
303 |
sa.Column( |
|
448 | sa.Column("id", sa.Integer, nullable=False, primary_key=True), | |
304 |
sa.Column( |
|
449 | sa.Column("token", sa.Unicode), | |
305 | sa.Column('creation_date', sa.DateTime, nullable=False, server_default=sa.func.now()), |
|
450 | sa.Column( | |
306 | sa.Column('expires', sa.DateTime), |
|
451 | "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now() | |
307 | sa.Column('owner_id', sa.Integer, |
|
452 | ), | |
308 | sa.ForeignKey('users.id', onupdate='cascade', |
|
453 | sa.Column("expires", sa.DateTime), | |
309 | ondelete='cascade')), |
|
454 | sa.Column( | |
310 | sa.Column('description', sa.Unicode), |
|
455 | "owner_id", | |
|
456 | sa.Integer, | |||
|
457 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |||
|
458 | ), | |||
|
459 | sa.Column("description", sa.Unicode), | |||
311 | ) |
|
460 | ) | |
312 |
|
461 | |||
313 | op.create_table( |
|
462 | op.create_table( | |
314 |
|
|
463 | "channels_actions", | |
315 | sa.Column('channel_pkey', sa.Integer, |
|
464 | sa.Column( | |
316 | sa.ForeignKey('alert_channels.pkey', |
|
465 | "channel_pkey", | |
317 | ondelete='CASCADE', onupdate='CASCADE')), |
|
466 | sa.Integer, | |
318 | sa.Column('action_pkey', sa.Integer, |
|
467 | sa.ForeignKey( | |
319 | sa.ForeignKey('alert_channels_actions.pkey', |
|
468 | "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE" | |
320 | ondelete='CASCADE', onupdate='CASCADE')) |
|
469 | ), | |
|
470 | ), | |||
|
471 | sa.Column( | |||
|
472 | "action_pkey", | |||
|
473 | sa.Integer, | |||
|
474 | sa.ForeignKey( | |||
|
475 | "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE" | |||
|
476 | ), | |||
|
477 | ), | |||
321 | ) |
|
478 | ) | |
322 |
|
479 | |||
323 | op.create_table( |
|
480 | op.create_table( | |
324 |
|
|
481 | "config", | |
325 |
sa.Column( |
|
482 | sa.Column("key", sa.Unicode(128), primary_key=True), | |
326 |
sa.Column( |
|
483 | sa.Column("section", sa.Unicode(128), primary_key=True), | |
327 |
sa.Column( |
|
484 | sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"), | |
328 | server_default="{}") |
|
|||
329 | ) |
|
485 | ) | |
330 |
|
486 | |||
331 | op.create_table( |
|
487 | op.create_table( | |
332 |
|
|
488 | "plugin_configs", | |
333 |
sa.Column( |
|
489 | sa.Column("id", sa.Integer, primary_key=True), | |
334 |
sa.Column( |
|
490 | sa.Column("plugin_name", sa.Unicode(128)), | |
335 |
sa.Column( |
|
491 | sa.Column("section", sa.Unicode(128)), | |
336 |
sa.Column( |
|
492 | sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"), | |
337 | server_default="{}"), |
|
493 | sa.Column( | |
338 | sa.Column('resource_id', sa.Integer(), |
|
494 | "resource_id", | |
339 | sa.ForeignKey('resources.resource_id', onupdate='cascade', |
|
495 | sa.Integer(), | |
340 | ondelete='cascade')), |
|
496 | sa.ForeignKey( | |
341 | sa.Column('owner_id', sa.Integer(), |
|
497 | "resources.resource_id", onupdate="cascade", ondelete="cascade" | |
342 | sa.ForeignKey('users.id', onupdate='cascade', |
|
498 | ), | |
343 | ondelete='cascade'))) |
|
499 | ), | |
|
500 | sa.Column( | |||
|
501 | "owner_id", | |||
|
502 | sa.Integer(), | |||
|
503 | sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"), | |||
|
504 | ), | |||
|
505 | ) | |||
344 |
|
506 | |||
345 | op.create_table( |
|
507 | op.create_table( | |
346 |
|
|
508 | "rc_versions", | |
347 |
sa.Column( |
|
509 | sa.Column("name", sa.Unicode(40), primary_key=True), | |
348 |
sa.Column( |
|
510 | sa.Column("value", sa.Unicode(40)), | |
|
511 | ) | |||
|
512 | version_table = sa.table( | |||
|
513 | "rc_versions", | |||
|
514 | sa.Column("name", sa.Unicode(40)), | |||
|
515 | sa.Column("value", sa.Unicode(40)), | |||
349 | ) |
|
516 | ) | |
350 | version_table = sa.table('rc_versions', |
|
|||
351 | sa.Column('name', sa.Unicode(40)), |
|
|||
352 | sa.Column('value', sa.Unicode(40))) |
|
|||
353 |
|
517 | |||
354 |
insert = version_table.insert().values(name= |
|
518 | insert = version_table.insert().values(name="es_reports") | |
355 | op.execute(insert) |
|
519 | op.execute(insert) | |
356 |
insert = version_table.insert().values(name= |
|
520 | insert = version_table.insert().values(name="es_reports_groups") | |
357 | op.execute(insert) |
|
521 | op.execute(insert) | |
358 |
insert = version_table.insert().values(name= |
|
522 | insert = version_table.insert().values(name="es_reports_stats") | |
359 | op.execute(insert) |
|
523 | op.execute(insert) | |
360 |
insert = version_table.insert().values(name= |
|
524 | insert = version_table.insert().values(name="es_logs") | |
361 | op.execute(insert) |
|
525 | op.execute(insert) | |
362 |
insert = version_table.insert().values(name= |
|
526 | insert = version_table.insert().values(name="es_metrics") | |
363 | op.execute(insert) |
|
527 | op.execute(insert) | |
364 |
insert = version_table.insert().values(name= |
|
528 | insert = version_table.insert().values(name="es_slow_calls") | |
365 | op.execute(insert) |
|
529 | op.execute(insert) | |
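rc_versions serves as a small key/value registry, seeded here with one row per Elasticsearch-backed dataset. The sa.table construct is a lightweight, metadata-free handle that exists only so these INSERTs can be rendered inside the migration; a sketch of what each statement compiles to:

    import sqlalchemy as sa

    version_table = sa.table("rc_versions", sa.column("name", sa.Unicode(40)))
    stmt = version_table.insert().values(name="es_reports")
    print(stmt)  # INSERT INTO rc_versions (name) VALUES (:name)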
366 |
|
530 | |||
367 |
|
531 | op.execute( | ||
368 | op.execute(''' |
|
532 | """ | |
369 |
|
|
533 | CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone) | |
370 |
|
|
534 | RETURNS timestamp without time zone AS | |
371 | $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$ |
|
535 | $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$ | |
372 |
|
|
536 | LANGUAGE sql VOLATILE; | |
373 | ''') |
|
537 | """ | |
|
538 | ) | |||
374 |
|
539 | |||
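floor_time_5min rounds a timestamp down to its 5-minute bucket, presumably the granularity the stats tables use for start_interval. The same computation in Python, as a sketch for readers who prefer it to the SQL function body:

    from datetime import datetime

    def floor_time_5min(ts: datetime) -> datetime:
        # Mirror of the SQL helper: truncate to the hour, then keep only
        # whole 5-minute steps of the minute component.
        return ts.replace(minute=ts.minute - ts.minute % 5, second=0, microsecond=0)

    assert floor_time_5min(datetime(2014, 10, 13, 23, 47, 38)) == datetime(2014, 10, 13, 23, 45)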
375 |
op.execute( |
|
540 | op.execute( | |
|
541 | """ | |||
376 |
|
|
542 | CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger | |
377 |
|
|
543 | LANGUAGE plpgsql SECURITY DEFINER | |
378 |
|
|
544 | AS $$ | |
379 |
|
|
545 | DECLARE | |
380 | main_table varchar := 'logs'; |
|
546 | main_table varchar := 'logs'; | |
381 | partitioned_table varchar := ''; |
|
547 | partitioned_table varchar := ''; | |
382 |
|
|
548 | BEGIN | |
383 |
|
|
549 | ||
384 |
|
|
550 | IF NEW.permanent THEN | |
385 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp); |
|
551 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp); | |
386 |
|
|
552 | ELSE | |
387 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp); |
|
553 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp); | |
388 |
|
|
554 | END IF; | |
389 |
|
|
555 | ||
390 |
|
|
556 | BEGIN | |
391 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
557 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
392 |
|
|
558 | EXCEPTION | |
393 |
|
|
559 | WHEN undefined_table THEN | |
394 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
560 | RAISE NOTICE 'A partition has been created %', partitioned_table; | |
395 |
|
|
561 | IF NEW.permanent THEN | |
396 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', |
|
562 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', | |
397 |
|
|
563 | partitioned_table, | |
398 | quote_literal(date_trunc('month', NEW.timestamp)::date) , |
|
564 | quote_literal(date_trunc('month', NEW.timestamp)::date) , | |
399 | quote_literal((date_trunc('month', NEW.timestamp)::date + interval '1 month')::text), |
|
565 | quote_literal((date_trunc('month', NEW.timestamp)::date + interval '1 month')::text), | |
400 |
|
|
566 | main_table); | |
401 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(log_id);', partitioned_table, partitioned_table); |
|
567 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(log_id);', partitioned_table, partitioned_table); | |
402 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
568 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); | |
403 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); |
|
569 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); | |
404 | EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table); |
|
570 | EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table); | |
405 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); |
|
571 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); | |
406 | EXECUTE format('CREATE INDEX ix_%s_pkey_namespace ON %s (primary_key, namespace);', partitioned_table, partitioned_table); |
|
572 | EXECUTE format('CREATE INDEX ix_%s_pkey_namespace ON %s (primary_key, namespace);', partitioned_table, partitioned_table); | |
407 |
|
|
573 | ELSE | |
408 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', |
|
574 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', | |
409 |
|
|
575 | partitioned_table, | |
410 | quote_literal(date_trunc('day', NEW.timestamp)::date) , |
|
576 | quote_literal(date_trunc('day', NEW.timestamp)::date) , | |
411 | quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text), |
|
577 | quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text), | |
412 |
|
|
578 | main_table); | |
413 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s_ PRIMARY KEY(log_id);', partitioned_table, partitioned_table); |
|
579 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s_ PRIMARY KEY(log_id);', partitioned_table, partitioned_table); | |
414 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
580 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); | |
415 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); |
|
581 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); | |
416 | EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table); |
|
582 | EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table); | |
417 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); |
|
583 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); | |
418 | EXECUTE format('CREATE INDEX ix_%s_primary_key_namespace ON %s (primary_key,namespace);', partitioned_table, partitioned_table); |
|
584 | EXECUTE format('CREATE INDEX ix_%s_primary_key_namespace ON %s (primary_key,namespace);', partitioned_table, partitioned_table); | |
419 |
|
|
585 | END IF; | |
420 |
|
|
586 | ||
421 |
|
|
587 | ||
422 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
588 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
423 | END; |
 |
589 | END; | |
424 |
 |
590 | |
425 |
 |
591 | |
426 | RETURN NULL; |
 |
592 | RETURN NULL; | |
427 | END |
 |
593 | END | |
428 | $$; |
 |
594 | $$; | |
429 | ''') |
|
595 | """ | |
|
596 | ) | |||
430 |
|
597 | |||
431 | op.execute(''' |
 |
598 | op.execute( |
 |
599 | """ |
432 | CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs(); |
 |
600 | CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs(); | |
433 | ''') |
|
601 | """ | |
|
602 | ) | |||
434 |
|
603 | |||
435 | op.execute(''' |
 |
604 | op.execute( |
 |
605 | """ |
436 | CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger |
 |
606 | CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger | |
437 | LANGUAGE plpgsql SECURITY DEFINER |
 |
607 | LANGUAGE plpgsql SECURITY DEFINER | |
438 | AS $$ |
 |
608 | AS $$ | |
439 | DECLARE |
 |
609 | DECLARE | |
440 | main_table varchar := 'metrics'; |
|
610 | main_table varchar := 'metrics'; | |
441 | partitioned_table varchar := ''; |
|
611 | partitioned_table varchar := ''; | |
442 | BEGIN |
 |
612 | BEGIN | |
443 |
 |
613 | |
444 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp); |
|
614 | partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp); | |
445 |
 |
615 | |
446 | BEGIN |
 |
616 | BEGIN | |
447 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
617 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
448 | EXCEPTION |
 |
618 | EXCEPTION | |
449 | WHEN undefined_table THEN |
 |
619 | WHEN undefined_table THEN | |
450 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
620 | RAISE NOTICE 'A partition has been created %', partitioned_table; | |
451 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', |
|
621 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)', | |
452 | partitioned_table, |
 |
622 | partitioned_table, | |
453 | quote_literal(date_trunc('day', NEW.timestamp)::date) , |
|
623 | quote_literal(date_trunc('day', NEW.timestamp)::date) , | |
454 | quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text), |
|
624 | quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text), | |
455 | main_table); |
 |
625 | main_table); | |
456 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(pkey);', partitioned_table, partitioned_table); |
|
626 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(pkey);', partitioned_table, partitioned_table); | |
457 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
627 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); | |
458 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); |
|
628 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table); | |
459 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); |
|
629 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table); | |
460 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
630 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
461 | END; |
 |
631 | END; | |
462 |
 |
632 | |
463 | RETURN NULL; |
 |
633 | RETURN NULL; | |
464 | END |
 |
634 | END | |
465 | $$; |
 |
635 | $$; | |
466 | ''') |
|
636 | """ | |
|
637 | ) | |||
467 |
|
638 | |||
468 | op.execute(''' |
 |
639 | op.execute( |
 |
640 | """ |
469 | CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics(); |
 |
641 | CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics(); | |
470 | ''') |
|
642 | """ | |
|
643 | ) | |||
471 |
|
644 | |||
472 | op.execute(''' |
 |
645 | op.execute( |
 |
646 | """ |
473 | CREATE FUNCTION partition_reports_stats() RETURNS trigger |
 |
647 | CREATE FUNCTION partition_reports_stats() RETURNS trigger | |
474 | LANGUAGE plpgsql SECURITY DEFINER |
 |
648 | LANGUAGE plpgsql SECURITY DEFINER | |
475 | AS $$ |
 |
649 | AS $$ | |
476 | DECLARE |
 |
650 | DECLARE | |
477 | main_table varchar := 'reports_stats'; |
|
651 | main_table varchar := 'reports_stats'; | |
478 | partitioned_table varchar := ''; |
|
652 | partitioned_table varchar := ''; | |
479 | BEGIN |
 |
653 | BEGIN | |
480 |
 |
654 | |
481 | partitioned_table := main_table || '_p_' || date_part('year', NEW.start_interval)::TEXT || '_' || DATE_part('month', NEW.start_interval); |
|
655 | partitioned_table := main_table || '_p_' || date_part('year', NEW.start_interval)::TEXT || '_' || DATE_part('month', NEW.start_interval); | |
482 |
 |
656 | |
483 | BEGIN |
 |
657 | BEGIN | |
484 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
658 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
485 | EXCEPTION |
 |
659 | EXCEPTION | |
486 | WHEN undefined_table THEN |
 |
660 | WHEN undefined_table THEN | |
487 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
661 | RAISE NOTICE 'A partition has been created %', partitioned_table; | |
488 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( start_interval >= DATE %s AND start_interval < DATE %s )) INHERITS (%s)', |
|
662 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( start_interval >= DATE %s AND start_interval < DATE %s )) INHERITS (%s)', | |
489 | partitioned_table, |
 |
663 | partitioned_table, | |
490 | quote_literal(date_trunc('month', NEW.start_interval)::date) , |
|
664 | quote_literal(date_trunc('month', NEW.start_interval)::date) , | |
491 | quote_literal((date_trunc('month', NEW.start_interval)::date + interval '1 month')::text), |
|
665 | quote_literal((date_trunc('month', NEW.start_interval)::date + interval '1 month')::text), | |
492 | main_table); |
 |
666 | main_table); | |
493 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
667 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); | |
494 | EXECUTE format('CREATE INDEX ix_%s_start_interval ON %s USING btree (start_interval);', partitioned_table, partitioned_table); |
|
668 | EXECUTE format('CREATE INDEX ix_%s_start_interval ON %s USING btree (start_interval);', partitioned_table, partitioned_table); | |
495 | EXECUTE format('CREATE INDEX ix_%s_type ON %s USING btree (type);', partitioned_table, partitioned_table); |
|
669 | EXECUTE format('CREATE INDEX ix_%s_type ON %s USING btree (type);', partitioned_table, partitioned_table); | |
496 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); |
|
670 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); | |
497 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
671 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
498 | END; |
 |
672 | END; | |
499 | RETURN NULL; |
 |
673 | RETURN NULL; | |
500 | END |
 |
674 | END | |
501 | $$; |
 |
675 | $$; | |
502 | ''') |
|
676 | """ | |
|
677 | ) | |||
503 |
|
678 | |||
504 | op.execute(''' |
 |
679 | op.execute( |
 |
680 | """ |
505 | CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats(); |
 |
681 | CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats(); | |
506 | ''') |
|
682 | """ | |
|
683 | ) | |||
507 |
|
684 | |||
508 | op.execute(''' |
 |
685 | op.execute( |
 |
686 | """ |
509 | CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger |
 |
687 | CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger | |
510 | LANGUAGE plpgsql SECURITY DEFINER |
 |
688 | LANGUAGE plpgsql SECURITY DEFINER | |
511 | AS $$ |
 |
689 | AS $$ | |
512 | DECLARE |
 |
690 | DECLARE | |
513 | main_table varchar := 'reports_groups'; |
|
691 | main_table varchar := 'reports_groups'; | |
514 | partitioned_table varchar := ''; |
|
692 | partitioned_table varchar := ''; | |
515 | BEGIN |
 |
693 | BEGIN | |
516 |
 |
694 | |
517 | partitioned_table := main_table || '_p_' || date_part('year', NEW.first_timestamp)::TEXT || '_' || DATE_part('month', NEW.first_timestamp); |
|
695 | partitioned_table := main_table || '_p_' || date_part('year', NEW.first_timestamp)::TEXT || '_' || DATE_part('month', NEW.first_timestamp); | |
518 |
 |
696 | |
519 | BEGIN |
 |
697 | BEGIN | |
520 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
698 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
521 | EXCEPTION |
 |
699 | EXCEPTION | |
522 | WHEN undefined_table THEN |
 |
700 | WHEN undefined_table THEN | |
523 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
701 | RAISE NOTICE 'A partition has been created %', partitioned_table; | |
524 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( first_timestamp >= DATE %s AND first_timestamp < DATE %s )) INHERITS (%s)', |
|
702 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( first_timestamp >= DATE %s AND first_timestamp < DATE %s )) INHERITS (%s)', | |
525 | partitioned_table, |
 |
703 | partitioned_table, | |
526 | quote_literal(date_trunc('month', NEW.first_timestamp)::date) , |
|
704 | quote_literal(date_trunc('month', NEW.first_timestamp)::date) , | |
527 | quote_literal((date_trunc('month', NEW.first_timestamp)::date + interval '1 month')::text), |
|
705 | quote_literal((date_trunc('month', NEW.first_timestamp)::date + interval '1 month')::text), | |
528 | main_table); |
 |
706 | main_table); | |
529 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
707 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); | |
530 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
708 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); | |
531 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
709 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
532 | END; |
 |
710 | END; | |
533 | RETURN NULL; |
 |
711 | RETURN NULL; | |
534 | END |
 |
712 | END | |
535 | $$; |
 |
713 | $$; | |
536 | ''') |
|
714 | """ | |
|
715 | ) | |||
537 |
|
716 | |||
538 | op.execute(''' |
 |
717 | op.execute( |
 |
718 | """ |
539 | CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups(); |
 |
719 | CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups(); | |
540 | ''') |
|
720 | """ | |
|
721 | ) | |||
541 |
|
722 | |||
542 | op.execute(''' |
 |
723 | op.execute( |
 |
724 | """ |
543 | CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger |
 |
725 | CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger | |
544 | LANGUAGE plpgsql SECURITY DEFINER |
 |
726 | LANGUAGE plpgsql SECURITY DEFINER | |
545 | AS $$ |
 |
727 | AS $$ | |
546 | DECLARE |
 |
728 | DECLARE | |
547 | main_table varchar := 'reports'; |
|
729 | main_table varchar := 'reports'; | |
548 | partitioned_table varchar := ''; |
|
730 | partitioned_table varchar := ''; | |
549 | partitioned_parent_table varchar := ''; |
|
731 | partitioned_parent_table varchar := ''; | |
550 | BEGIN |
 |
732 | BEGIN | |
551 |
 |
733 | |
552 | partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
734 | partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); | |
553 | partitioned_parent_table := 'reports_groups_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
735 | partitioned_parent_table := 'reports_groups_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); | |
554 |
 |
736 | |
555 | BEGIN |
 |
737 | BEGIN | |
556 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
738 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
557 | EXCEPTION |
 |
739 | EXCEPTION | |
558 | WHEN undefined_table THEN |
 |
740 | WHEN undefined_table THEN | |
559 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
741 | RAISE NOTICE 'A partition has been created %', partitioned_table; | |
560 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)', |
|
742 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)', | |
561 | partitioned_table, |
 |
743 | partitioned_table, | |
562 | quote_literal(date_trunc('month', NEW.report_group_time)::date) , |
|
744 | quote_literal(date_trunc('month', NEW.report_group_time)::date) , | |
563 | quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text), |
|
745 | quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text), | |
564 | main_table); |
 |
746 | main_table); | |
565 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
747 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); | |
566 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
748 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); | |
567 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_group_id FOREIGN KEY (group_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table); |
|
749 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_group_id FOREIGN KEY (group_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table); | |
568 | EXECUTE format('CREATE INDEX ix_%s_report_group_time ON %s USING btree (report_group_time);', partitioned_table, partitioned_table); |
|
750 | EXECUTE format('CREATE INDEX ix_%s_report_group_time ON %s USING btree (report_group_time);', partitioned_table, partitioned_table); | |
569 | EXECUTE format('CREATE INDEX ix_%s_group_id ON %s USING btree (group_id);', partitioned_table, partitioned_table); |
|
751 | EXECUTE format('CREATE INDEX ix_%s_group_id ON %s USING btree (group_id);', partitioned_table, partitioned_table); | |
570 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); |
|
752 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); | |
571 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
753 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
572 | END; |
 |
754 | END; | |
573 | RETURN NULL; |
 |
755 | RETURN NULL; | |
574 | END |
 |
756 | END | |
575 | $$; |
 |
757 | $$; | |
576 | ''') |
|
758 | """ | |
|
759 | ) | |||
577 |
|
760 | |||
578 | op.execute(''' |
 |
761 | op.execute( |
 |
762 | """ |
579 | CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports(); |
 |
763 | CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports(); | |
580 | ''') |
|
764 | """ | |
581 |
|
765 | ) | ||
582 |
|
766 | |||
583 | op.execute(''' |
 |
767 | op.execute( |
 |
768 | """ |
584 | CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger |
 |
769 | CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger | |
585 | LANGUAGE plpgsql SECURITY DEFINER |
 |
770 | LANGUAGE plpgsql SECURITY DEFINER | |
586 | AS $$ |
 |
771 | AS $$ | |
587 | DECLARE |
 |
772 | DECLARE | |
588 | main_table varchar := 'slow_calls'; |
|
773 | main_table varchar := 'slow_calls'; | |
589 | partitioned_table varchar := ''; |
|
774 | partitioned_table varchar := ''; | |
590 | partitioned_parent_table varchar := ''; |
|
775 | partitioned_parent_table varchar := ''; | |
591 | BEGIN |
 |
776 | BEGIN | |
592 |
 |
777 | |
593 | partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
778 | partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); | |
594 | partitioned_parent_table := 'reports_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); |
|
779 | partitioned_parent_table := 'reports_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time); | |
595 |
 |
780 | |
596 | BEGIN |
 |
781 | BEGIN | |
597 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
782 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
598 | EXCEPTION |
 |
783 | EXCEPTION | |
599 | WHEN undefined_table THEN |
 |
784 | WHEN undefined_table THEN | |
600 | RAISE NOTICE 'A partition has been created %', partitioned_table; |
|
785 | RAISE NOTICE 'A partition has been created %', partitioned_table; | |
601 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)', |
|
786 | EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)', | |
602 | partitioned_table, |
 |
787 | partitioned_table, | |
603 | quote_literal(date_trunc('month', NEW.report_group_time)::date) , |
|
788 | quote_literal(date_trunc('month', NEW.report_group_time)::date) , | |
604 | quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text), |
|
789 | quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text), | |
605 | main_table); |
 |
790 | main_table); | |
606 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); |
|
791 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table); | |
607 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); |
|
792 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table); | |
608 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_report_id FOREIGN KEY (report_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table); |
|
793 | EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_report_id FOREIGN KEY (report_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table); | |
609 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); |
|
794 | EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table); | |
610 | EXECUTE format('CREATE INDEX ix_%s_report_id ON %s USING btree (report_id);', partitioned_table, partitioned_table); |
|
795 | EXECUTE format('CREATE INDEX ix_%s_report_id ON %s USING btree (report_id);', partitioned_table, partitioned_table); | |
611 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s USING btree (timestamp);', partitioned_table, partitioned_table); |
|
796 | EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s USING btree (timestamp);', partitioned_table, partitioned_table); | |
612 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; |
|
797 | EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;'; | |
613 | END; |
 |
798 | END; | |
614 | RETURN NULL; |
 |
799 | RETURN NULL; | |
615 | END |
 |
800 | END | |
616 | $$; |
 |
801 | $$; | |
617 | ''') |
|
802 | """ | |
|
803 | ) | |||
618 |
|
804 | |||
619 | op.execute(''' |
 |
805 | op.execute( |
 |
806 | """ |
620 | CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls(); |
 |
807 | CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls(); | |
621 | ''') |
|
808 | """ | |
|
809 | ) | |||
|
810 | ||||
622 |
|
811 | |||
623 | def downgrade(): |
|
812 | def downgrade(): | |
624 | pass |
|
813 | pass |
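[Editorial sketch] Every trigger in the migration above follows the same insert-or-create pattern: try to route the NEW row into a date-named child table, and when that raises undefined_table, create the child (with a CHECK constraint bounding its date window) and retry the insert once. A minimal standalone sketch of that pattern, assuming an Alembic `op` handle; the `events` table and `created` column are illustrative placeholders, not part of this changeset:

    # Sketch of the insert-or-create-partition pattern used by the migration.
    from alembic import op

    def create_partition_trigger():
        op.execute(
            """
            CREATE OR REPLACE FUNCTION partition_events() RETURNS trigger
            LANGUAGE plpgsql SECURITY DEFINER
            AS $$
            DECLARE
                child varchar;
            BEGIN
                -- route by month, e.g. events_p_2018_2
                child := 'events_p_' || date_part('year', NEW.created)::TEXT
                         || '_' || date_part('month', NEW.created);
                BEGIN
                    EXECUTE 'INSERT INTO ' || child ||
                            ' SELECT (events ' || quote_literal(NEW) || ').*;';
                EXCEPTION WHEN undefined_table THEN
                    -- first row of the period: create the child, then retry once
                    EXECUTE format(
                        'CREATE TABLE IF NOT EXISTS %s ( CHECK( created >= DATE %s AND created < DATE %s )) INHERITS (events)',
                        child,
                        quote_literal(date_trunc('month', NEW.created)::date),
                        quote_literal((date_trunc('month', NEW.created)::date + interval '1 month')::text));
                    EXECUTE 'INSERT INTO ' || child ||
                            ' SELECT (events ' || quote_literal(NEW) || ').*;';
                END;
                -- NULL from a BEFORE INSERT trigger suppresses the insert into
                -- the parent, so rows are stored only in the child tables
                RETURN NULL;
            END
            $$;
            """
        )
        op.execute(
            """
            CREATE TRIGGER partition_events BEFORE INSERT ON events
            FOR EACH ROW EXECUTE PROCEDURE partition_events();
            """
        )

Queries against the parent still see child rows through table inheritance; only the physical storage is split per period.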
@@ -1,32 +1,40 b'' | |||||
1 | """connect resources to alert_channels |
|
1 | """connect resources to alert_channels | |
2 |
|
2 | |||
3 | Revision ID: e9fcfbdd9498 |
|
3 | Revision ID: e9fcfbdd9498 | |
4 | Revises: 55b6e612672f |
|
4 | Revises: 55b6e612672f | |
5 | Create Date: 2018-02-28 13:52:50.717217 |
|
5 | Create Date: 2018-02-28 13:52:50.717217 | |
6 |
|
6 | |||
7 | """ |
|
7 | """ | |
8 |
|
8 | |||
9 | # revision identifiers, used by Alembic. |
|
9 | # revision identifiers, used by Alembic. | |
10 | revision = 'e9fcfbdd9498' |
 |
10 | revision = "e9fcfbdd9498" | 
11 | down_revision = '55b6e612672f' |
 |
11 | down_revision = "55b6e612672f" | 
12 |
|
12 | |||
13 | from alembic import op |
|
13 | from alembic import op | |
14 | import sqlalchemy as sa |
|
14 | import sqlalchemy as sa | |
15 |
|
15 | |||
16 |
|
16 | |||
17 | def upgrade(): |
|
17 | def upgrade(): | |
18 | op.create_table( |
|
18 | op.create_table( | |
19 | 'channels_resources', |
 |
19 | "channels_resources", | 
20 | sa.Column('channel_pkey', sa.Integer, |
|
20 | sa.Column( | |
21 | sa.ForeignKey('alert_channels.pkey', |
|
21 | "channel_pkey", | |
22 | ondelete='CASCADE', onupdate='CASCADE'), |
|
22 | sa.Integer, | |
23 | primary_key=True), |
|
23 | sa.ForeignKey( | |
24 | sa.Column('resource_id', sa.Integer, |
|
24 | "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE" | |
25 | sa.ForeignKey('resources.resource_id', |
|
25 | ), | |
26 | ondelete='CASCADE', onupdate='CASCADE'), |
|
26 | primary_key=True, | |
27 | primary_key=True) |
|
27 | ), | |
|
28 | sa.Column( | |||
|
29 | "resource_id", | |||
|
30 | sa.Integer, | |||
|
31 | sa.ForeignKey( | |||
|
32 | "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE" | |||
|
33 | ), | |||
|
34 | primary_key=True, | |||
|
35 | ), | |||
28 | ) |
|
36 | ) | |
29 |
|
37 | |||
30 |
|
38 | |||
31 | def downgrade(): |
|
39 | def downgrade(): | |
32 | op.drop_table('channels_resources') |
 |
40 | op.drop_table("channels_resources") |
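[Editorial sketch] The `channels_resources` table created above is a plain many-to-many association with a composite primary key; the `AlertChannel.resources` relationship defined later in this changeset rides on it. A hedged usage sketch, assuming the AppEnlight models and `DBSession`; the specific pkey/resource_id values are illustrative:

    # Link an alert channel to a resource through channels_resources.
    from appenlight.models import DBSession
    from appenlight.models.alert_channel import AlertChannel
    from appenlight.models.resource import Resource

    channel = DBSession.query(AlertChannel).filter(AlertChannel.pkey == 1).first()
    resource = DBSession.query(Resource).filter(Resource.resource_id == 5).first()
    channel.resources.append(resource)  # writes a (channel_pkey, resource_id) row
    DBSession.flush()

Because both foreign keys cascade on update and delete, dropping either side removes the association row automatically.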
@@ -1,130 +1,139 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import logging |
|
17 | import logging | |
18 |
|
18 | |||
19 | from sqlalchemy.ext.declarative import declarative_base |
|
19 | from sqlalchemy.ext.declarative import declarative_base | |
20 | from sqlalchemy import MetaData |
|
20 | from sqlalchemy import MetaData | |
21 | from sqlalchemy.orm import scoped_session |
|
21 | from sqlalchemy.orm import scoped_session | |
22 | from sqlalchemy.orm import sessionmaker |
|
22 | from sqlalchemy.orm import sessionmaker | |
23 | from zope.sqlalchemy import ZopeTransactionExtension |
|
23 | from zope.sqlalchemy import ZopeTransactionExtension | |
24 | import ziggurat_foundations |
|
24 | import ziggurat_foundations | |
25 | from ziggurat_foundations.models.base import get_db_session |
|
25 | from ziggurat_foundations.models.base import get_db_session | |
26 |
|
26 | |||
27 | log = logging.getLogger(__name__) |
|
27 | log = logging.getLogger(__name__) | |
28 |
|
28 | |||
29 | DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) |
|
29 | DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) | |
30 |
|
30 | |||
31 | NAMING_CONVENTION = { |
|
31 | NAMING_CONVENTION = { | |
32 | "ix": 'ix_%(column_0_label)s', |
 |
32 | "ix": "ix_%(column_0_label)s", | 
33 | "uq": "uq_%(table_name)s_%(column_0_name)s", |
|
33 | "uq": "uq_%(table_name)s_%(column_0_name)s", | |
34 | "ck": "ck_%(table_name)s_%(constraint_name)s", |
|
34 | "ck": "ck_%(table_name)s_%(constraint_name)s", | |
35 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", |
|
35 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", | |
36 | "pk": "pk_%(table_name)s" |
|
36 | "pk": "pk_%(table_name)s", | |
37 | } |
|
37 | } | |
38 |
|
38 | |||
39 | metadata = MetaData(naming_convention=NAMING_CONVENTION) |
|
39 | metadata = MetaData(naming_convention=NAMING_CONVENTION) | |
40 | Base = declarative_base(metadata=metadata) |
|
40 | Base = declarative_base(metadata=metadata) | |
41 |
|
41 | |||
42 | # optional for request.db approach |
|
42 | # optional for request.db approach | |
43 | ziggurat_foundations.models.DBSession = DBSession |
|
43 | ziggurat_foundations.models.DBSession = DBSession | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | class Datastores(object): |
|
46 | class Datastores(object): | |
47 | redis = None |
|
47 | redis = None | |
48 | es = None |
|
48 | es = None | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | def register_datastores(es_conn, redis_conn, redis_lockmgr): |
|
51 | def register_datastores(es_conn, redis_conn, redis_lockmgr): | |
52 | Datastores.es = es_conn |
|
52 | Datastores.es = es_conn | |
53 | Datastores.redis = redis_conn |
|
53 | Datastores.redis = redis_conn | |
54 | Datastores.lockmgr = redis_lockmgr |
|
54 | Datastores.lockmgr = redis_lockmgr | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | class SliceableESQuery(object): |
|
57 | class SliceableESQuery(object): | |
58 | def __init__(self, query, sort_query=None, aggregations=False, **kwconfig): |
|
58 | def __init__(self, query, sort_query=None, aggregations=False, **kwconfig): | |
59 | self.query = query |
|
59 | self.query = query | |
60 | self.sort_query = sort_query |
|
60 | self.sort_query = sort_query | |
61 | self.aggregations = aggregations |
|
61 | self.aggregations = aggregations | |
62 | self.items_per_page = kwconfig.pop('items_per_page', 10) |
 |
62 | self.items_per_page = kwconfig.pop("items_per_page", 10) | 
63 | self.page = kwconfig.pop('page', 1) |
 |
63 | self.page = kwconfig.pop("page", 1) | 
64 | self.kwconfig = kwconfig |
|
64 | self.kwconfig = kwconfig | |
65 | self.result = None |
|
65 | self.result = None | |
66 |
|
66 | |||
67 | def __getitem__(self, index): |
|
67 | def __getitem__(self, index): | |
68 | config = self.kwconfig.copy() |
|
68 | config = self.kwconfig.copy() | |
69 | config['from_'] = index.start |
 |
69 | config["from_"] = index.start | 
70 | query = self.query.copy() |
|
70 | query = self.query.copy() | |
71 | if self.sort_query: |
|
71 | if self.sort_query: | |
72 | query.update(self.sort_query) |
|
72 | query.update(self.sort_query) | |
73 | self.result = Datastores.es.search( |
 |
73 | self.result = Datastores.es.search( | 
74 | **config) |
 |
74 | body=query, size=self.items_per_page, **config | 
 |
75 | ) | 
75 | if self.aggregations: |
|
76 | if self.aggregations: | |
76 | self.items = self.result.get('aggregations') |
 |
77 | self.items = self.result.get("aggregations") | 
77 | else: |
|
78 | else: | |
78 | self.items = self.result['hits']['hits'] |
 |
79 | self.items = self.result["hits"]["hits"] | 
79 |
|
80 | |||
80 | return self.items |
|
81 | return self.items | |
81 |
|
82 | |||
82 | def __iter__(self): |
|
83 | def __iter__(self): | |
83 | return self.result |
|
84 | return self.result | |
84 |
|
85 | |||
85 | def __len__(self): |
|
86 | def __len__(self): | |
86 | config = self.kwconfig.copy() |
|
87 | config = self.kwconfig.copy() | |
87 | query = self.query.copy() |
|
88 | query = self.query.copy() | |
88 | self.result = Datastores.es.search( |
 |
89 | self.result = Datastores.es.search( | 
89 | **config) |
 |
90 | body=query, size=self.items_per_page, **config | 
 |
91 | ) | 
90 | if self.aggregations: |
|
92 | if self.aggregations: | |
91 | self.items = self.result.get('aggregations') |
 |
93 | self.items = self.result.get("aggregations") | 
92 | else: |
|
94 | else: | |
93 | self.items = self.result['hits']['hits'] |
 |
95 | self.items = self.result["hits"]["hits"] | 
94 |
|
96 | |||
95 | count = int(self.result['hits']['total']) |
 |
97 | count = int(self.result["hits"]["total"]) | 
96 | return count if count < 5000 else 5000 |
|
98 | return count if count < 5000 else 5000 | |
97 |
|
99 | |||
98 |
|
100 | |||
99 | from appenlight.models.resource import Resource |
|
101 | from appenlight.models.resource import Resource | |
100 | from appenlight.models.application import Application |
|
102 | from appenlight.models.application import Application | |
101 | from appenlight.models.user import User |
|
103 | from appenlight.models.user import User | |
102 | from appenlight.models.alert_channel import AlertChannel |
|
104 | from appenlight.models.alert_channel import AlertChannel | |
103 | from appenlight.models.alert_channel_action import AlertChannelAction |
|
105 | from appenlight.models.alert_channel_action import AlertChannelAction | |
104 | from appenlight.models.metric import Metric |
|
106 | from appenlight.models.metric import Metric | |
105 | from appenlight.models.application_postprocess_conf import \ |
 |
107 | from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf | 
106 | ApplicationPostprocessConf |
 |
 |
107 | from appenlight.models.auth_token import AuthToken |
|
108 | from appenlight.models.auth_token import AuthToken | |
108 | from appenlight.models.event import Event |
|
109 | from appenlight.models.event import Event | |
109 | from appenlight.models.external_identity import ExternalIdentity |
|
110 | from appenlight.models.external_identity import ExternalIdentity | |
110 | from appenlight.models.group import Group |
|
111 | from appenlight.models.group import Group | |
111 | from appenlight.models.group_permission import GroupPermission |
|
112 | from appenlight.models.group_permission import GroupPermission | |
112 | from appenlight.models.group_resource_permission import GroupResourcePermission |
|
113 | from appenlight.models.group_resource_permission import GroupResourcePermission | |
113 | from appenlight.models.log import Log |
|
114 | from appenlight.models.log import Log | |
114 | from appenlight.models.plugin_config import PluginConfig |
|
115 | from appenlight.models.plugin_config import PluginConfig | |
115 | from appenlight.models.report import Report |
|
116 | from appenlight.models.report import Report | |
116 | from appenlight.models.report_group import ReportGroup |
|
117 | from appenlight.models.report_group import ReportGroup | |
117 | from appenlight.models.report_comment import ReportComment |
|
118 | from appenlight.models.report_comment import ReportComment | |
118 | from appenlight.models.report_assignment import ReportAssignment |
|
119 | from appenlight.models.report_assignment import ReportAssignment | |
119 | from appenlight.models.report_stat import ReportStat |
|
120 | from appenlight.models.report_stat import ReportStat | |
120 | from appenlight.models.slow_call import SlowCall |
|
121 | from appenlight.models.slow_call import SlowCall | |
121 | from appenlight.models.tag import Tag |
|
122 | from appenlight.models.tag import Tag | |
122 | from appenlight.models.user_group import UserGroup |
|
123 | from appenlight.models.user_group import UserGroup | |
123 | from appenlight.models.user_permission import UserPermission |
|
124 | from appenlight.models.user_permission import UserPermission | |
124 | from appenlight.models.user_resource_permission import UserResourcePermission |
|
125 | from appenlight.models.user_resource_permission import UserResourcePermission | |
125 | from ziggurat_foundations import ziggurat_model_init |
|
126 | from ziggurat_foundations import ziggurat_model_init | |
126 |
|
127 | |||
127 | ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission, |
|
128 | ziggurat_model_init( | |
128 | UserResourcePermission, GroupResourcePermission, |
|
129 | User, | |
129 | Resource, |
|
130 | Group, | |
130 | ExternalIdentity, passwordmanager=None) |
|
131 | UserGroup, | |
|
132 | GroupPermission, | |||
|
133 | UserPermission, | |||
|
134 | UserResourcePermission, | |||
|
135 | GroupResourcePermission, | |||
|
136 | Resource, | |||
|
137 | ExternalIdentity, | |||
|
138 | passwordmanager=None, | |||
|
139 | ) |
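[Editorial sketch] `SliceableESQuery` above exists so that pagination helpers can treat an Elasticsearch query like a sliceable sequence: `__getitem__` re-issues the search with `from_` set to the slice start, and `__len__` reports the (capped) hit total. A hedged sketch of the intended use; the query body and the extra keyword arguments such as `index` are illustrative, not confirmed by this diff:

    # Page through an ES query with the list-like protocol.
    query = {"query": {"match_all": {}}}
    sliceable = SliceableESQuery(query, items_per_page=10, index="logs")

    first_page = sliceable[0:10]   # es.search(..., from_=0); returns the hits
    total = len(sliceable)         # hit count, capped at 5000

The 5000 cap in `__len__` keeps paginators from offering deep offsets that Elasticsearch would reject or serve slowly.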
@@ -1,305 +1,298 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import logging |
|
17 | import logging | |
18 | import sqlalchemy as sa |
|
18 | import sqlalchemy as sa | |
19 | import urllib.request, urllib.parse, urllib.error |
|
19 | import urllib.request, urllib.parse, urllib.error | |
20 | from datetime import timedelta |
|
20 | from datetime import timedelta | |
21 | from appenlight.models import Base |
|
21 | from appenlight.models import Base | |
22 | from appenlight.lib.utils.date_utils import convert_date |
|
22 | from appenlight.lib.utils.date_utils import convert_date | |
23 | from sqlalchemy.dialects.postgresql import JSON |
|
23 | from sqlalchemy.dialects.postgresql import JSON | |
24 | from ziggurat_foundations.models.base import BaseModel |
|
24 | from ziggurat_foundations.models.base import BaseModel | |
25 |
|
25 | |||
26 | log = logging.getLogger(__name__) |
|
26 | log = logging.getLogger(__name__) | |
27 |
|
27 | |||
28 | # |
|
28 | # | |
29 | channel_rules_m2m_table = sa.Table( |
|
29 | channel_rules_m2m_table = sa.Table( | |
30 |
|
|
30 | "channels_actions", | |
31 | sa.Column('channel_pkey', sa.Integer, |
|
31 | Base.metadata, | |
32 |
|
|
32 | sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")), | |
33 | sa.Column('action_pkey', sa.Integer, |
|
33 | sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")), | |
34 | sa.ForeignKey('alert_channels_actions.pkey')) |
|
|||
35 | ) |
|
34 | ) | |
36 |
|
35 | |||
37 | channel_resources_m2m_table = sa.Table( |
|
36 | channel_resources_m2m_table = sa.Table( | |
38 |
|
|
37 | "channels_resources", | |
39 | sa.Column('channel_pkey', sa.Integer, |
|
38 | Base.metadata, | |
40 |
|
|
39 | sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")), | |
41 |
sa.Column( |
|
40 | sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")), | |
42 | sa.ForeignKey('resources.resource_id')) |
|
|||
43 | ) |
|
41 | ) | |
44 |
|
42 | |||
45 | DATE_FRMT = '%Y-%m-%dT%H:%M' |
 |
43 | DATE_FRMT = "%Y-%m-%dT%H:%M" | 
46 |
|
44 | |||
47 |
|
45 | |||
48 | class AlertChannel(Base, BaseModel): |
|
46 | class AlertChannel(Base, BaseModel): | |
49 | """ |
|
47 | """ | |
50 | Stores information about possible alerting options |
|
48 | Stores information about possible alerting options | |
51 | """ |
|
49 | """ | |
52 | __tablename__ = 'alert_channels' |
|
50 | ||
53 | __possible_channel_names__ = ['email'] |
|
51 | __tablename__ = "alert_channels" | |
|
52 | __possible_channel_names__ = ["email"] | |||
54 | __mapper_args__ = { |
|
53 | __mapper_args__ = { | |
55 | 'polymorphic_on': 'channel_name', |
 |
54 | "polymorphic_on": "channel_name", | 
56 | 'polymorphic_identity': 'integration', |
 |
55 | "polymorphic_identity": "integration", | 
57 | } |
|
56 | } | |
58 |
|
57 | |||
59 |
owner_id = sa.Column( |
|
58 | owner_id = sa.Column( | |
60 | sa.ForeignKey('users.id', onupdate='CASCADE', |
|
59 | sa.Unicode(30), | |
61 | ondelete='CASCADE')) |
|
60 | sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), | |
|
61 | ) | |||
62 | channel_name = sa.Column(sa.Unicode(25), nullable=False) |
|
62 | channel_name = sa.Column(sa.Unicode(25), nullable=False) | |
63 | channel_value = sa.Column(sa.Unicode(80), nullable=False, default='') |
 |
63 | channel_value = sa.Column(sa.Unicode(80), nullable=False, default="") | 
64 | channel_json_conf = sa.Column(JSON(), nullable=False, default='') |
 |
64 | channel_json_conf = sa.Column(JSON(), nullable=False, default="") | 
65 | channel_validated = sa.Column(sa.Boolean, nullable=False, |
|
65 | channel_validated = sa.Column(sa.Boolean, nullable=False, default=False) | |
66 | default=False) |
|
66 | send_alerts = sa.Column(sa.Boolean, nullable=False, default=True) | |
67 |
|
|
67 | daily_digest = sa.Column(sa.Boolean, nullable=False, default=True) | |
68 | default=True) |
|
68 | integration_id = sa.Column( | |
69 | daily_digest = sa.Column(sa.Boolean, nullable=False, |
|
69 | sa.Integer, sa.ForeignKey("integrations.id"), nullable=True | |
70 | default=True) |
|
70 | ) | |
71 | integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'), |
|
|||
72 | nullable=True) |
|
|||
73 | pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True) |
|
71 | pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True) | |
74 |
|
72 | |||
75 |
channel_actions = sa.orm.relationship( |
|
73 | channel_actions = sa.orm.relationship( | |
76 | cascade="all", |
|
74 | "AlertChannelAction", | |
77 | passive_deletes=True, |
|
75 | cascade="all", | |
78 | passive_updates=True, |
|
76 | passive_deletes=True, | |
79 | secondary=channel_rules_m2m_table, |
|
77 | passive_updates=True, | |
80 | backref='channels') |
|
78 | secondary=channel_rules_m2m_table, | |
81 | resources = sa.orm.relationship('Resource', |
|
79 | backref="channels", | |
82 | cascade="all", |
|
80 | ) | |
83 | passive_deletes=True, |
|
81 | resources = sa.orm.relationship( | |
84 | passive_updates=True, |
|
82 | "Resource", | |
85 | secondary=channel_resources_m2m_table, |
|
83 | cascade="all", | |
86 | backref='resources') |
|
84 | passive_deletes=True, | |
|
85 | passive_updates=True, | |||
|
86 | secondary=channel_resources_m2m_table, | |||
|
87 | backref="resources", | |||
|
88 | ) | |||
87 |
|
89 | |||
88 | @property |
|
90 | @property | |
89 | def channel_visible_value(self): |
|
91 | def channel_visible_value(self): | |
90 | if self.integration: |
|
92 | if self.integration: | |
91 | return '{}: {}'.format( |
 |
93 | return "{}: {}".format( | 
92 | self.channel_name, |
|
94 | self.channel_name, self.integration.resource.resource_name | |
93 | self.integration.resource.resource_name |
|
|||
94 | ) |
|
95 | ) | |
95 |
|
96 | |||
96 | return '{}: {}'.format( |
|
97 | return "{}: {}".format(self.channel_name, self.channel_value) | |
97 | self.channel_name, |
|
|||
98 | self.channel_value |
|
|||
99 | ) |
|
|||
100 |
|
98 | |||
101 | def get_dict(self, exclude_keys=None, include_keys=None, |
|
99 | def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True): | |
102 | extended_info=True): |
|
|||
103 | """ |
|
100 | """ | |
104 | Returns dictionary with required information that will be consumed by |
|
101 | Returns dictionary with required information that will be consumed by | |
105 | angular |
|
102 | angular | |
106 | """ |
|
103 | """ | |
107 | instance_dict = super(AlertChannel, self).get_dict(exclude_keys, |
|
104 | instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys) | |
108 | include_keys) |
|
|||
109 | exclude_keys_list = exclude_keys or [] |
|
105 | exclude_keys_list = exclude_keys or [] | |
110 | include_keys_list = include_keys or [] |
|
106 | include_keys_list = include_keys or [] | |
111 |
|
107 | |||
112 | instance_dict['supports_report_alerting'] = True |
 |
108 | instance_dict["supports_report_alerting"] = True | 
113 | instance_dict['channel_visible_value'] = self.channel_visible_value |
 |
109 | instance_dict["channel_visible_value"] = self.channel_visible_value | 
114 |
|
110 | |||
115 | if extended_info: |
|
111 | if extended_info: | |
116 | instance_dict['actions'] = [ |
 |
112 | instance_dict["actions"] = [ | 
117 | rule.get_dict(extended_info=True) for |
|
113 | rule.get_dict(extended_info=True) for rule in self.channel_actions | |
118 | rule in self.channel_actions] |
|
114 | ] | |
119 |
|
115 | |||
120 | del instance_dict['channel_json_conf'] |
 |
116 | del instance_dict["channel_json_conf"] | 
121 |
|
117 | |||
122 | if self.integration: |
|
118 | if self.integration: | |
123 | instance_dict[ |
|
119 | instance_dict[ | |
124 |
|
|
120 | "supports_report_alerting" | |
125 |
|
|
121 | ] = self.integration.supports_report_alerting | |
126 | d = {} |
|
122 | d = {} | |
127 | for k in instance_dict.keys(): |
|
123 | for k in instance_dict.keys(): | |
128 |
if |
|
124 | if k not in exclude_keys_list and ( | |
129 |
|
|
125 | k in include_keys_list or not include_keys | |
|
126 | ): | |||
130 | d[k] = instance_dict[k] |
|
127 | d[k] = instance_dict[k] | |
131 | return d |
|
128 | return d | |
132 |
|
129 | |||
133 | def __repr__(self): |
|
130 | def __repr__(self): | |
134 | return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name, |
 |
131 | return "<AlertChannel: (%s,%s), user:%s>" % ( | 
135 | self.channel_value, |
|
132 | self.channel_name, | |
136 | self.user_name,) |
|
133 | self.channel_value, | |
|
134 | self.user_name, | |||
|
135 | ) | |||
137 |
|
136 | |||
138 | def send_digest(self, **kwargs): |
|
137 | def send_digest(self, **kwargs): | |
139 | """ |
|
138 | """ | |
140 | This should implement daily top error report notifications |
|
139 | This should implement daily top error report notifications | |
141 | """ |
|
140 | """ | |
142 | log.warning('send_digest NOT IMPLEMENTED') |
 |
141 | log.warning("send_digest NOT IMPLEMENTED") | 
143 |
|
142 | |||
144 | def notify_reports(self, **kwargs): |
|
143 | def notify_reports(self, **kwargs): | |
145 | """ |
|
144 | """ | |
146 | This should implement notification of reports that occured in 1 min |
|
145 | This should implement notification of reports that occured in 1 min | |
147 | interval |
|
146 | interval | |
148 | """ |
|
147 | """ | |
149 | log.warning('notify_reports NOT IMPLEMENTED') |
 |
148 | log.warning("notify_reports NOT IMPLEMENTED") | 
150 |
|
149 | |||
151 | def notify_alert(self, **kwargs): |
|
150 | def notify_alert(self, **kwargs): | |
152 | """ |
|
151 | """ | |
153 | Notify user of report/uptime/chart threshold events based on events alert |
|
152 | Notify user of report/uptime/chart threshold events based on events alert | |
154 | type |
|
153 | type | |
155 |
|
154 | |||
156 | Kwargs: |
|
155 | Kwargs: | |
157 | application: application that the event applies for, |
|
156 | application: application that the event applies for, | |
158 | event: event that is notified, |
|
157 | event: event that is notified, | |
159 | user: user that should be notified |
|
158 | user: user that should be notified | |
160 | request: request object |
|
159 | request: request object | |
161 |
|
160 | |||
162 | """ |
|
161 | """ | |
163 | alert_name = kwargs['event'].unified_alert_name() |
 |
162 | alert_name = kwargs["event"].unified_alert_name() | 
164 | if alert_name in ['slow_report_alert', 'error_report_alert']: |
 |
163 | if alert_name in ["slow_report_alert", "error_report_alert"]: | 
165 | self.notify_report_alert(**kwargs) |
|
164 | self.notify_report_alert(**kwargs) | |
166 | elif alert_name == 'uptime_alert': |
 |
165 | elif alert_name == "uptime_alert": | 
167 | self.notify_uptime_alert(**kwargs) |
|
166 | self.notify_uptime_alert(**kwargs) | |
168 | elif alert_name == 'chart_alert': |
 |
167 | elif alert_name == "chart_alert": | 
169 | self.notify_chart_alert(**kwargs) |
|
168 | self.notify_chart_alert(**kwargs) | |
170 |
|
169 | |||
171 | def notify_chart_alert(self, **kwargs): |
|
170 | def notify_chart_alert(self, **kwargs): | |
172 | """ |
|
171 | """ | |
173 | This should implement report open/close alerts notifications |
|
172 | This should implement report open/close alerts notifications | |
174 | """ |
|
173 | """ | |
175 | log.warning('notify_chart_alert NOT IMPLEMENTED') |
 |
174 | log.warning("notify_chart_alert NOT IMPLEMENTED") | 
176 |
|
175 | |||
177 | def notify_report_alert(self, **kwargs): |
|
176 | def notify_report_alert(self, **kwargs): | |
178 | """ |
|
177 | """ | |
179 | This should implement report open/close alerts notifications |
|
178 | This should implement report open/close alerts notifications | |
180 | """ |
|
179 | """ | |
181 | log.warning('notify_report_alert NOT IMPLEMENTED') |
 |
180 | log.warning("notify_report_alert NOT IMPLEMENTED") | 
182 |
|
181 | |||
183 | def notify_uptime_alert(self, **kwargs): |
|
182 | def notify_uptime_alert(self, **kwargs): | |
184 | """ |
|
183 | """ | |
185 | This should implement uptime open/close alerts notifications |
|
184 | This should implement uptime open/close alerts notifications | |
186 | """ |
|
185 | """ | |
187 | log.warning('notify_uptime_alert NOT IMPLEMENTED') |
 |
186 | log.warning("notify_uptime_alert NOT IMPLEMENTED") | 
188 |
|
187 | |||
189 | def get_notification_basic_vars(self, kwargs): |
|
188 | def get_notification_basic_vars(self, kwargs): | |
190 | """ |
|
189 | """ | |
191 | Sets most common variables used later for rendering notifications for |
|
190 | Sets most common variables used later for rendering notifications for | |
192 | channel |
|
191 | channel | |
193 | """ |
|
192 | """ | |
194 | if 'event' in kwargs: |
 |
193 | if "event" in kwargs: | 
195 | kwargs['since_when'] = kwargs['event'].start_date |
 |
194 | kwargs["since_when"] = kwargs["event"].start_date | 
196 |
|
195 | |||
197 | url_start_date = kwargs.get('since_when') - timedelta(minutes=1) |
 |
196 | url_start_date = kwargs.get("since_when") - timedelta(minutes=1) | 
198 | url_end_date = kwargs.get('since_when') + timedelta(minutes=4) |
 |
197 | url_end_date = kwargs.get("since_when") + timedelta(minutes=4) | 
199 | tmpl_vars = { |
|
198 | tmpl_vars = { | |
200 | "timestamp": kwargs['since_when'], |
 |
199 | "timestamp": kwargs["since_when"], | 
201 | "user": kwargs['user'], |
 |
200 | "user": kwargs["user"], | 
202 | "since_when": kwargs.get('since_when'), |
 |
201 | "since_when": kwargs.get("since_when"), | 
203 | "url_start_date": url_start_date, |
|
202 | "url_start_date": url_start_date, | |
204 | "url_end_date": url_end_date |
|
203 | "url_end_date": url_end_date, | |
205 | } |
|
204 | } | |
206 | tmpl_vars["resource_name"] = kwargs['resource'].resource_name |
 |
205 | tmpl_vars["resource_name"] = kwargs["resource"].resource_name | 
207 | tmpl_vars["resource"] = kwargs['resource'] |
 |
206 | tmpl_vars["resource"] = kwargs["resource"] | 
208 |
|
207 | |||
209 | if 'event' in kwargs: |
 |
208 | if "event" in kwargs: | 
210 | tmpl_vars['event_values'] = kwargs['event'].values |
 |
209 | tmpl_vars["event_values"] = kwargs["event"].values | 
211 | tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name() |
 |
210 | tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name() | 
212 | tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action() |
 |
211 | tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action() | 
213 | return tmpl_vars |
|
212 | return tmpl_vars | |
214 |
|
213 | |||
215 | def report_alert_notification_vars(self, kwargs): |
|
214 | def report_alert_notification_vars(self, kwargs): | |
216 | tmpl_vars = self.get_notification_basic_vars(kwargs) |
|
215 | tmpl_vars = self.get_notification_basic_vars(kwargs) | |
217 |
reports = kwargs.get( |
|
216 | reports = kwargs.get("reports", []) | |
218 | tmpl_vars["reports"] = reports |
|
217 | tmpl_vars["reports"] = reports | |
219 | tmpl_vars["confirmed_total"] = len(reports) |
|
218 | tmpl_vars["confirmed_total"] = len(reports) | |
220 |
|
219 | |||
221 | tmpl_vars["report_type"] = "error reports" |
|
220 | tmpl_vars["report_type"] = "error reports" | |
222 |
tmpl_vars["url_report_type"] = |
|
221 | tmpl_vars["url_report_type"] = "report/list" | |
223 |
|
222 | |||
224 |
alert_type = tmpl_vars.get( |
|
223 | alert_type = tmpl_vars.get("alert_type", "") | |
225 |
if |
|
224 | if "slow_report" in alert_type: | |
226 | tmpl_vars["report_type"] = "slow reports" |
|
225 | tmpl_vars["report_type"] = "slow reports" | |
227 |
tmpl_vars["url_report_type"] = |
|
226 | tmpl_vars["url_report_type"] = "report/list_slow" | |
228 |
|
227 | |||
229 |
app_url = kwargs[ |
|
228 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
230 |
|
229 | |||
231 |
destination_url = kwargs[ |
|
230 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
232 | _app_url=app_url) |
|
|||
233 | if alert_type: |
|
231 | if alert_type: | |
234 |
destination_url += |
|
232 | destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format( | |
235 | tmpl_vars["url_report_type"], |
|
233 | tmpl_vars["url_report_type"], | |
236 |
tmpl_vars[ |
|
234 | tmpl_vars["resource"].resource_id, | |
237 |
tmpl_vars[ |
|
235 | tmpl_vars["url_start_date"].strftime(DATE_FRMT), | |
238 |
tmpl_vars[ |
|
236 | tmpl_vars["url_end_date"].strftime(DATE_FRMT), | |
239 | ) |
|
237 | ) | |
240 | else: |
|
238 | else: | |
241 |
destination_url += |
|
239 | destination_url += "ui/{}?resource={}".format( | |
242 | tmpl_vars["url_report_type"], |
|
240 | tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id | |
243 | tmpl_vars['resource'].resource_id |
|
|||
244 | ) |
|
241 | ) | |
245 | tmpl_vars["destination_url"] = destination_url |
|
242 | tmpl_vars["destination_url"] = destination_url | |
246 |
|
243 | |||
247 | return tmpl_vars |
|
244 | return tmpl_vars | |
248 |
|
245 | |||
249 | def uptime_alert_notification_vars(self, kwargs): |
|
246 | def uptime_alert_notification_vars(self, kwargs): | |
250 | tmpl_vars = self.get_notification_basic_vars(kwargs) |
|
247 | tmpl_vars = self.get_notification_basic_vars(kwargs) | |
251 |
app_url = kwargs[ |
|
248 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
252 |
destination_url = kwargs[ |
|
249 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
253 |
destination_url += |
|
250 | destination_url += "ui/{}?resource={}".format( | |
254 | 'uptime', |
|
251 | "uptime", tmpl_vars["resource"].resource_id | |
255 | tmpl_vars['resource'].resource_id) |
|
252 | ) | |
256 |
tmpl_vars[ |
|
253 | tmpl_vars["destination_url"] = destination_url | |
257 |
|
254 | |||
258 |
reason = |
|
255 | reason = "" | |
259 |
e_values = tmpl_vars.get( |
|
256 | e_values = tmpl_vars.get("event_values") | |
260 |
|
257 | |||
261 |
if e_values and e_values.get( |
|
258 | if e_values and e_values.get("response_time") == 0: | |
262 |
reason += |
|
259 | reason += " Response time was slower than 20 seconds." | |
263 | elif e_values: |
|
260 | elif e_values: | |
264 |
code = e_values.get( |
|
261 | code = e_values.get("status_code") | |
265 |
reason += |
|
262 | reason += " Response status code: %s." % code | |
266 |
|
263 | |||
267 |
tmpl_vars[ |
|
264 | tmpl_vars["reason"] = reason | |
268 | return tmpl_vars |
|
265 | return tmpl_vars | |
269 |
|
266 | |||
270 | def chart_alert_notification_vars(self, kwargs): |
|
267 | def chart_alert_notification_vars(self, kwargs): | |
271 | tmpl_vars = self.get_notification_basic_vars(kwargs) |
|
268 | tmpl_vars = self.get_notification_basic_vars(kwargs) | |
272 |
tmpl_vars[ |
|
269 | tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"] | |
273 |
tmpl_vars[ |
|
270 | tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or "" | |
274 | 'action_name') or '' |
|
271 | matched_values = tmpl_vars["event_values"]["matched_step_values"] | |
275 | matched_values = tmpl_vars['event_values']['matched_step_values'] |
|
272 | tmpl_vars["readable_values"] = [] | |
276 | tmpl_vars['readable_values'] = [] |
|
273 | for key, value in list(matched_values["values"].items()): | |
277 | for key, value in list(matched_values['values'].items()): |
|
274 | matched_label = matched_values["labels"].get(key) | |
278 | matched_label = matched_values['labels'].get(key) |
|
|||
279 | if matched_label: |
|
275 | if matched_label: | |
280 |
tmpl_vars[ |
|
276 | tmpl_vars["readable_values"].append( | |
281 |
|
|
277 | {"label": matched_label["human_label"], "value": value} | |
282 |
|
|
278 | ) | |
283 | }) |
|
279 | tmpl_vars["readable_values"] = sorted( | |
284 | tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'], |
|
280 | tmpl_vars["readable_values"], key=lambda x: x["label"] | |
285 | key=lambda x: x['label']) |
|
281 | ) | |
286 |
start_date = convert_date(tmpl_vars[ |
|
282 | start_date = convert_date(tmpl_vars["event_values"]["start_interval"]) | |
287 | end_date = None |
|
283 | end_date = None | |
288 |
if tmpl_vars[ |
|
284 | if tmpl_vars["event_values"].get("end_interval"): | |
289 |
end_date = convert_date(tmpl_vars[ |
|
285 | end_date = convert_date(tmpl_vars["event_values"]["end_interval"]) | |
290 |
|
286 | |||
291 |
app_url = kwargs[ |
|
287 | app_url = kwargs["request"].registry.settings["_mail_url"] | |
292 |
destination_url = kwargs[ |
|
288 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |
293 | to_encode = { |
|
289 | to_encode = { | |
294 |
|
|
290 | "resource": tmpl_vars["event_values"]["resource"], | |
295 |
|
|
291 | "start_date": start_date.strftime(DATE_FRMT), | |
296 | } |
|
292 | } | |
297 | if end_date: |
|
293 | if end_date: | |
298 |
to_encode[ |
|
294 | to_encode["end_date"] = end_date.strftime(DATE_FRMT) | |
299 |
|
295 | |||
300 |
destination_url += |
|
296 | destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode)) | |
301 | 'logs', |
|
297 | tmpl_vars["destination_url"] = destination_url | |
302 | urllib.parse.urlencode(to_encode) |
|
|||
303 | ) |
|
|||
304 | tmpl_vars['destination_url'] = destination_url |
|
|||
305 | return tmpl_vars |
|
298 | return tmpl_vars |
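[Editorial sketch] `notify_alert` above is a dispatcher: the event's unified alert name selects the concrete notification method. A hedged sketch of the calling convention, with every object name illustrative rather than taken from this diff:

    # Dispatching an event through an alert channel.
    channel.notify_alert(
        event=event,        # event.unified_alert_name() drives the dispatch
        resource=resource,
        user=user,
        request=request,
    )
    # "error_report_alert" / "slow_report_alert" -> notify_report_alert()
    # "uptime_alert"                             -> notify_uptime_alert()
    # "chart_alert"                              -> notify_chart_alert()

Concrete channel types (e-mail and the integrations) are expected to override the notify_* stubs, which in this base class only log NOT IMPLEMENTED warnings.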
@@ -1,79 +1,84 @@

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sqlalchemy as sa

from ziggurat_foundations.models.services.resource import ResourceService
from appenlight.models import Base, get_db_session
from sqlalchemy.orm import validates
from ziggurat_foundations.models.base import BaseModel


class AlertChannelAction(Base, BaseModel):
    """
    Stores notifications conditions for user's alert channels
    This is later used for rule parsing like "alert if http_status == 500"
    """

    __tablename__ = "alert_channels_actions"

    types = ["report", "chart"]

    owner_id = sa.Column(
        sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
    )
    resource_id = sa.Column(sa.Integer())
    action = sa.Column(sa.Unicode(10), nullable=False, default="always")
    type = sa.Column(sa.Unicode(10), nullable=False)
    other_id = sa.Column(sa.Unicode(40))
    pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
    rule = sa.Column(
        sa.dialects.postgresql.JSON,
        nullable=False,
        default={"field": "http_status", "op": "ge", "value": "500"},
    )
    config = sa.Column(sa.dialects.postgresql.JSON)
    name = sa.Column(sa.Unicode(255))

    @validates("notify_type")
    def validate_email(self, key, notify_type):
        assert notify_type in ["always", "only_first"]
        return notify_type

    def resource_name(self, db_session=None):
        db_session = get_db_session(db_session)
        if self.resource_id:
            return ResourceService.by_resource_id(
                self.resource_id, db_session=db_session
            ).resource_name
        else:
            return "any resource"

    def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
        """
        Returns dictionary with required information that will be consumed by
        angular
        """
        instance_dict = super(AlertChannelAction, self).get_dict()
        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []
        if extended_info:
            instance_dict["channels"] = [
                c.get_dict(extended_info=False) for c in self.channels
            ]

        d = {}
        for k in instance_dict.keys():
            if k not in exclude_keys_list and (
                k in include_keys_list or not include_keys
            ):
                d[k] = instance_dict[k]
        return d
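
The rule column above defaults to a flat JSON condition, and the class
docstring's example "alert if http_status == 500" would be stored in the same
shape. A minimal sketch of how such a rule could be checked against a report's
values; the matches() helper and the OPS mapping are hypothetical, for
illustration only (the real rule engine lives elsewhere in AppEnlight):

    import operator

    OPS = {"eq": operator.eq, "ge": operator.ge, "le": operator.le}  # assumed subset

    def matches(rule, report_values):
        # hypothetical evaluator for one flat rule dict
        actual = int(report_values.get(rule["field"], 0))
        return OPS[rule["op"]](actual, int(rule["value"]))

    default_rule = {"field": "http_status", "op": "ge", "value": "500"}  # column default
    print(matches(default_rule, {"http_status": 502}))  # True
    print(matches(default_rule, {"http_status": 200}))  # False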
@@ -1,16 +1,15 @@

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -1,188 +1,191 @@

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from appenlight.models.alert_channel import AlertChannel
from appenlight.models.integrations.campfire import CampfireIntegration
from webhelpers2.text import truncate

log = logging.getLogger(__name__)


class CampfireAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "campfire"}

    @property
    def client(self):
        client = CampfireIntegration.create_client(
            self.integration.config["api_token"], self.integration.config["account"]
        )
        return client

    def notify_reports(self, **kwargs):
        """
        Notify user of individual reports

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        app_url = kwargs["request"].registry.settings["_mail_url"]
        destination_url = kwargs["request"].route_url("/", app_url=app_url)
        f_args = (
            "report",
            template_vars["resource"].resource_id,
            template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
            template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
        )
        destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)

        if template_vars["confirmed_total"] > 1:
            template_vars["title"] = "%s - %s reports" % (
                template_vars["resource_name"],
                template_vars["confirmed_total"],
            )
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 90
            )
            template_vars["title"] = "%s - '%s' report" % (
                template_vars["resource_name"],
                error_title,
            )

        template_vars["title"] += " " + destination_url

        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

        for room in self.integration.config["rooms"].split(","):
            self.client.speak_to_room(room.strip(), template_vars["title"])

    def notify_report_alert(self, **kwargs):
        """
        Build and send report alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":
            title = "ALERT %s: %s - %s %s %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                kwargs["event"].values["reports"],
                template_vars["report_type"],
                template_vars["destination_url"],
            )

        else:
            title = "ALERT %s: %s type: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                template_vars["alert_type"].replace("_", " "),
            )
        for room in self.integration.config["rooms"].split(","):
            self.client.speak_to_room(room.strip(), title, sound="VUVUZELA")

    def notify_uptime_alert(self, **kwargs):
        """
        Build and send uptime alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.uptime_alert_notification_vars(kwargs)

        message = "ALERT %s: %s has uptime issues %s\n\n" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
            template_vars["destination_url"],
        )
        message += template_vars["reason"]

        for room in self.integration.config["rooms"].split(","):
            self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")

    def send_digest(self, **kwargs):
        """
        Build and send daily digest notification

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)
        f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
        message = "Daily report digest: %s - %s reports" % f_args
        message += "{}\n".format(template_vars["destination_url"])
        for room in self.integration.config["rooms"].split(","):
            self.client.speak_to_room(room.strip(), message)

        log_msg = "DIGEST : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

    def notify_chart_alert(self, **kwargs):
        """
        Build and send chart alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.chart_alert_notification_vars(kwargs)
        message = (
            'ALERT {}: value in "{}" chart: '
            'met alert "{}" criteria {} \n'.format(
                template_vars["alert_action"],
                template_vars["chart_name"],
                template_vars["action_name"],
                template_vars["destination_url"],
            )
        )

        for item in template_vars["readable_values"]:
            message += "{}: {}\n".format(item["label"], item["value"])

        for room in self.integration.config["rooms"].split(","):
            self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
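
Every notify_* method above ends with the same fan-out: the integration config
stores "rooms" as a single comma-separated string, and each message is posted
once per room after stripping whitespace. A self-contained sketch with a
stand-in client (the real one comes from CampfireIntegration.create_client):

    class FakeCampfireClient:
        # stand-in for the Campfire API client, illustration only
        def speak_to_room(self, room, message, sound=None):
            print("room=%r sound=%r :: %s" % (room, sound, message))

    client = FakeCampfireClient()
    rooms = "ops, alerts"  # shaped like self.integration.config["rooms"]
    for room in rooms.split(","):
        client.speak_to_room(room.strip(), "ALERT OPEN: myapp uptime", sound="VUVUZELA")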
@@ -1,175 +1,192 @@

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from appenlight.models.alert_channel import AlertChannel
from appenlight.models.services.user import UserService
from webhelpers2.text import truncate

log = logging.getLogger(__name__)


class EmailAlertChannel(AlertChannel):
    """
    Default email alerting channel
    """

    __mapper_args__ = {"polymorphic_identity": "email"}

    def notify_reports(self, **kwargs):
        """
        Notify user of individual reports

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if template_vars["confirmed_total"] > 1:
            template_vars["title"] = "AppEnlight :: %s - %s reports" % (
                template_vars["resource_name"],
                template_vars["confirmed_total"],
            )
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 20
            )
            template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
                template_vars["resource_name"],
                error_title,
            )
        UserService.send_email(
            kwargs["request"],
            [self.channel_value],
            template_vars,
            "/email_templates/notify_reports.jinja2",
        )
        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

    def send_digest(self, **kwargs):
        """
        Build and send daily digest notification

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)
        title = "AppEnlight :: Daily report digest: %s - %s reports"
        template_vars["email_title"] = title % (
            template_vars["resource_name"],
            template_vars["confirmed_total"],
        )

        UserService.send_email(
            kwargs["request"],
            [self.channel_value],
            template_vars,
            "/email_templates/notify_reports.jinja2",
            immediately=True,
            silent=True,
        )
        log_msg = "DIGEST : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

    def notify_report_alert(self, **kwargs):
        """
        Build and send report alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":
            title = "AppEnlight :: ALERT %s: %s - %s %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                kwargs["event"].values["reports"],
                template_vars["report_type"],
            )
        else:
            title = "AppEnlight :: ALERT %s: %s type: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                template_vars["alert_type"].replace("_", " "),
            )
        template_vars["email_title"] = title
        UserService.send_email(
            kwargs["request"],
            [self.channel_value],
            template_vars,
            "/email_templates/alert_reports.jinja2",
        )

    def notify_uptime_alert(self, **kwargs):
        """
        Build and send uptime alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.uptime_alert_notification_vars(kwargs)
        title = "AppEnlight :: ALERT %s: %s has uptime issues" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
        )
        template_vars["email_title"] = title

        UserService.send_email(
            kwargs["request"],
            [self.channel_value],
            template_vars,
            "/email_templates/alert_uptime.jinja2",
        )

    def notify_chart_alert(self, **kwargs):
        """
        Build and send chart alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.chart_alert_notification_vars(kwargs)

        title = (
            'AppEnlight :: ALERT {} value in "{}" chart'
            ' met alert "{}" criteria'.format(
                template_vars["alert_action"],
                template_vars["chart_name"],
                template_vars["action_name"],
            )
        )
        template_vars["email_title"] = title
        UserService.send_email(
            kwargs["request"],
            [self.channel_value],
            template_vars,
            "/email_templates/alert_chart.jinja2",
        )
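
The subject-line branching shared by notify_reports above is easier to see
with concrete values: multiple confirmed reports get a count, a single report
gets its error string trimmed by the same truncate helper the channel imports.
The resource name and error text below are made up:

    from webhelpers2.text import truncate

    class Report:
        # stand-in for a report row, illustration only
        error = "IndexError: list index out of range"

    confirmed_total, reports = 1, [(None, Report())]
    if confirmed_total > 1:
        title = "AppEnlight :: %s - %s reports" % ("myapp", confirmed_total)
    else:
        title = "AppEnlight :: %s - '%s' report" % (
            "myapp",
            truncate(reports[0][1].error or "slow report", 20),
        )
    print(title)  # e.g. AppEnlight :: myapp - 'IndexError: list ...' report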
@@ -1,233 +1,225 @@

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from appenlight.models.alert_channel import AlertChannel
from appenlight.models.integrations.flowdock import FlowdockIntegration
from webhelpers2.text import truncate

log = logging.getLogger(__name__)


class FlowdockAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "flowdock"}

    def notify_reports(self, **kwargs):
        """
        Notify user of individual reports

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        app_url = kwargs["request"].registry.settings["_mail_url"]
        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
        f_args = (
            "report",
            template_vars["resource"].resource_id,
            template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
            template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
        )
        destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)

        if template_vars["confirmed_total"] > 1:
            template_vars["title"] = "%s - %s reports" % (
                template_vars["resource_name"],
                template_vars["confirmed_total"],
            )
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 90
            )
            template_vars["title"] = "%s - '%s' report" % (
                template_vars["resource_name"],
                error_title,
            )

        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": template_vars["title"],
            "content": "New report present",
            "tags": ["appenlight"],
            "link": destination_url,
        }
        client.send_to_inbox(payload)

    def notify_report_alert(self, **kwargs):
        """
        Build and send report alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":

            title = "ALERT %s: %s - %s %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                kwargs["event"].values["reports"],
                template_vars["report_type"],
            )

        else:
            title = "ALERT %s: %s type: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                template_vars["alert_type"].replace("_", " "),
            )

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": title,
            "content": "Investigation required",
            "tags": ["appenlight", "alert", template_vars["alert_type"]],
            "link": template_vars["destination_url"],
        }
        client.send_to_inbox(payload)

    def notify_uptime_alert(self, **kwargs):
        """
        Build and send uptime alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.uptime_alert_notification_vars(kwargs)

        message = "ALERT %s: %s has uptime issues" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
        )
        submessage = "Info: "
        submessage += template_vars["reason"]

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": message,
            "content": submessage,
            "tags": ["appenlight", "alert", "uptime"],
            "link": template_vars["destination_url"],
        }
        client.send_to_inbox(payload)

    def send_digest(self, **kwargs):
        """
        Build and send daily digest notification

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)
        message = "Daily report digest: %s - %s reports" % (
            template_vars["resource_name"],
            template_vars["confirmed_total"],
        )

        f_args = (template_vars["confirmed_total"], template_vars["timestamp"])

        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": message,
            "content": "%s reports in total since %s" % f_args,
            "tags": ["appenlight", "digest"],
            "link": template_vars["destination_url"],
        }

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        client.send_to_inbox(payload)

        log_msg = "DIGEST : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

    def notify_chart_alert(self, **kwargs):
        """
        Build and send chart alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.chart_alert_notification_vars(kwargs)

        message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
            template_vars["alert_action"],
            template_vars["chart_name"],
            template_vars["action_name"],
        )
        submessage = "Info: "
        for item in template_vars["readable_values"]:
            submessage += "{}: {}\n".format(item["label"], item["value"])

        client = FlowdockIntegration.create_client(self.integration.config["api_token"])
        payload = {
            "source": "AppEnlight",
            "from_address": kwargs["request"].registry.settings["mailing.from_email"],
            "subject": message,
            "content": submessage,
            "tags": ["appenlight", "alert", "chart"],
            "link": template_vars["destination_url"],
        }
        client.send_to_inbox(payload)
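
All four Flowdock notification paths above converge on the same team-inbox
payload; only subject, content, and tags vary per event type. The shape, with
placeholder values standing in for the request settings and template_vars:

    payload = {
        "source": "AppEnlight",
        "from_address": "alerts@example.com",  # settings["mailing.from_email"]
        "subject": "ALERT OPEN: myapp - 5 error reports",  # placeholder subject
        "content": "Investigation required",
        "tags": ["appenlight", "alert", "error_report_alert"],  # placeholder type
        "link": "https://appenlight.example.com/ui/report",  # placeholder URL
    }
    # client.send_to_inbox(payload) then delivers it through the Flowdock API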
@@ -1,229 +1,238 @@

# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from appenlight.models.alert_channel import AlertChannel
from appenlight.models.integrations.hipchat import HipchatIntegration
from webhelpers2.text import truncate

log = logging.getLogger(__name__)


class HipchatAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "hipchat"}

    def notify_reports(self, **kwargs):
        """
        Notify user of individual reports

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        app_url = kwargs["request"].registry.settings["_mail_url"]
        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
        f_args = (
            "report",
            template_vars["resource"].resource_id,
            template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
            template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
        )
        destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)

        if template_vars["confirmed_total"] > 1:
            template_vars["title"] = "%s - %s reports" % (
                template_vars["resource_name"],
                template_vars["confirmed_total"],
            )
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 90
            )
            template_vars["title"] = "%s - '%s' report" % (
                template_vars["resource_name"],
                error_title,
            )

        template_vars["title"] += " " + destination_url

        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

        client = HipchatIntegration.create_client(self.integration.config["api_token"])
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": template_vars["title"],
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "yellow",
                }
            )

    def notify_report_alert(self, **kwargs):
        """
        Build and send report alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":

            title = "ALERT %s: %s - %s %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                kwargs["event"].values["reports"],
                template_vars["report_type"],
            )

        else:
            title = "ALERT %s: %s type: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
                template_vars["alert_type"].replace("_", " "),
            )

        title += "\n " + template_vars["destination_url"]

        api_token = self.integration.config["api_token"]
        client = HipchatIntegration.create_client(api_token)
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": title,
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "red",
                    "notify": "1",
                }
            )

    def notify_uptime_alert(self, **kwargs):
        """
        Build and send uptime alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.uptime_alert_notification_vars(kwargs)

        message = "ALERT %s: %s has uptime issues\n" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
        )
        message += template_vars["reason"]
        message += "\n{}".format(template_vars["destination_url"])

        api_token = self.integration.config["api_token"]
        client = HipchatIntegration.create_client(api_token)
        for room in self.integration.config["rooms"].split(","):
            client.send(
                {
                    "message_format": "text",
                    "message": message,
                    "from": "AppEnlight",
                    "room_id": room.strip(),
                    "color": "red",
                    "notify": "1",
                }
            )

    def notify_chart_alert(self, **kwargs):
        """
        Build and send chart alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
|
172 | event: event that is notified, | |
167 | user: user that should be notified |
|
173 | user: user that should be notified | |
168 | request: request object |
|
174 | request: request object | |
169 |
|
175 | |||
170 | """ |
|
176 | """ | |
171 | template_vars = self.chart_alert_notification_vars(kwargs) |
|
177 | template_vars = self.chart_alert_notification_vars(kwargs) | |
172 | message = 'ALERT {}: value in "{}" chart: ' \ |
|
178 | message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format( | |
173 | 'met alert "{}" criteria\n'.format( |
|
179 | template_vars["alert_action"], | |
174 |
template_vars[ |
|
180 | template_vars["chart_name"], | |
175 |
template_vars[ |
|
181 | template_vars["action_name"], | |
176 | template_vars['action_name'], |
|
|||
177 | ) |
|
182 | ) | |
178 |
|
183 | |||
179 |
for item in template_vars[ |
|
184 | for item in template_vars["readable_values"]: | |
180 |
message += |
|
185 | message += "{}: {}\n".format(item["label"], item["value"]) | |
181 |
|
186 | |||
182 |
message += template_vars[ |
|
187 | message += template_vars["destination_url"] | |
183 |
|
188 | |||
184 |
api_token = self.integration.config[ |
|
189 | api_token = self.integration.config["api_token"] | |
185 | client = HipchatIntegration.create_client(api_token) |
|
190 | client = HipchatIntegration.create_client(api_token) | |
186 |
for room in self.integration.config[ |
|
191 | for room in self.integration.config["rooms"].split(","): | |
187 |
client.send( |
|
192 | client.send( | |
188 | "message_format": 'text', |
|
193 | { | |
189 |
"message": |
|
194 | "message_format": "text", | |
190 |
" |
|
195 | "message": message, | |
191 |
"r |
|
196 | "from": "AppEnlight", | |
192 |
" |
|
197 | "room_id": room.strip(), | |
193 |
" |
|
198 | "color": "red", | |
194 | }) |
|
199 | "notify": "1", | |
|
200 | } | |||
|
201 | ) | |||
195 |
|
202 | |||
196 | def send_digest(self, **kwargs): |
|
203 | def send_digest(self, **kwargs): | |
197 | """ |
|
204 | """ | |
198 | Build and send daily digest notification |
|
205 | Build and send daily digest notification | |
199 |
|
206 | |||
200 | kwargs: |
|
207 | kwargs: | |
201 | application: application that the event applies for, |
|
208 | application: application that the event applies for, | |
202 | user: user that should be notified |
|
209 | user: user that should be notified | |
203 | request: request object |
|
210 | request: request object | |
204 | since_when: reports are newer than this time value, |
|
211 | since_when: reports are newer than this time value, | |
205 | reports: list of reports to render |
|
212 | reports: list of reports to render | |
206 |
|
213 | |||
207 | """ |
|
214 | """ | |
208 | template_vars = self.report_alert_notification_vars(kwargs) |
|
215 | template_vars = self.report_alert_notification_vars(kwargs) | |
209 |
f_args = (template_vars[ |
|
216 | f_args = (template_vars["resource_name"], template_vars["confirmed_total"]) | |
210 | template_vars['confirmed_total'],) |
|
|||
211 | message = "Daily report digest: %s - %s reports" % f_args |
|
217 | message = "Daily report digest: %s - %s reports" % f_args | |
212 |
message += |
|
218 | message += "\n{}".format(template_vars["destination_url"]) | |
213 |
api_token = self.integration.config[ |
|
219 | api_token = self.integration.config["api_token"] | |
214 | client = HipchatIntegration.create_client(api_token) |
|
220 | client = HipchatIntegration.create_client(api_token) | |
215 |
for room in self.integration.config[ |
|
221 | for room in self.integration.config["rooms"].split(","): | |
216 |
client.send( |
|
222 | client.send( | |
217 | "message_format": 'text', |
|
223 | { | |
218 |
"message": |
|
224 | "message_format": "text", | |
219 |
" |
|
225 | "message": message, | |
220 |
"r |
|
226 | "from": "AppEnlight", | |
221 |
" |
|
227 | "room_id": room.strip(), | |
222 |
" |
|
228 | "color": "green", | |
223 | }) |
|
229 | "notify": "1", | |
224 |
|
230 | } | ||
225 | log_msg = 'DIGEST : %s via %s :: %s reports' % ( |
|
231 | ) | |
226 | kwargs['user'].user_name, |
|
232 | ||
|
233 | log_msg = "DIGEST : %s via %s :: %s reports" % ( | |||
|
234 | kwargs["user"].user_name, | |||
227 | self.channel_visible_value, |
|
235 | self.channel_visible_value, | |
228 |
template_vars[ |
|
236 | template_vars["confirmed_total"], | |
|
237 | ) | |||
229 | log.warning(log_msg) |
|
238 | log.warning(log_msg) |
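All four Hipchat notification paths above end in the same per-room loop; only the message text, color, and notify flag differ. A minimal sketch of that shared shape, with a stub standing in for the real API client (the stub and helper names are illustrative, not part of this changeset):

class StubHipchatClient(object):
    # Stand-in for HipchatIntegration.create_client(...); just echoes payloads.
    def send(self, payload):
        print(payload)


def send_to_rooms(client, rooms_config, message, color, notify=False):
    # rooms are configured as one comma-separated string, as in the code above
    for room in rooms_config.split(","):
        data = {
            "message_format": "text",
            "message": message,
            "from": "AppEnlight",
            "room_id": room.strip(),
            "color": color,
        }
        if notify:
            data["notify"] = "1"
        client.send(data)


send_to_rooms(StubHipchatClient(), "ops, alerts", "ALERT OPEN: demo-app", "red", notify=True)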
@@ -1,285 +1,270 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from appenlight.models.alert_channel import AlertChannel
from appenlight.models.integrations.slack import SlackIntegration
from webhelpers2.text import truncate

log = logging.getLogger(__name__)


class SlackAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "slack"}

    def notify_reports(self, **kwargs):
        """
        Notify user of individual reports

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)
        template_vars["title"] = template_vars["resource_name"]

        if template_vars["confirmed_total"] > 1:
            template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"]
        else:
            error_title = truncate(
                template_vars["reports"][0][1].error or "slow report", 90
            )
            template_vars["subtext"] = error_title

        log_msg = "NOTIFY : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":fire:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "*%s* - <%s| Browse>"
                    % (template_vars["title"], template_vars["destination_url"]),
                    "pretext": "*%s* - <%s| Browse>"
                    % (template_vars["title"], template_vars["destination_url"]),
                    "color": "warning",
                    "fields": [
                        {"value": "Info: %s" % template_vars["subtext"], "short": False}
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

    def notify_report_alert(self, **kwargs):
        """
        Build and send report alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.report_alert_notification_vars(kwargs)

        if kwargs["event"].unified_alert_action() == "OPEN":
            title = "*ALERT %s*: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
            )

            template_vars["subtext"] = "Got at least %s %s" % (
                kwargs["event"].values["reports"],
                template_vars["report_type"],
            )

        else:
            title = "*ALERT %s*: %s" % (
                template_vars["alert_action"],
                template_vars["resource_name"],
            )

            template_vars["subtext"] = ""

        alert_type = template_vars["alert_type"].replace("_", " ")
        alert_type = alert_type.replace("alert", "").capitalize()

        template_vars["type"] = "Type: %s" % alert_type

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":rage:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "%s - <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "pretext": "%s - <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "color": "danger",
                    "fields": [
                        {
                            "title": template_vars["type"],
                            "value": template_vars["subtext"],
                            "short": False,
                        }
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

    def notify_uptime_alert(self, **kwargs):
        """
        Build and send uptime alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.uptime_alert_notification_vars(kwargs)

        title = "*ALERT %s*: %s" % (
            template_vars["alert_action"],
            template_vars["resource_name"],
        )
        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":rage:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "pretext": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "color": "danger",
                    "fields": [
                        {
                            "title": "Application has uptime issues",
                            "value": template_vars["reason"],
                            "short": False,
                        }
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

    def notify_chart_alert(self, **kwargs):
        """
        Build and send chart alert notification

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        template_vars = self.chart_alert_notification_vars(kwargs)

        title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format(
            template_vars["alert_action"],
            template_vars["chart_name"],
            template_vars["action_name"],
        )

        subtext = ""
        for item in template_vars["readable_values"]:
            subtext += "{} - {}\n".format(item["label"], item["value"])

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "icon_emoji": ":rage:",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "pretext": "{} - <{}| Browse>".format(
                        title, template_vars["destination_url"]
                    ),
                    "color": "danger",
                    "fields": [
                        {
                            "title": "Following criteria were met:",
                            "value": subtext,
                            "short": False,
                        }
                    ],
                }
            ],
        }
        client.make_request(data=report_data)

    def send_digest(self, **kwargs):
        """
        Build and send daily digest notification

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.report_alert_notification_vars(kwargs)
        title = "*Daily report digest*: %s" % template_vars["resource_name"]

        subtext = "%s reports" % template_vars["confirmed_total"]

        client = SlackIntegration.create_client(self.integration.config["webhook_url"])
        report_data = {
            "username": "AppEnlight",
            "attachments": [
                {
                    "mrkdwn_in": ["text", "pretext", "title", "fallback"],
                    "fallback": "%s : <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "pretext": "%s: <%s| Browse>"
                    % (title, template_vars["destination_url"]),
                    "color": "good",
                    "fields": [{"title": "Got at least: %s" % subtext, "short": False}],
                }
            ],
        }
        client.make_request(data=report_data)

        log_msg = "DIGEST : %s via %s :: %s reports" % (
            kwargs["user"].user_name,
            self.channel_visible_value,
            template_vars["confirmed_total"],
        )
        log.warning(log_msg)
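The Slack channel builds one attachment per notification and hands it to make_request(). A runnable sketch of the payload notify_reports() assembles, with placeholder values standing in for the real template_vars:

import json

# Placeholder values; the real method pulls these from template_vars.
title = "demo-app"
destination_url = "https://example.com/ui/report/1"
subtext = "5 reports"

report_data = {
    "username": "AppEnlight",
    "icon_emoji": ":fire:",
    "attachments": [
        {
            "mrkdwn_in": ["text", "pretext", "title", "fallback"],
            "fallback": "*%s* - <%s| Browse>" % (title, destination_url),
            "pretext": "*%s* - <%s| Browse>" % (title, destination_url),
            "color": "warning",
            "fields": [{"value": "Info: %s" % subtext, "short": False}],
        }
    ],
}
print(json.dumps(report_data, indent=2))  # the body make_request() would post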
@@ -1,104 +1,113 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import uuid
import logging
import sqlalchemy as sa
from appenlight.models.resource import Resource
from sqlalchemy.orm import aliased

log = logging.getLogger(__name__)


def generate_api_key():
    uid = str(uuid.uuid4()).replace("-", "")
    return uid[0:32]


class Application(Resource):
    """
    Resource of application type
    """

    __tablename__ = "applications"
    __mapper_args__ = {"polymorphic_identity": "application"}

    # lists configurable possible permissions for this resource type
    __possible_permissions__ = ("view", "update_reports")

    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
        primary_key=True,
    )
    domains = sa.Column(sa.UnicodeText(), nullable=False, default="")
    api_key = sa.Column(
        sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
    )
    public_key = sa.Column(
        sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
    )
    default_grouping = sa.Column(
        sa.Unicode(20), nullable=False, default="url_traceback"
    )
    error_report_threshold = sa.Column(sa.Integer(), default=10)
    slow_report_threshold = sa.Column(sa.Integer(), default=10)
    allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False)

    @sa.orm.validates("default_grouping")
    def validate_default_grouping(self, key, grouping):
        """ validate if resource can have specific permission """
        assert grouping in ["url_type", "url_traceback", "traceback_server"]
        return grouping

    report_groups = sa.orm.relationship(
        "ReportGroup",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        lazy="dynamic",
        backref=sa.orm.backref("application", lazy="joined"),
    )

    postprocess_conf = sa.orm.relationship(
        "ApplicationPostprocessConf",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        backref="resource",
    )

    logs = sa.orm.relationship(
        "Log",
        lazy="dynamic",
        backref="application",
        passive_deletes=True,
        passive_updates=True,
    )

    integrations = sa.orm.relationship(
        "IntegrationBase",
        backref="resource",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
    )

    def generate_api_key(self):
        return generate_api_key()


def after_update(mapper, connection, target):
    from appenlight.models.services.application import ApplicationService

    log.info("clearing out ApplicationService cache")
    ApplicationService.by_id_cached().invalidate(target.resource_id)
    ApplicationService.by_api_key_cached().invalidate(target.api_key)


sa.event.listen(Application, "after_update", after_update)
sa.event.listen(Application, "after_delete", after_update)
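generate_api_key() relies on the fact that a uuid4 rendered without dashes is exactly 32 hex characters, so the [0:32] slice is a defensive no-op. A quick check:

import uuid

uid = str(uuid.uuid4()).replace("-", "")  # 36 chars minus 4 dashes = 32
api_key = uid[0:32]
assert len(uid) == 32 and api_key == uid
print(api_key)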
@@ -1,45 +1,47 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ziggurat_foundations.models.base import BaseModel
import sqlalchemy as sa

from appenlight.models import Base
from appenlight.models.report_group import ReportGroup


class ApplicationPostprocessConf(Base, BaseModel):
    """
    Stores prioritizing conditions for reports
    This is later used for rule parsing like "if 10 occurrences bump priority +1"
    """

    __tablename__ = "application_postprocess_conf"

    pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
    )
    do = sa.Column(sa.Unicode(25), nullable=False)
    new_value = sa.Column(sa.UnicodeText(), nullable=False, default="")
    rule = sa.Column(
        sa.dialects.postgresql.JSON,
        nullable=False,
        default={"field": "http_status", "op": "ge", "value": "500"},
    )

    def postprocess(self, item):
        new_value = int(self.new_value)
        item.priority = ReportGroup.priority + new_value
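The rule column holds a JSON condition; the default above reads as "http_status >= 500". AppEnlight's real rule parser lives elsewhere in the codebase, but a toy evaluator (illustrative only) shows what such a condition expresses before postprocess() bumps a report group's priority:

import operator

rule = {"field": "http_status", "op": "ge", "value": "500"}  # the column default


def matches(rule, report):
    # "ge" resolves to operator.ge, i.e. report[field] >= int(value)
    compare = getattr(operator, rule["op"])
    return compare(report[rule["field"]], int(rule["value"]))


print(matches(rule, {"http_status": 502}))  # True  -> priority would be bumped
print(matches(rule, {"http_status": 200}))  # False -> report left untouched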
@@ -1,52 +1,57 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import sqlalchemy as sa

from datetime import datetime
from appenlight.models import Base
from ziggurat_foundations.models.base import BaseModel
from ziggurat_foundations.models.services.user import UserService

log = logging.getLogger(__name__)


class AuthToken(Base, BaseModel):
    """
    Stores information about possible alerting options
    """

    __tablename__ = "auth_tokens"

    id = sa.Column(sa.Integer, primary_key=True, nullable=False)
    token = sa.Column(
        sa.Unicode(40),
        nullable=False,
        default=lambda x: UserService.generate_random_string(40),
    )
    owner_id = sa.Column(
        sa.Unicode(30),
        sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
    )
    creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
    expires = sa.Column(sa.DateTime)
    description = sa.Column(sa.Unicode, default="")

    @property
    def is_expired(self):
        if self.expires:
            return self.expires < datetime.utcnow()
        else:
            return False

    def __str__(self):
        return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10])
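The is_expired property reduces to a single datetime comparison, and tokens without an expires value never expire. A standalone sketch of the same logic:

from datetime import datetime, timedelta


def is_expired(expires):
    # expires is None for tokens that never expire
    return expires < datetime.utcnow() if expires else False


print(is_expired(datetime.utcnow() - timedelta(days=1)))  # True: already past
print(is_expired(None))                                   # False: no expiry set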
@@ -1,32 +1,32 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sqlalchemy as sa
from ziggurat_foundations.models.base import BaseModel
from sqlalchemy.dialects.postgresql import JSON

from . import Base


class Config(Base, BaseModel):
    __tablename__ = "config"

    key = sa.Column(sa.Unicode, primary_key=True)
    section = sa.Column(sa.Unicode, primary_key=True)
    value = sa.Column(JSON, nullable=False)

    def __json__(self, request):
        return self.get_dict()
@@ -1,165 +1,170 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import sqlalchemy as sa |
|
17 | import sqlalchemy as sa | |
18 | import logging |
|
18 | import logging | |
19 |
|
19 | |||
20 | from datetime import datetime |
|
20 | from datetime import datetime | |
21 | from appenlight.models import Base, get_db_session |
|
21 | from appenlight.models import Base, get_db_session | |
22 | from appenlight.models.services.report_stat import ReportStatService |
|
22 | from appenlight.models.services.report_stat import ReportStatService | |
23 | from appenlight.models.integrations import IntegrationException |
|
23 | from appenlight.models.integrations import IntegrationException | |
24 | from pyramid.threadlocal import get_current_request |
|
24 | from pyramid.threadlocal import get_current_request | |
25 | from sqlalchemy.dialects.postgresql import JSON |
|
25 | from sqlalchemy.dialects.postgresql import JSON | |
26 | from ziggurat_foundations.models.base import BaseModel |
|
26 | from ziggurat_foundations.models.base import BaseModel | |
27 | from ziggurat_foundations.models.services.resource import ResourceService |
|
27 | from ziggurat_foundations.models.services.resource import ResourceService | |
28 |
|
28 | |||
29 | log = logging.getLogger(__name__) |
|
29 | log = logging.getLogger(__name__) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | class Event(Base, BaseModel): |
|
32 | class Event(Base, BaseModel): | |
33 |
__tablename__ = |
|
33 | __tablename__ = "events" | |
34 |
|
34 | |||
35 | types = {'error_report_alert': 1, |
|
35 | types = { | |
36 |
|
|
36 | "error_report_alert": 1, | |
37 | 'comment': 5, |
|
37 | "slow_report_alert": 3, | |
38 |
|
|
38 | "comment": 5, | |
39 | 'uptime_alert': 7, |
|
39 | "assignment": 6, | |
40 |
|
|
40 | "uptime_alert": 7, | |
|
41 | "chart_alert": 9, | |||
|
42 | } | |||
41 |
|
43 | |||
42 |
statuses = { |
|
44 | statuses = {"active": 1, "closed": 0} | |
43 | 'closed': 0} |
|
|||
44 |
|
45 | |||
45 | id = sa.Column(sa.Integer, primary_key=True) |
|
46 | id = sa.Column(sa.Integer, primary_key=True) | |
46 | start_date = sa.Column(sa.DateTime, default=datetime.utcnow) |
|
47 | start_date = sa.Column(sa.DateTime, default=datetime.utcnow) | |
47 | end_date = sa.Column(sa.DateTime) |
|
48 | end_date = sa.Column(sa.DateTime) | |
48 | status = sa.Column(sa.Integer, default=1) |
|
49 | status = sa.Column(sa.Integer, default=1) | |
49 | event_type = sa.Column(sa.Integer, default=1) |
|
50 | event_type = sa.Column(sa.Integer, default=1) | |
50 |
origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey( |
|
51 | origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True) | |
51 | nullable=True) |
|
52 | target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True) | |
52 | target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), |
|
53 | resource_id = sa.Column( | |
53 | nullable=True) |
|
54 | sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True | |
54 | resource_id = sa.Column(sa.Integer(), |
|
55 | ) | |
55 | sa.ForeignKey('resources.resource_id'), |
|
|||
56 | nullable=True) |
|
|||
57 | target_id = sa.Column(sa.Integer) |
|
56 | target_id = sa.Column(sa.Integer) | |
58 | target_uuid = sa.Column(sa.Unicode(40)) |
|
57 | target_uuid = sa.Column(sa.Unicode(40)) | |
59 | text = sa.Column(sa.UnicodeText()) |
|
58 | text = sa.Column(sa.UnicodeText()) | |
60 | values = sa.Column(JSON(), nullable=False, default=None) |
|
59 | values = sa.Column(JSON(), nullable=False, default=None) | |
61 |
|
60 | |||
62 | def __repr__(self): |
|
61 | def __repr__(self): | |
63 |
return |
|
62 | return "<Event %s, app:%s, %s>" % ( | |
64 | self.resource_id, |
|
63 | self.unified_alert_name(), | |
65 | self.unified_alert_action()) |
|
64 | self.resource_id, | |
|
65 | self.unified_alert_action(), | |||
|
66 | ) | |||
66 |
|
67 | |||
67 | @property |
|
68 | @property | |
68 | def reverse_types(self): |
|
69 | def reverse_types(self): | |
69 | return dict([(v, k) for k, v in self.types.items()]) |
|
70 | return dict([(v, k) for k, v in self.types.items()]) | |
70 |
|
71 | |||
71 | def unified_alert_name(self): |
|
72 | def unified_alert_name(self): | |
72 | return self.reverse_types[self.event_type] |
|
73 | return self.reverse_types[self.event_type] | |
73 |
|
74 | |||
74 | def unified_alert_action(self): |
|
75 | def unified_alert_action(self): | |
75 | event_name = self.reverse_types[self.event_type] |
|
76 | event_name = self.reverse_types[self.event_type] | |
76 |
if self.status == Event.statuses[ |
|
77 | if self.status == Event.statuses["closed"]: | |
77 | return "CLOSE" |
|
78 | return "CLOSE" | |
78 |
if self.status != Event.statuses[ |
|
79 | if self.status != Event.statuses["closed"]: | |
79 | return "OPEN" |
|
80 | return "OPEN" | |
80 | return event_name |
|
81 | return event_name | |
81 |
|
82 | |||
82 | def send_alerts(self, request=None, resource=None, db_session=None): |
|
83 | def send_alerts(self, request=None, resource=None, db_session=None): | |
83 | """" Sends alerts to applicable channels """ |
|
84 | """" Sends alerts to applicable channels """ | |
84 | db_session = get_db_session(db_session) |
|
85 | db_session = get_db_session(db_session) | |
85 | db_session.flush() |
|
86 | db_session.flush() | |
86 | if not resource: |
|
87 | if not resource: | |
87 | resource = ResourceService.by_resource_id(self.resource_id) |
|
88 | resource = ResourceService.by_resource_id(self.resource_id) | |
88 | if not request: |
|
89 | if not request: | |
89 | request = get_current_request() |
|
90 | request = get_current_request() | |
90 | if not resource: |
|
91 | if not resource: | |
91 | return |
|
92 | return | |
92 |
users = set([p.user for p in ResourceService.users_for_perm(resource, |
|
93 | users = set([p.user for p in ResourceService.users_for_perm(resource, "view")]) | |
93 | for user in users: |
|
94 | for user in users: | |
94 | for channel in user.alert_channels: |
|
95 | for channel in user.alert_channels: | |
95 |
matches_resource = not channel.resources or resource in [ |
|
96 | matches_resource = not channel.resources or resource in [ | |
|
97 | r.resource_id for r in channel.resources | |||
|
98 | ] | |||
96 | if ( |
|
99 | if ( | |
97 |
not channel.channel_validated |
|
100 | not channel.channel_validated | |
98 |
not channel.send_alerts |
|
101 | or not channel.send_alerts | |
99 | not matches_resource |
|
102 | or not matches_resource | |
100 | ): |
|
103 | ): | |
101 | continue |
|
104 | continue | |
102 | else: |
|
105 | else: | |
103 | try: |
|
106 | try: | |
104 |
channel.notify_alert( |
|
107 | channel.notify_alert( | |
105 | event=self, |
|
108 | resource=resource, event=self, user=user, request=request | |
106 |
|
|
109 | ) | |
107 | request=request) |
|
|||
108 | except IntegrationException as e: |
|
110 | except IntegrationException as e: | |
109 |
log.warning( |
|
111 | log.warning("%s" % e) | |
110 |
|
112 | |||
111 | def validate_or_close(self, since_when, db_session=None): |
|
113 | def validate_or_close(self, since_when, db_session=None): | |
112 | """ Checks if alerts should stay open or it's time to close them. |
|
114 | """ Checks if alerts should stay open or it's time to close them. | |
113 | Generates close alert event if alerts get closed """ |
|
115 | Generates close alert event if alerts get closed """ | |
114 | event_types = [Event.types['error_report_alert'], |
|
116 | event_types = [ | |
115 |
|
|
117 | Event.types["error_report_alert"], | |
|
118 | Event.types["slow_report_alert"], | |||
|
119 | ] | |||
116 | app = ResourceService.by_resource_id(self.resource_id) |
|
120 | app = ResourceService.by_resource_id(self.resource_id) | |
117 | # if app was deleted close instantly |
|
121 | # if app was deleted close instantly | |
118 | if not app: |
|
122 | if not app: | |
119 | self.close() |
|
123 | self.close() | |
120 | return |
|
124 | return | |
121 |
|
125 | |||
122 | if self.event_type in event_types: |
|
126 | if self.event_type in event_types: | |
123 | total = ReportStatService.count_by_type( |
|
127 | total = ReportStatService.count_by_type( | |
124 |
self.event_type, self.resource_id, since_when |
|
128 | self.event_type, self.resource_id, since_when | |
125 | if Event.types['error_report_alert'] == self.event_type: |
|
129 | ) | |
|
130 | if Event.types["error_report_alert"] == self.event_type: | |||
126 | threshold = app.error_report_threshold |
|
131 | threshold = app.error_report_threshold | |
127 |
if Event.types[ |
|
132 | if Event.types["slow_report_alert"] == self.event_type: | |
128 | threshold = app.slow_report_threshold |
|
133 | threshold = app.slow_report_threshold | |
129 |
|
134 | |||
130 | if total < threshold: |
|
135 | if total < threshold: | |
131 | self.close() |
|
136 | self.close() | |
132 |
|
137 | |||
133 | def close(self, db_session=None): |
|
138 | def close(self, db_session=None): | |
134 | """ |
|
139 | """ | |
135 | Closes an event and sends notification to affected users |
|
140 | Closes an event and sends notification to affected users | |
136 | """ |
|
141 | """ | |
137 | self.end_date = datetime.utcnow() |
|
142 | self.end_date = datetime.utcnow() | |
138 |
self.status = Event.statuses[ |
|
143 | self.status = Event.statuses["closed"] | |
139 |
log.warning( |
|
144 | log.warning("ALERT: CLOSE: %s" % self) | |
140 | self.send_alerts() |
|
145 | self.send_alerts() | |
141 |
|
146 | |||
142 | def text_representation(self): |
|
147 | def text_representation(self): | |
143 | alert_type = self.unified_alert_name() |
|
148 | alert_type = self.unified_alert_name() | |
144 |
text = |
|
149 | text = "" | |
145 |
if |
|
150 | if "slow_report" in alert_type: | |
146 |
text += |
|
151 | text += "Slow report alert" | |
147 |
if |
|
152 | if "error_report" in alert_type: | |
148 |
text += |
|
153 | text += "Exception report alert" | |
149 |
if |
|
154 | if "uptime_alert" in alert_type: | |
150 |
text += |
|
155 | text += "Uptime alert" | |
151 |
if |
|
156 | if "chart_alert" in alert_type: | |
152 |
text += |
|
157 | text += "Metrics value alert" | |
153 |
|
158 | |||
154 | alert_action = self.unified_alert_action() |
|
159 | alert_action = self.unified_alert_action() | |
155 |
if alert_action == |
|
160 | if alert_action == "OPEN": | |
156 |
text += |
|
161 | text += " got opened." | |
157 |
if alert_action == |
|
162 | if alert_action == "CLOSE": | |
158 |
text += |
|
163 | text += " got closed." | |
159 | return text |
|
164 | return text | |
160 |
|
165 | |||
161 | def get_dict(self, request=None): |
|
166 | def get_dict(self, request=None): | |
162 | dict_data = super(Event, self).get_dict() |
|
167 | dict_data = super(Event, self).get_dict() | |
163 | dict_data['text'] = self.text_representation() |
|
168 | dict_data["text"] = self.text_representation() | |
164 | dict_data['resource_name'] = self.resource.resource_name |
|
169 | dict_data["resource_name"] = self.resource.resource_name | |
165 | return dict_data |
|
170 | return dict_data |
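
Note on the hunk above: validate_or_close() keeps report alerts open only while the report count since `since_when` stays at or above the application's threshold; otherwise close() stamps end_date and notifies. A minimal standalone sketch of that decision, assuming a hypothetical count source (fetch_report_count and AlertSketch are illustrative names, not part of the codebase):

    from datetime import datetime

    def fetch_report_count(event_type, resource_id, since_when):
        # stand-in for ReportStatService.count_by_type(); pretend 2 reports arrived
        return 2

    class AlertSketch:
        def __init__(self, event_type, resource_id, threshold):
            self.event_type = event_type
            self.resource_id = resource_id
            self.threshold = threshold
            self.end_date = None

        def validate_or_close(self, since_when):
            total = fetch_report_count(self.event_type, self.resource_id, since_when)
            if total < self.threshold:
                # same rule as the diff: traffic dropped below the alert threshold
                self.end_date = datetime.utcnow()

    alert = AlertSketch("error_report_alert", resource_id=1, threshold=10)
    alert.validate_or_close(since_when=datetime.utcnow())
    assert alert.end_date is not None
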
@@ -1,36 +1,36 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import sqlalchemy as sa |
|
17 | import sqlalchemy as sa | |
18 | from sqlalchemy.ext.declarative import declared_attr |
|
18 | from sqlalchemy.ext.declarative import declared_attr | |
19 | from ziggurat_foundations.models.external_identity import ExternalIdentityMixin |
|
19 | from ziggurat_foundations.models.external_identity import ExternalIdentityMixin | |
20 |
|
20 | |||
21 | from appenlight.models import Base |
|
21 | from appenlight.models import Base | |
22 | from appenlight.lib.sqlalchemy_fields import EncryptedUnicode |
|
22 | from appenlight.lib.sqlalchemy_fields import EncryptedUnicode | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | class ExternalIdentity(ExternalIdentityMixin, Base): |
|
25 | class ExternalIdentity(ExternalIdentityMixin, Base): | |
26 | @declared_attr |
|
26 | @declared_attr | |
27 | def access_token(self): |
|
27 | def access_token(self): | |
28 | return sa.Column(EncryptedUnicode(255), default='') |
|
28 | return sa.Column(EncryptedUnicode(255), default="") | |
29 |
|
29 | |||
30 | @declared_attr |
|
30 | @declared_attr | |
31 | def alt_token(self): |
|
31 | def alt_token(self): | |
32 | return sa.Column(EncryptedUnicode(255), default='') |
|
32 | return sa.Column(EncryptedUnicode(255), default="") | |
33 |
|
33 | |||
34 | @declared_attr |
|
34 | @declared_attr | |
35 | def token_secret(self): |
|
35 | def token_secret(self): | |
36 | return sa.Column(EncryptedUnicode(255), default='') |
|
36 | return sa.Column(EncryptedUnicode(255), default="") |
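
The three declared_attr columns above all use EncryptedUnicode, whose implementation lives in appenlight.lib.sqlalchemy_fields and is not part of this diff. A column type like that is commonly built as a SQLAlchemy TypeDecorator that encrypts on bind and decrypts on load; a hedged sketch with a toy base64 "cipher" standing in for the project's real encryption helpers:

    import base64
    import sqlalchemy as sa
    from sqlalchemy.types import TypeDecorator

    def toy_encrypt(value):
        # placeholder only; real code must use a proper authenticated cipher
        return base64.b64encode(value.encode("utf8")).decode("ascii")

    def toy_decrypt(value):
        return base64.b64decode(value.encode("ascii")).decode("utf8")

    class EncryptedUnicodeSketch(TypeDecorator):
        """Unicode column encrypted at rest, transparent to the ORM."""
        impl = sa.Unicode

        def process_bind_param(self, value, dialect):
            return toy_encrypt(value) if value is not None else None

        def process_result_value(self, value, dialect):
            return toy_decrypt(value) if value is not None else None
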
@@ -1,45 +1,46 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | from ziggurat_foundations.models.group import GroupMixin |
|
17 | from ziggurat_foundations.models.group import GroupMixin | |
18 | from appenlight.models import Base |
|
18 | from appenlight.models import Base | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | class Group(GroupMixin, Base): |
|
21 | class Group(GroupMixin, Base): | |
22 | __possible_permissions__ = ('root_administration', |
|
22 | __possible_permissions__ = ( | |
23 | 'test_features', |
|
23 | "root_administration", | |
24 | 'admin_panel', |
|
24 | "test_features", | |
25 | 'admin_users', |
|
25 | "admin_panel", | |
26 | 'manage_partitions',) |
|
26 | "admin_users", | |
|
27 | "manage_partitions", | |||
|
28 | ) | |||
27 |
|
29 | |||
28 | def get_dict(self, exclude_keys=None, include_keys=None, |
|
30 | def get_dict(self, exclude_keys=None, include_keys=None, include_perms=False): | |
29 | include_perms=False): |
|
|||
30 | result = super(Group, self).get_dict(exclude_keys, include_keys) |
|
31 | result = super(Group, self).get_dict(exclude_keys, include_keys) | |
31 | if include_perms: |
|
32 | if include_perms: | |
32 | result['possible_permissions'] = self.__possible_permissions__ |
|
33 | result["possible_permissions"] = self.__possible_permissions__ | |
33 | result['current_permissions'] = [p.perm_name for p in |
|
34 | result["current_permissions"] = [p.perm_name for p in self.permissions] | |
34 | self.permissions] |
|
|||
35 | else: |
|
35 | else: | |
36 | result['possible_permissions'] = [] |
|
36 | result["possible_permissions"] = [] | |
37 | result['current_permissions'] = [] |
|
37 | result["current_permissions"] = [] | |
38 | exclude_keys_list = exclude_keys or [] |
|
38 | exclude_keys_list = exclude_keys or [] | |
39 | include_keys_list = include_keys or [] |
|
39 | include_keys_list = include_keys or [] | |
40 | d = {} |
|
40 | d = {} | |
41 | for k in result.keys(): |
|
41 | for k in result.keys(): | |
42 | if k not in exclude_keys_list and \ |
|
42 | if k not in exclude_keys_list and ( | |
43 | (k in include_keys_list or not include_keys): |
|
43 | k in include_keys_list or not include_keys | |
|
44 | ): | |||
44 | d[k] = result[k] |
|
45 | d[k] = result[k] | |
45 | return d |
|
46 | return d |
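
The tail of get_dict() above filters the serialized dict: a key survives when it is not excluded and either appears in include_keys or no include filter was given at all. The same predicate in isolation (filter_keys is a hypothetical helper name):

    def filter_keys(result, exclude_keys=None, include_keys=None):
        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []
        return {
            k: v
            for k, v in result.items()
            # identical condition to the diff body
            if k not in exclude_keys_list
            and (k in include_keys_list or not include_keys)
        }

    assert filter_keys({"a": 1, "b": 2}, exclude_keys=["b"]) == {"a": 1}
    assert filter_keys({"a": 1, "b": 2}, include_keys=["b"]) == {"b": 2}
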
@@ -1,23 +1,24 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | from ziggurat_foundations.models.group_resource_permission import \ |
|
17 | from ziggurat_foundations.models.group_resource_permission import ( | |
18 | GroupResourcePermissionMixin |
|
18 | GroupResourcePermissionMixin, | |
|
19 | ) | |||
19 | from appenlight.models import Base |
|
20 | from appenlight.models import Base | |
20 |
|
21 | |||
21 |
|
22 | |||
22 | class GroupResourcePermission(GroupResourcePermissionMixin, Base): |
|
23 | class GroupResourcePermission(GroupResourcePermissionMixin, Base): | |
23 | pass |
|
24 | pass |
@@ -1,78 +1,80 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import sqlalchemy as sa |
|
17 | import sqlalchemy as sa | |
18 | from sqlalchemy.dialects.postgresql import JSON |
|
18 | from sqlalchemy.dialects.postgresql import JSON | |
19 | from sqlalchemy.ext.hybrid import hybrid_property |
|
19 | from sqlalchemy.ext.hybrid import hybrid_property | |
20 | from ziggurat_foundations.models.base import BaseModel |
|
20 | from ziggurat_foundations.models.base import BaseModel | |
21 |
|
21 | |||
22 | from appenlight.lib.encryption import decrypt_dictionary_keys |
|
22 | from appenlight.lib.encryption import decrypt_dictionary_keys | |
23 | from appenlight.lib.encryption import encrypt_dictionary_keys |
|
23 | from appenlight.lib.encryption import encrypt_dictionary_keys | |
24 | from appenlight.models import Base, get_db_session |
|
24 | from appenlight.models import Base, get_db_session | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | class IntegrationException(Exception): |
|
27 | class IntegrationException(Exception): | |
28 | pass |
|
28 | pass | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | class IntegrationBase(Base, BaseModel): |
|
31 | class IntegrationBase(Base, BaseModel): | |
32 | """ |
|
32 | """ | |
33 | Model from which all integrations inherit using polymorphic approach |
|
33 | Model from which all integrations inherit using polymorphic approach | |
34 | """ |
|
34 | """ | |
|
35 | |||
35 | __tablename__ = 'integrations' |
|
36 | __tablename__ = "integrations" | |
36 |
|
37 | |||
37 | front_visible = False |
|
38 | front_visible = False | |
38 | as_alert_channel = False |
|
39 | as_alert_channel = False | |
39 | supports_report_alerting = False |
|
40 | supports_report_alerting = False | |
40 |
|
41 | |||
41 | id = sa.Column(sa.Integer, primary_key=True) |
|
42 | id = sa.Column(sa.Integer, primary_key=True) | |
42 | resource_id = sa.Column(sa.Integer, |
|
43 | resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id")) | |
43 | sa.ForeignKey('applications.resource_id')) |
|
|||
44 | integration_name = sa.Column(sa.Unicode(64)) |
|
44 | integration_name = sa.Column(sa.Unicode(64)) | |
45 | _config = sa.Column('config', JSON(), nullable=False, default='') |
|
45 | _config = sa.Column("config", JSON(), nullable=False, default="") | |
46 | modified_date = sa.Column(sa.DateTime) |
|
46 | modified_date = sa.Column(sa.DateTime) | |
47 |
|
47 | |||
48 | channel = sa.orm.relationship('AlertChannel', |
|
48 | channel = sa.orm.relationship( | |
49 | cascade="all,delete-orphan", |
|
49 | "AlertChannel", | |
50 | passive_deletes=True, |
|
50 | cascade="all,delete-orphan", | |
51 | passive_updates=True, |
|
51 | passive_deletes=True, | |
52 | uselist=False, |
|
52 | passive_updates=True, | |
53 | backref='integration') |
|
53 | uselist=False, | |
|
54 | backref="integration", | |||
|
55 | ) | |||
54 |
|
56 | |||
55 | __mapper_args__ = { |
|
57 | __mapper_args__ = { | |
56 | 'polymorphic_on': 'integration_name', |
|
58 | "polymorphic_on": "integration_name", | |
57 | 'polymorphic_identity': 'integration', |
|
59 | "polymorphic_identity": "integration", | |
58 | } |
|
60 | } | |
59 |
|
61 | |||
60 | @classmethod |
|
62 | @classmethod | |
61 | def by_app_id_and_integration_name(cls, resource_id, integration_name, |
|
63 | def by_app_id_and_integration_name( | |
62 | db_session=None): |
|
64 | cls, resource_id, integration_name, db_session=None | |
|
65 | ): | |||
63 | db_session = get_db_session(db_session) |
|
66 | db_session = get_db_session(db_session) | |
64 | query = db_session.query(cls) |
|
67 | query = db_session.query(cls) | |
65 | query = query.filter(cls.integration_name == integration_name) |
|
68 | query = query.filter(cls.integration_name == integration_name) | |
66 | query = query.filter(cls.resource_id == resource_id) |
|
69 | query = query.filter(cls.resource_id == resource_id) | |
67 | return query.first() |
|
70 | return query.first() | |
68 |
|
71 | |||
69 | @hybrid_property |
|
72 | @hybrid_property | |
70 | def config(self): |
|
73 | def config(self): | |
71 | return decrypt_dictionary_keys(self._config) |
|
74 | return decrypt_dictionary_keys(self._config) | |
72 |
|
75 | |||
73 | @config.setter |
|
76 | @config.setter | |
74 | def config(self, value): |
|
77 | def config(self, value): | |
75 | if not hasattr(value, 'items'): |
|
78 | if not hasattr(value, "items"): | |
76 | raise Exception('IntegrationBase.config only accepts ' |
|
79 | raise Exception("IntegrationBase.config only accepts " "flat dictionaries") | |
77 | 'flat dictionaries') |
|
|||
78 | self._config = encrypt_dictionary_keys(value) |
|
80 | self._config = encrypt_dictionary_keys(value) |
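
The config hybrid property above round-trips every value through the dictionary encryption helpers so the JSON column is never stored in the clear. A self-contained sketch of that round trip using a plain property and a reversible toy transform in place of appenlight.lib.encryption (assumption: the real helpers are a symmetric encrypt/decrypt pair, as their names suggest):

    def toy_encrypt_dictionary_keys(d):
        return {k: v[::-1] for k, v in d.items()}  # toy reversible transform

    def toy_decrypt_dictionary_keys(d):
        return {k: v[::-1] for k, v in d.items()}

    class ConfigHolderSketch:
        def __init__(self):
            self._config = {}

        @property
        def config(self):
            return toy_decrypt_dictionary_keys(self._config)

        @config.setter
        def config(self, value):
            if not hasattr(value, "items"):
                raise Exception("config only accepts flat dictionaries")
            self._config = toy_encrypt_dictionary_keys(value)

    holder = ConfigHolderSketch()
    holder.config = {"token": "secret-value"}
    assert holder._config["token"] != "secret-value"   # stored transformed
    assert holder.config == {"token": "secret-value"}  # read back decrypted
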
@@ -1,163 +1,188 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import requests |
|
17 | import requests | |
18 | from requests_oauthlib import OAuth1 |
|
18 | from requests_oauthlib import OAuth1 | |
19 | from appenlight.models.integrations import (IntegrationBase, |
|
19 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
20 | IntegrationException) |
|
|||
21 |
|
20 | |||
22 | _ = str |
|
21 | _ = str | |
23 |
|
22 | |||
24 |
|
23 | |||
25 | class NotFoundException(Exception): |
|
24 | class NotFoundException(Exception): | |
26 | pass |
|
25 | pass | |
27 |
|
26 | |||
28 |
|
27 | |||
29 | class BitbucketIntegration(IntegrationBase): |
|
28 | class BitbucketIntegration(IntegrationBase): | |
30 | __mapper_args__ = { |
|
29 | __mapper_args__ = {"polymorphic_identity": "bitbucket"} | |
31 | 'polymorphic_identity': 'bitbucket' |
|
|||
32 | } |
|
|||
33 | front_visible = True |
|
30 | front_visible = True | |
34 | as_alert_channel = False |
|
31 | as_alert_channel = False | |
35 | supports_report_alerting = False |
|
32 | supports_report_alerting = False | |
36 | action_notification = True |
|
33 | action_notification = True | |
37 | integration_action = 'Add issue to Bitbucket' |
|
34 | integration_action = "Add issue to Bitbucket" | |
38 |
|
35 | |||
39 | @classmethod |
|
36 | @classmethod | |
40 | def create_client(cls, request, user_name=None, repo_name=None): |
|
37 | def create_client(cls, request, user_name=None, repo_name=None): | |
41 | """ |
|
38 | """ | |
42 | Creates REST client that can authenticate to specific repo |
|
39 | Creates REST client that can authenticate to specific repo | |
43 | uses auth tokens for current request user |
|
40 | uses auth tokens for current request user | |
44 | """ |
|
41 | """ | |
45 | config = request.registry.settings |
|
42 | config = request.registry.settings | |
46 | token = None |
|
43 | token = None | |
47 | secret = None |
|
44 | secret = None | |
48 | for identity in request.user.external_identities: |
|
45 | for identity in request.user.external_identities: | |
49 | if identity.provider_name == 'bitbucket': |
|
46 | if identity.provider_name == "bitbucket": | |
50 | token = identity.access_token |
|
47 | token = identity.access_token | |
51 | secret = identity.token_secret |
|
48 | secret = identity.token_secret | |
52 | break |
|
49 | break | |
53 | if not token: |
|
50 | if not token: | |
54 | raise IntegrationException( |
|
51 | raise IntegrationException("No valid auth token present for this service") | |
55 | 'No valid auth token present for this service') |
|
52 | client = BitbucketClient( | |
56 | client = BitbucketClient(token, secret, |
|
53 | token, | |
57 | user_name, |
|
54 | secret, | |
58 | repo_name, |
|
55 | user_name, | |
59 | config['authomatic.pr.bitbucket.key'], |
|
56 | repo_name, | |
60 | config['authomatic.pr.bitbucket.secret']) |
|
57 | config["authomatic.pr.bitbucket.key"], | |
|
58 | config["authomatic.pr.bitbucket.secret"], | |||
|
59 | ) | |||
61 | return client |
|
60 | return client | |
62 |
|
61 | |||
63 |
|
62 | |||
64 | class BitbucketClient(object): |
|
63 | class BitbucketClient(object): | |
65 | api_url = 'https://bitbucket.org/api/1.0' |
|
64 | api_url = "https://bitbucket.org/api/1.0" | |
66 | repo_type = 'bitbucket' |
|
65 | repo_type = "bitbucket" | |
67 |
|
66 | |||
68 | def __init__(self, token, secret, owner, repo_name, bitbucket_consumer_key, |
|
67 | def __init__( | |
69 | bitbucket_consumer_secret): |
|
68 | self, | |
|
69 | token, | |||
|
70 | secret, | |||
|
71 | owner, | |||
|
72 | repo_name, | |||
|
73 | bitbucket_consumer_key, | |||
|
74 | bitbucket_consumer_secret, | |||
|
75 | ): | |||
70 | self.access_token = token |
|
76 | self.access_token = token | |
71 | self.token_secret = secret |
|
77 | self.token_secret = secret | |
72 | self.owner = owner |
|
78 | self.owner = owner | |
73 | self.repo_name = repo_name |
|
79 | self.repo_name = repo_name | |
74 | self.bitbucket_consumer_key = bitbucket_consumer_key |
|
80 | self.bitbucket_consumer_key = bitbucket_consumer_key | |
75 | self.bitbucket_consumer_secret = bitbucket_consumer_secret |
|
81 | self.bitbucket_consumer_secret = bitbucket_consumer_secret | |
76 |
|
82 | |||
77 | possible_keys = { |
|
83 | possible_keys = { | |
78 | 'status': ['new', 'open', 'resolved', 'on hold', 'invalid', |
|
84 | "status": [ | |
79 | 'duplicate', 'wontfix'], |
|
85 | "new", | |
80 | 'priority': ['trivial', 'minor', 'major', 'critical', 'blocker'], |
|
86 | "open", | |
81 | 'kind': ['bug', 'enhancement', 'proposal', 'task'] |
|
87 | "resolved", | |
|
88 | "on hold", | |||
|
89 | "invalid", | |||
|
90 | "duplicate", | |||
|
91 | "wontfix", | |||
|
92 | ], | |||
|
93 | "priority": ["trivial", "minor", "major", "critical", "blocker"], | |||
|
94 | "kind": ["bug", "enhancement", "proposal", "task"], | |||
82 | } |
|
95 | } | |
83 |
|
96 | |||
84 | def get_statuses(self): |
|
97 | def get_statuses(self): | |
85 | """Gets list of possible item statuses""" |
|
98 | """Gets list of possible item statuses""" | |
86 | return self.possible_keys['status'] |
|
99 | return self.possible_keys["status"] | |
87 |
|
100 | |||
88 | def get_priorities(self): |
|
101 | def get_priorities(self): | |
89 | """Gets list of possible item statuses""" |
|
102 | """Gets list of possible item statuses""" | |
90 | return self.possible_keys['priority'] |
|
103 | return self.possible_keys["priority"] | |
91 |
|
104 | |||
92 | def make_request(self, url, method='get', data=None, headers=None): |
|
105 | def make_request(self, url, method="get", data=None, headers=None): | |
93 | """ |
|
106 | """ | |
94 | Performs HTTP request to bitbucket |
|
107 | Performs HTTP request to bitbucket | |
95 | """ |
|
108 | """ | |
96 | auth = OAuth1(self.bitbucket_consumer_key, |
|
109 | auth = OAuth1( | |
97 | self.bitbucket_consumer_secret, |
|
110 | self.bitbucket_consumer_key, | |
98 | self.access_token, self.token_secret) |
|
111 | self.bitbucket_consumer_secret, | |
|
112 | self.access_token, | |||
|
113 | self.token_secret, | |||
|
114 | ) | |||
99 | try: |
|
115 | try: | |
100 | resp = getattr(requests, method)(url, data=data, auth=auth, |
|
116 | resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10) | |
101 | timeout=10) |
|
|||
102 | except Exception as e: |
|
117 | except Exception as e: | |
103 | raise IntegrationException( |
|
118 | raise IntegrationException( | |
104 | _('Error communicating with Bitbucket: %s') % (e,)) |
|
119 | _("Error communicating with Bitbucket: %s") % (e,) | |
|
120 | ) | |||
105 | if resp.status_code == 401: |
|
121 | if resp.status_code == 401: | |
106 | raise IntegrationException( |
|
122 | raise IntegrationException(_("You are not authorized to access this repo")) | |
107 | _('You are not authorized to access this repo')) |
|
|||
108 | elif resp.status_code == 404: |
|
123 | elif resp.status_code == 404: | |
109 | raise IntegrationException(_('User or repo name are incorrect')) |
|
124 | raise IntegrationException(_("User or repo name are incorrect")) | |
110 | elif resp.status_code not in [200, 201]: |
|
125 | elif resp.status_code not in [200, 201]: | |
111 | raise IntegrationException( |
|
126 | raise IntegrationException( | |
112 | _('Bitbucket response_code: %s') % resp.status_code) |
|
127 | _("Bitbucket response_code: %s") % resp.status_code | |
|
128 | ) | |||
113 | try: |
|
129 | try: | |
114 | return resp.json() |
|
130 | return resp.json() | |
115 | except Exception as e: |
|
131 | except Exception as e: | |
116 | raise IntegrationException( |
|
132 | raise IntegrationException( | |
117 | _('Error decoding response from Bitbucket: %s') % (e,)) |
|
133 | _("Error decoding response from Bitbucket: %s") % (e,) | |
|
134 | ) | |||
118 |
|
135 | |||
119 | def get_assignees(self): |
|
136 | def get_assignees(self): | |
120 | """Gets list of possible assignees""" |
|
137 | """Gets list of possible assignees""" | |
121 | url = '%(api_url)s/privileges/%(owner)s/%(repo_name)s' % { |
|
138 | url = "%(api_url)s/privileges/%(owner)s/%(repo_name)s" % { | |
122 | 'api_url': self.api_url, |
|
139 | "api_url": self.api_url, | |
123 | 'owner': self.owner, |
|
140 | "owner": self.owner, | |
124 | 'repo_name': self.repo_name} |
|
141 | "repo_name": self.repo_name, | |
|
142 | } | |||
125 |
|
143 | |||
126 | data = self.make_request(url) |
|
144 | data = self.make_request(url) | |
127 | results = [{'user': self.owner, 'name': '(Repo owner)'}] |
|
145 | results = [{"user": self.owner, "name": "(Repo owner)"}] | |
128 | if data: |
|
146 | if data: | |
129 | for entry in data: |
|
147 | for entry in data: | |
130 | results.append({"user": entry['user']['username'], |
|
148 | results.append( | |
131 | "name": entry['user'].get('display_name')}) |
|
149 | { | |
|
150 | "user": entry["user"]["username"], | |||
|
151 | "name": entry["user"].get("display_name"), | |||
|
152 | } | |||
|
153 | ) | |||
132 | return results |
|
154 | return results | |
133 |
|
155 | |||
134 | def create_issue(self, form_data): |
|
156 | def create_issue(self, form_data): | |
135 | """ |
|
157 | """ | |
136 | Sends creates a new issue in tracker using REST call |
|
158 | Sends creates a new issue in tracker using REST call | |
137 | """ |
|
159 | """ | |
138 | url = '%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/' % { |
|
160 | url = "%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/" % { | |
139 | 'api_url': self.api_url, |
|
161 | "api_url": self.api_url, | |
140 | 'owner': self.owner, |
|
162 | "owner": self.owner, | |
141 | 'repo_name': self.repo_name} |
|
163 | "repo_name": self.repo_name, | |
|
164 | } | |||
142 |
|
165 | |||
143 | payload = { |
|
166 | payload = { | |
144 | "title": form_data['title'], |
|
167 | "title": form_data["title"], | |
145 | "content": form_data['content'], |
|
168 | "content": form_data["content"], | |
146 | "kind": form_data['kind'], |
|
169 | "kind": form_data["kind"], | |
147 | "priority": form_data['priority'], |
|
170 | "priority": form_data["priority"], | |
148 | "responsible": form_data['responsible'], |
|
171 | "responsible": form_data["responsible"], | |
149 | } |
|
172 | } | |
150 | data = self.make_request(url, 'post', payload) |
|
173 | data = self.make_request(url, "post", payload) | |
151 | f_args = { |
|
174 | f_args = { | |
152 | "owner": self.owner, |
|
175 | "owner": self.owner, | |
153 | "repo_name": self.repo_name, |
|
176 | "repo_name": self.repo_name, | |
154 | "issue_id": data['local_id'], |
|
177 | "issue_id": data["local_id"], | |
155 | } |
|
178 | } | |
156 | web_url = 'https://bitbucket.org/%(owner)s/%(repo_name)s' \ |
|
179 | web_url = ( | |
157 | '/issue/%(issue_id)s/issue-title' % f_args |
|
180 | "https://bitbucket.org/%(owner)s/%(repo_name)s" | |
|
181 | "/issue/%(issue_id)s/issue-title" % f_args | |||
|
182 | ) | |||
158 | to_return = { |
|
183 | to_return = { | |
159 | 'id': data['local_id'], |
|
184 | "id": data["local_id"], | |
160 | 'resource_url': data['resource_uri'], |
|
185 | "resource_url": data["resource_uri"], | |
161 | 'web_url': web_url, |
|
186 | "web_url": web_url, | |
162 | } |
|
187 | } | |
163 | return to_return |
|
188 | return to_return |
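
A usage sketch for the client above. The credentials and repo coordinates are placeholders and create_issue() performs a real HTTP call against Bitbucket, so this is illustrative only; the form_data keys match the payload built in create_issue():

    client = BitbucketClient(
        token="oauth-token",                       # placeholder credentials
        secret="oauth-secret",
        owner="some-user",
        repo_name="some-repo",
        bitbucket_consumer_key="consumer-key",
        bitbucket_consumer_secret="consumer-secret",
    )
    issue = client.create_issue({
        "title": "Exception report",
        "content": "Traceback attached",
        "kind": "bug",                             # see possible_keys["kind"]
        "priority": "major",                       # see possible_keys["priority"]
        "responsible": "some-user",
    })
    print(issue["web_url"])
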
@@ -1,74 +1,71 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import logging |
|
17 | import logging | |
18 |
|
18 | |||
19 | from requests.exceptions import HTTPError, ConnectionError |
|
19 | from requests.exceptions import HTTPError, ConnectionError | |
20 | from camplight import Request, Campfire |
|
20 | from camplight import Request, Campfire | |
21 | from camplight.exceptions import CamplightException |
|
21 | from camplight.exceptions import CamplightException | |
22 |
|
22 | |||
23 | from appenlight.models.integrations import (IntegrationBase, |
|
23 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
24 | IntegrationException) |
|
|||
25 |
|
24 | |||
26 | _ = str |
|
25 | _ = str | |
27 |
|
26 | |||
28 | log = logging.getLogger(__name__) |
|
27 | log = logging.getLogger(__name__) | |
29 |
|
28 | |||
30 |
|
29 | |||
31 | class NotFoundException(Exception): |
|
30 | class NotFoundException(Exception): | |
32 | pass |
|
31 | pass | |
33 |
|
32 | |||
34 |
|
33 | |||
35 | class CampfireIntegration(IntegrationBase): |
|
34 | class CampfireIntegration(IntegrationBase): | |
36 | __mapper_args__ = { |
|
35 | __mapper_args__ = {"polymorphic_identity": "campfire"} | |
37 | 'polymorphic_identity': 'campfire' |
|
|||
38 | } |
|
|||
39 | front_visible = False |
|
36 | front_visible = False | |
40 | as_alert_channel = True |
|
37 | as_alert_channel = True | |
41 | supports_report_alerting = True |
|
38 | supports_report_alerting = True | |
42 | action_notification = True |
|
39 | action_notification = True | |
43 | integration_action = 'Message via Campfire' |
|
40 | integration_action = "Message via Campfire" | |
44 |
|
41 | |||
45 | @classmethod |
|
42 | @classmethod | |
46 | def create_client(cls, api_token, account): |
|
43 | def create_client(cls, api_token, account): | |
47 | client = CampfireClient(api_token, account) |
|
44 | client = CampfireClient(api_token, account) | |
48 | return client |
|
45 | return client | |
49 |
|
46 | |||
50 |
|
47 | |||
51 | class CampfireClient(object): |
|
48 | class CampfireClient(object): | |
52 | def __init__(self, api_token, account): |
|
49 | def __init__(self, api_token, account): | |
53 | request = Request('https://%s.campfirenow.com' % account, api_token) |
|
50 | request = Request("https://%s.campfirenow.com" % account, api_token) | |
54 | self.campfire = Campfire(request) |
|
51 | self.campfire = Campfire(request) | |
55 |
|
52 | |||
56 | def get_account(self): |
|
53 | def get_account(self): | |
57 | try: |
|
54 | try: | |
58 | return self.campfire.account() |
|
55 | return self.campfire.account() | |
59 | except (HTTPError, CamplightException) as e: |
|
56 | except (HTTPError, CamplightException) as e: | |
60 | raise IntegrationException(str(e)) |
|
57 | raise IntegrationException(str(e)) | |
61 |
|
58 | |||
62 | def get_rooms(self): |
|
59 | def get_rooms(self): | |
63 | try: |
|
60 | try: | |
64 | return self.campfire.rooms() |
|
61 | return self.campfire.rooms() | |
65 | except (HTTPError, CamplightException) as e: |
|
62 | except (HTTPError, CamplightException) as e: | |
66 | raise IntegrationException(str(e)) |
|
63 | raise IntegrationException(str(e)) | |
67 |
|
64 | |||
68 | def speak_to_room(self, room, message, sound='RIMSHOT'): |
|
65 | def speak_to_room(self, room, message, sound="RIMSHOT"): | |
69 | try: |
|
66 | try: | |
70 | room = self.campfire.room(room) |
|
67 | room = self.campfire.room(room) | |
71 | room.join() |
|
68 | room.join() | |
72 | room.speak(message, type_='TextMessage') |
|
69 | room.speak(message, type_="TextMessage") | |
73 | except (HTTPError, CamplightException, ConnectionError) as e: |
|
70 | except (HTTPError, CamplightException, ConnectionError) as e: | |
74 | raise IntegrationException(str(e)) |
|
71 | raise IntegrationException(str(e)) |
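
A usage sketch for CampfireClient as defined above; token, account and room name are placeholders, and every call performs a real request against the Campfire API:

    client = CampfireClient(api_token="api-token", account="example-account")
    print(client.get_account())
    for room in client.get_rooms():
        print(room)
    # joins the room, then posts a TextMessage
    client.speak_to_room("Ops", "ALERT: error report threshold crossed")
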
@@ -1,82 +1,83 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import logging |
|
17 | import logging | |
18 |
|
18 | |||
19 | import requests |
|
19 | import requests | |
20 | from requests.auth import HTTPBasicAuth |
|
20 | from requests.auth import HTTPBasicAuth | |
21 | import simplejson as json |
|
21 | import simplejson as json | |
22 |
|
22 | |||
23 | from appenlight.models.integrations import (IntegrationBase, |
|
23 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
24 | IntegrationException) |
|
|||
25 |
|
24 | |||
26 | _ = str |
|
25 | _ = str | |
27 |
|
26 | |||
28 | log = logging.getLogger(__name__) |
|
27 | log = logging.getLogger(__name__) | |
29 |
|
28 | |||
30 |
|
29 | |||
31 | class NotFoundException(Exception): |
|
30 | class NotFoundException(Exception): | |
32 | pass |
|
31 | pass | |
33 |
|
32 | |||
34 |
|
33 | |||
35 | class FlowdockIntegration(IntegrationBase): |
|
34 | class FlowdockIntegration(IntegrationBase): | |
36 | __mapper_args__ = { |
|
35 | __mapper_args__ = {"polymorphic_identity": "flowdock"} | |
37 | 'polymorphic_identity': 'flowdock' |
|
|||
38 | } |
|
|||
39 | front_visible = False |
|
36 | front_visible = False | |
40 | as_alert_channel = True |
|
37 | as_alert_channel = True | |
41 | supports_report_alerting = True |
|
38 | supports_report_alerting = True | |
42 | action_notification = True |
|
39 | action_notification = True | |
43 | integration_action = 'Message via Flowdock' |
|
40 | integration_action = "Message via Flowdock" | |
44 |
|
41 | |||
45 | @classmethod |
|
42 | @classmethod | |
46 | def create_client(cls, api_token): |
|
43 | def create_client(cls, api_token): | |
47 | client = FlowdockClient(api_token) |
|
44 | client = FlowdockClient(api_token) | |
48 | return client |
|
45 | return client | |
49 |
|
46 | |||
50 |
|
47 | |||
51 | class FlowdockClient(object): |
|
48 | class FlowdockClient(object): | |
52 | def __init__(self, api_token): |
|
49 | def __init__(self, api_token): | |
53 | self.auth = HTTPBasicAuth(api_token, '') |
|
50 | self.auth = HTTPBasicAuth(api_token, "") | |
54 | self.api_token = api_token |
|
51 | self.api_token = api_token | |
55 | self.api_url = 'https://api.flowdock.com/v1/messages' |
|
52 | self.api_url = "https://api.flowdock.com/v1/messages" | |
56 |
|
53 | |||
57 | def make_request(self, url, method='get', data=None): |
|
54 | def make_request(self, url, method="get", data=None): | |
58 | headers = { |
|
55 | headers = { | |
59 | 'Content-Type': 'application/json', |
|
56 | "Content-Type": "application/json", | |
60 | 'User-Agent': 'appenlight-flowdock', |
|
57 | "User-Agent": "appenlight-flowdock", | |
61 | } |
|
58 | } | |
62 | try: |
|
59 | try: | |
63 | if data: |
|
60 | if data: | |
64 | data = json.dumps(data) |
|
61 | data = json.dumps(data) | |
65 | resp = getattr(requests, method)(url, data=data, headers=headers, |
|
62 | resp = getattr(requests, method)( | |
66 | timeout=10) |
|
63 | url, data=data, headers=headers, timeout=10 | |
|
64 | ) | |||
67 | except Exception as e: |
|
65 | except Exception as e: | |
68 | raise IntegrationException( |
|
66 | raise IntegrationException( | |
69 | _('Error communicating with Flowdock: %s') % (e,)) |
|
67 | _("Error communicating with Flowdock: %s") % (e,) | |
|
68 | ) | |||
70 | if resp.status_code > 299: |
|
69 | if resp.status_code > 299: | |
71 | raise IntegrationException(resp.text) |
|
70 | raise IntegrationException(resp.text) | |
72 | return resp |
|
71 | return resp | |
73 |
|
72 | |||
74 | def send_to_chat(self, payload): |
|
73 | def send_to_chat(self, payload): | |
75 | url = '%(api_url)s/chat/%(api_token)s' % {'api_url': self.api_url, |
|
74 | url = "%(api_url)s/chat/%(api_token)s" % { | |
76 | 'api_token': self.api_token} |
|
75 | "api_url": self.api_url, | |
77 | return self.make_request(url, method='post', data=payload).json() |
|
76 | "api_token": self.api_token, | |
|
77 | } | |||
|
78 | return self.make_request(url, method="post", data=payload).json() | |||
78 |
|
79 | |||
79 | def send_to_inbox(self, payload): |
|
80 | def send_to_inbox(self, payload): | |
80 | f_args = {'api_url': self.api_url, 'api_token': self.api_token} |
|
81 | f_args = {"api_url": self.api_url, "api_token": self.api_token} | |
81 | url = '%(api_url)s/team_inbox/%(api_token)s' % f_args |
|
82 | url = "%(api_url)s/team_inbox/%(api_token)s" % f_args | |
82 | return self.make_request(url, method='post', data=payload).json() |
|
83 | return self.make_request(url, method="post", data=payload).json() |
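
A usage sketch for the two delivery paths above. The token is a placeholder, and the payload field names (content, external_user_name, source, from_address, subject) are an assumption based on Flowdock's v1 push API rather than anything shown in this diff:

    client = FlowdockClient(api_token="flow-api-token")  # placeholder token

    # chat push: POST /v1/messages/chat/<token>
    client.send_to_chat({
        "content": "ALERT: OPEN",
        "external_user_name": "AppEnlight",  # assumed field name
    })

    # team inbox push: POST /v1/messages/team_inbox/<token>
    client.send_to_inbox({
        "source": "AppEnlight",              # assumed field names below
        "from_address": "alerts@example.com",
        "subject": "Error report alert",
        "content": "Threshold crossed",
    })
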
@@ -1,156 +1,158 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import json |
|
17 | import json | |
18 | import requests |
|
18 | import requests | |
19 |
|
19 | |||
20 | from . import IntegrationBase, IntegrationException |
|
20 | from . import IntegrationBase, IntegrationException | |
21 |
|
21 | |||
22 | _ = str |
|
22 | _ = str | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | class GithubAuthException(Exception): |
|
25 | class GithubAuthException(Exception): | |
26 | pass |
|
26 | pass | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | class GithubIntegration(IntegrationBase): |
|
29 | class GithubIntegration(IntegrationBase): | |
30 | __mapper_args__ = { |
|
30 | __mapper_args__ = {"polymorphic_identity": "github"} | |
31 | 'polymorphic_identity': 'github' |
|
|||
32 | } |
|
|||
33 | front_visible = True |
|
31 | front_visible = True | |
34 | as_alert_channel = False |
|
32 | as_alert_channel = False | |
35 | supports_report_alerting = False |
|
33 | supports_report_alerting = False | |
36 | action_notification = True |
|
34 | action_notification = True | |
37 | integration_action = 'Add issue to Github' |
|
35 | integration_action = "Add issue to Github" | |
38 |
|
36 | |||
39 | @classmethod |
|
37 | @classmethod | |
40 | def create_client(cls, request, user_name=None, repo_name=None): |
|
38 | def create_client(cls, request, user_name=None, repo_name=None): | |
41 | """ |
|
39 | """ | |
42 | Creates REST client that can authenticate to specific repo |
|
40 | Creates REST client that can authenticate to specific repo | |
43 | uses auth tokens for current request user |
|
41 | uses auth tokens for current request user | |
44 | """ |
|
42 | """ | |
45 | token = None |
|
43 | token = None | |
46 | secret = None |
|
44 | secret = None | |
47 | for identity in request.user.external_identities: |
|
45 | for identity in request.user.external_identities: | |
48 | if identity.provider_name == 'github': |
|
46 | if identity.provider_name == "github": | |
49 | token = identity.access_token |
|
47 | token = identity.access_token | |
50 | secret = identity.token_secret |
|
48 | secret = identity.token_secret | |
51 | break |
|
49 | break | |
52 | if not token: |
|
50 | if not token: | |
53 | raise IntegrationException( |
|
51 | raise IntegrationException("No valid auth token present for this service") | |
54 | 'No valid auth token present for this service') |
|
|||
55 | client = GithubClient(token=token, owner=user_name, name=repo_name) |
|
52 | client = GithubClient(token=token, owner=user_name, name=repo_name) | |
56 | return client |
|
53 | return client | |
57 |
|
54 | |||
58 |
|
55 | |||
59 | class GithubClient(object): |
|
56 | class GithubClient(object): | |
60 | api_url = 'https://api.github.com' |
|
57 | api_url = "https://api.github.com" | |
61 | repo_type = 'github' |
|
58 | repo_type = "github" | |
62 |
|
59 | |||
63 | def __init__(self, token, owner, name): |
|
60 | def __init__(self, token, owner, name): | |
64 | self.access_token = token |
|
61 | self.access_token = token | |
65 | self.owner = owner |
|
62 | self.owner = owner | |
66 | self.name = name |
|
63 | self.name = name | |
67 |
|
64 | |||
68 | def make_request(self, url, method='get', data=None, headers=None): |
|
65 | def make_request(self, url, method="get", data=None, headers=None): | |
69 | req_headers = {'User-Agent': 'appenlight', |
|
66 | req_headers = { | |
70 | 'Content-Type': 'application/json', |
|
67 | "User-Agent": "appenlight", | |
71 | 'Authorization': 'token %s' % self.access_token} |
|
68 | "Content-Type": "application/json", | |
|
69 | "Authorization": "token %s" % self.access_token, | |||
|
70 | } | |||
72 | try: |
|
71 | try: | |
73 | if data: |
|
72 | if data: | |
74 | data = json.dumps(data) |
|
73 | data = json.dumps(data) | |
75 | resp = getattr(requests, method)(url, data=data, |
|
74 | resp = getattr(requests, method)( | |
76 | headers=req_headers, |
|
75 | url, data=data, headers=req_headers, timeout=10 | |
77 | timeout=10) |
|
76 | ) | |
78 | except Exception as e: |
|
77 | except Exception as e: | |
79 | msg = 'Error communicating with Github: %s' |
|
78 | msg = "Error communicating with Github: %s" | |
80 | raise IntegrationException(_(msg) % (e,)) |
|
79 | raise IntegrationException(_(msg) % (e,)) | |
81 |
|
80 | |||
82 | if resp.status_code == 404: |
|
81 | if resp.status_code == 404: | |
83 | msg = 'User or repo name are incorrect' |
|
82 | msg = "User or repo name are incorrect" | |
84 | raise IntegrationException(_(msg)) |
|
83 | raise IntegrationException(_(msg)) | |
85 | if resp.status_code == 401: |
|
84 | if resp.status_code == 401: | |
86 | msg = 'You are not authorized to access this repo' |
|
85 | msg = "You are not authorized to access this repo" | |
87 | raise IntegrationException(_(msg)) |
|
86 | raise IntegrationException(_(msg)) | |
88 | elif resp.status_code not in [200, 201]: |
|
87 | elif resp.status_code not in [200, 201]: | |
89 | msg = 'Github response_code: %s' |
|
88 | msg = "Github response_code: %s" | |
90 | raise IntegrationException(_(msg) % resp.status_code) |
|
89 | raise IntegrationException(_(msg) % resp.status_code) | |
91 | try: |
|
90 | try: | |
92 | return resp.json() |
|
91 | return resp.json() | |
93 | except Exception as e: |
|
92 | except Exception as e: | |
94 | msg = 'Error decoding response from Github: %s' |
|
93 | msg = "Error decoding response from Github: %s" | |
95 | raise IntegrationException(_(msg) % (e,)) |
|
94 | raise IntegrationException(_(msg) % (e,)) | |
96 |
|
95 | |||
97 | def get_statuses(self): |
|
96 | def get_statuses(self): | |
98 | """Gets list of possible item statuses""" |
|
97 | """Gets list of possible item statuses""" | |
99 | url = '%(api_url)s/repos/%(owner)s/%(name)s/labels' % { |
|
98 | url = "%(api_url)s/repos/%(owner)s/%(name)s/labels" % { | |
100 | 'api_url': self.api_url, |
|
99 | "api_url": self.api_url, | |
101 | 'owner': self.owner, |
|
100 | "owner": self.owner, | |
102 | 'name': self.name} |
|
101 | "name": self.name, | |
|
102 | } | |||
103 |
|
103 | |||
104 | data = self.make_request(url) |
|
104 | data = self.make_request(url) | |
105 |
|
105 | |||
106 | statuses = [] |
|
106 | statuses = [] | |
107 | for status in data: |
|
107 | for status in data: | |
108 | statuses.append(status['name']) |
|
108 | statuses.append(status["name"]) | |
109 | return statuses |
|
109 | return statuses | |
110 |
|
110 | |||
111 | def get_repo(self): |
|
111 | def get_repo(self): | |
112 | """Gets list of possible item statuses""" |
|
112 | """Gets list of possible item statuses""" | |
113 | url = '%(api_url)s/repos/%(owner)s/%(name)s' % { |
|
113 | url = "%(api_url)s/repos/%(owner)s/%(name)s" % { | |
114 | 'api_url': self.api_url, |
|
114 | "api_url": self.api_url, | |
115 | 'owner': self.owner, |
|
115 | "owner": self.owner, | |
116 | 'name': self.name} |
|
116 | "name": self.name, | |
|
117 | } | |||
117 |
|
118 | |||
118 | data = self.make_request(url) |
|
119 | data = self.make_request(url) | |
119 | return data |
|
120 | return data | |
120 |
|
121 | |||
121 | def get_assignees(self): |
|
122 | def get_assignees(self): | |
122 | """Gets list of possible assignees""" |
|
123 | """Gets list of possible assignees""" | |
123 | url = '%(api_url)s/repos/%(owner)s/%(name)s/collaborators' % { |
|
124 | url = "%(api_url)s/repos/%(owner)s/%(name)s/collaborators" % { | |
124 | 'api_url': self.api_url, |
|
125 | "api_url": self.api_url, | |
125 | 'owner': self.owner, |
|
126 | "owner": self.owner, | |
126 | 'name': self.name} |
|
127 | "name": self.name, | |
|
128 | } | |||
127 | data = self.make_request(url) |
|
129 | data = self.make_request(url) | |
128 | results = [] |
|
130 | results = [] | |
129 | for entry in data: |
|
131 | for entry in data: | |
130 | results.append({"user": entry['login'], |
|
132 | results.append({"user": entry["login"], "name": entry.get("name")}) | |
131 | "name": entry.get('name')}) |
|
|||
132 | return results |
|
133 | return results | |
133 |
|
134 | |||
134 | def create_issue(self, form_data): |
|
135 | def create_issue(self, form_data): | |
135 | """ |
|
136 | """ | |
136 | Make a REST call to create issue in Github's issue tracker |
|
137 | Make a REST call to create issue in Github's issue tracker | |
137 | """ |
|
138 | """ | |
138 | url = '%(api_url)s/repos/%(owner)s/%(name)s/issues' % { |
|
139 | url = "%(api_url)s/repos/%(owner)s/%(name)s/issues" % { | |
139 | 'api_url': self.api_url, |
|
140 | "api_url": self.api_url, | |
140 | 'owner': self.owner, |
|
141 | "owner": self.owner, | |
141 | 'name': self.name} |
|
142 | "name": self.name, | |
|
143 | } | |||
142 |
|
144 | |||
143 | payload = { |
|
145 | payload = { | |
144 | "title": form_data['title'], |
|
146 | "title": form_data["title"], | |
145 | "body": form_data['content'], |
|
147 | "body": form_data["content"], | |
146 | "labels": [], |
|
148 | "labels": [], | |
147 | "assignee": form_data['responsible'], |
|
149 | "assignee": form_data["responsible"], | |
148 | } |
|
150 | } | |
149 | payload['labels'].extend(form_data['kind']) |
|
151 | payload["labels"].extend(form_data["kind"]) | |
150 | data = self.make_request(url, 'post', data=payload) |
|
152 | data = self.make_request(url, "post", data=payload) | |
151 | to_return = { |
|
153 | to_return = { | |
152 | 'id': data['number'], |
|
154 | "id": data["number"], | |
153 | 'resource_url': data['url'], |
|
155 | "resource_url": data["url"], | |
154 | 'web_url': data['html_url'], |
|
156 | "web_url": data["html_url"], | |
155 | } |
|
157 | } | |
156 | return to_return |
|
158 | return to_return |
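
A usage sketch for GithubClient; the token and repo coordinates are placeholders, the call performs a real HTTP request, and the form_data keys mirror how create_issue() maps them onto Github's issue fields (content becomes body, kind extends labels, responsible becomes assignee):

    client = GithubClient(token="personal-token", owner="some-user", name="some-repo")
    issue = client.create_issue({
        "title": "Exception report",
        "content": "Traceback attached",  # becomes the issue body
        "kind": ["bug"],                  # extended into labels
        "responsible": "some-user",       # becomes the assignee
    })
    print(issue["web_url"])
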
@@ -1,83 +1,75 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import logging |
|
17 | import logging | |
18 |
|
18 | |||
19 | import requests |
|
19 | import requests | |
20 |
|
20 | |||
21 | from . import IntegrationBase, IntegrationException |
|
21 | from . import IntegrationBase, IntegrationException | |
22 |
|
22 | |||
23 | _ = str |
|
23 | _ = str | |
24 |
|
24 | |||
25 | log = logging.getLogger(__name__) |
|
25 | log = logging.getLogger(__name__) | |
26 |
|
26 | |||
27 |
|
27 | |||
28 | class NotFoundException(Exception): |
|
28 | class NotFoundException(Exception): | |
29 | pass |
|
29 | pass | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | class HipchatIntegration(IntegrationBase): |
|
32 | class HipchatIntegration(IntegrationBase): | |
33 | __mapper_args__ = { |
|
33 | __mapper_args__ = {"polymorphic_identity": "hipchat"} | |
34 | 'polymorphic_identity': 'hipchat' |
|
|||
35 | } |
|
|||
36 | front_visible = False |
|
34 | front_visible = False | |
37 | as_alert_channel = True |
|
35 | as_alert_channel = True | |
38 | supports_report_alerting = True |
|
36 | supports_report_alerting = True | |
39 | action_notification = True |
|
37 | action_notification = True | |
40 | integration_action = 'Message via Hipchat' |
|
38 | integration_action = "Message via Hipchat" | |
41 |
|
39 | |||
42 | @classmethod |
|
40 | @classmethod | |
43 | def create_client(cls, api_token): |
|
41 | def create_client(cls, api_token): | |
44 | client = HipchatClient(api_token) |
|
42 | client = HipchatClient(api_token) | |
45 | return client |
|
43 | return client | |
46 |
|
44 | |||
47 |
|
45 | |||
48 | class HipchatClient(object): |
|
46 | class HipchatClient(object): | |
49 | def __init__(self, api_token): |
|
47 | def __init__(self, api_token): | |
50 | self.api_token = api_token |
|
48 | self.api_token = api_token | |
51 | self.api_url = 'https://api.hipchat.com/v1' |
|
49 | self.api_url = "https://api.hipchat.com/v1" | |
52 |
|
50 | |||
53 | def make_request(self, endpoint, method='get', data=None): |
|
51 | def make_request(self, endpoint, method="get", data=None): | |
54 | headers = { |
|
52 | headers = {"User-Agent": "appenlight-hipchat"} | |
55 | 'User-Agent': 'appenlight-hipchat', |
|
53 | url = "%s%s" % (self.api_url, endpoint) | |
56 | } |
|
54 | params = {"format": "json", "auth_token": self.api_token} | |
57 | url = '%s%s' % (self.api_url, endpoint) |
|
|||
58 | params = { |
|
|||
59 | 'format': 'json', |
|
|||
60 | 'auth_token': self.api_token |
|
|||
61 | } |
|
|||
62 | try: |
|
55 | try: | |
63 | resp = getattr(requests, method)(url, data=data, headers=headers, |
|
56 | resp = getattr(requests, method)( | |
64 | params=params, |
|
57 | url, data=data, headers=headers, params=params, timeout=3 | |
65 | timeout=3) |
|
58 | ) | |
66 | except Exception as e: |
|
59 | except Exception as e: | |
67 | msg = 'Error communicating with Hipchat: %s' |
|
60 | msg = "Error communicating with Hipchat: %s" | |
68 | raise IntegrationException(_(msg) % (e,)) |
|
61 | raise IntegrationException(_(msg) % (e,)) | |
69 | if resp.status_code == 404: |
|
62 | if resp.status_code == 404: | |
70 | msg = 'Error communicating with Hipchat - Room not found' |
|
63 | msg = "Error communicating with Hipchat - Room not found" | |
71 | raise IntegrationException(msg) |
|
64 | raise IntegrationException(msg) | |
72 | elif resp.status_code != requests.codes.ok: |
|
65 | elif resp.status_code != requests.codes.ok: | |
73 | msg = 'Error communicating with Hipchat - status code: %s' |
|
66 | msg = "Error communicating with Hipchat - status code: %s" | |
74 | raise IntegrationException(msg % resp.status_code) |
|
67 | raise IntegrationException(msg % resp.status_code) | |
75 | return resp |
|
68 | return resp | |
76 |
|
69 | |||
77 | def get_rooms(self): |
|
70 | def get_rooms(self): | |
78 | # not used with notification api token |
|
71 | # not used with notification api token | |
79 | return self.make_request('/rooms/list') |
|
72 | return self.make_request("/rooms/list") | |
80 |
|
73 | |||
81 | def send(self, payload): |
|
74 | def send(self, payload): | |
82 | return self.make_request('/rooms/message', method='post', |
|
75 | return self.make_request("/rooms/message", method="post", data=payload).json() | |
83 | data=payload).json() |
|
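
A usage sketch for HipchatClient. The token is a placeholder, and the payload keys (room_id, from, message, color) are an assumption from HipChat's v1 rooms/message endpoint, which this client targets, rather than fields defined in the diff:

    client = HipchatClient(api_token="notification-token")  # placeholder
    client.send({
        "room_id": "ops",         # assumed v1 parameters
        "from": "AppEnlight",
        "message": "ALERT: OPEN",
        "color": "red",
    })
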
@@ -1,136 +1,137 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import jira |
|
17 | import jira | |
18 | from appenlight.models.integrations import (IntegrationBase, |
|
18 | from appenlight.models.integrations import IntegrationBase, IntegrationException | |
19 | IntegrationException) |
|
|||
20 |
|
19 | |||
21 | _ = str |
|
20 | _ = str | |
22 |
|
21 | |||
23 |
|
22 | |||
24 | class NotFoundException(Exception): |
|
23 | class NotFoundException(Exception): | |
25 | pass |
|
24 | pass | |
26 |
|
25 | |||
27 |
|
26 | |||
28 | class JiraIntegration(IntegrationBase): |
|
27 | class JiraIntegration(IntegrationBase): | |
29 | __mapper_args__ = { |
|
28 | __mapper_args__ = {"polymorphic_identity": "jira"} | |
30 | 'polymorphic_identity': 'jira' |
|
|||
31 | } |
|
|||
32 | front_visible = True |
|
29 | front_visible = True | |
33 | as_alert_channel = False |
|
30 | as_alert_channel = False | |
34 | supports_report_alerting = False |
|
31 | supports_report_alerting = False | |
35 | action_notification = True |
|
32 | action_notification = True | |
36 | integration_action = 'Add issue to Jira' |
|
33 | integration_action = "Add issue to Jira" | |
37 |
|
34 | |||
38 |
|
35 | |||
39 | class JiraClient(object): |
|
36 | class JiraClient(object): | |
40 | def __init__(self, user_name, password, host_name, project, request=None): |
|
37 | def __init__(self, user_name, password, host_name, project, request=None): | |
41 | self.user_name = user_name |
|
38 | self.user_name = user_name | |
42 | self.password = password |
|
39 | self.password = password | |
43 | self.host_name = host_name |
|
40 | self.host_name = host_name | |
44 | self.project = project |
|
41 | self.project = project | |
45 | self.request = request |
|
42 | self.request = request | |
46 | try: |
|
43 | try: | |
47 | self.client = jira.client.JIRA(options={'server': host_name}, |
|
44 | self.client = jira.client.JIRA( | |
48 | basic_auth=(user_name, password)) |
|
45 | options={"server": host_name}, basic_auth=(user_name, password) | |
|
46 | ) | |||
49 | except jira.JIRAError as e: |
|
47 | except jira.JIRAError as e: | |
50 | raise IntegrationException( |
|
48 | raise IntegrationException( | |
51 |
|
|
49 | "Communication problem: HTTP_STATUS:%s, URL:%s " | |
52 |
|
|
50 | % (e.status_code, e.url) | |
|
51 | ) | |||
53 |
|
52 | |||
54 | def get_projects(self): |
|
53 | def get_projects(self): | |
55 | projects = self.client.projects() |
|
54 | projects = self.client.projects() | |
56 | return projects |
|
55 | return projects | |
57 |
|
56 | |||
58 | def get_assignees(self, request): |
|
57 | def get_assignees(self, request): | |
59 | """Gets list of possible assignees""" |
|
58 | """Gets list of possible assignees""" | |
60 | cache_region = request.registry.cache_regions.redis_sec_30 |
|
59 | cache_region = request.registry.cache_regions.redis_sec_30 | |
61 | @cache_region.cache_on_arguments('JiraClient.get_assignees') |
|
60 | ||
|
61 | @cache_region.cache_on_arguments("JiraClient.get_assignees") | |||
62 | def cached(project_name): |
|
62 | def cached(project_name): | |
63 | users = self.client.search_assignable_users_for_issues( |
|
63 | users = self.client.search_assignable_users_for_issues( | |
64 |
None, project=project_name |
|
64 | None, project=project_name | |
|
65 | ) | |||
65 | results = [] |
|
66 | results = [] | |
66 | for user in users: |
|
67 | for user in users: | |
67 | results.append({"id": user.name, "name": user.displayName}) |
|
68 | results.append({"id": user.name, "name": user.displayName}) | |
68 | return results |
|
69 | return results | |
|
70 | ||||
69 | return cached(self.project) |
|
71 | return cached(self.project) | |
70 |
|
72 | |||
71 | def get_issue_types(self, request): |
|
73 | def get_issue_types(self, request): | |
72 | metadata = self.get_metadata(request) |
|
74 | metadata = self.get_metadata(request) | |
73 | assignees = self.get_assignees(request) |
|
75 | assignees = self.get_assignees(request) | |
74 | parsed_metadata = [] |
|
76 | parsed_metadata = [] | |
75 |
for entry in metadata[ |
|
77 | for entry in metadata["projects"][0]["issuetypes"]: | |
76 |
issue = {"name": entry[ |
|
78 | issue = {"name": entry["name"], "id": entry["id"], "fields": []} | |
77 | "id": entry['id'], |
|
79 | for i_id, field_i in entry["fields"].items(): | |
78 | "fields": []} |
|
|||
79 | for i_id, field_i in entry['fields'].items(): |
|
|||
80 | field = { |
|
80 | field = { | |
81 |
"name": field_i[ |
|
81 | "name": field_i["name"], | |
82 | "id": i_id, |
|
82 | "id": i_id, | |
83 |
"required": field_i[ |
|
83 | "required": field_i["required"], | |
84 | "values": [], |
|
84 | "values": [], | |
85 |
"type": field_i[ |
|
85 | "type": field_i["schema"].get("type"), | |
86 | } |
|
86 | } | |
87 |
if field_i.get( |
|
87 | if field_i.get("allowedValues"): | |
88 |
field[ |
|
88 | field["values"] = [] | |
89 |
for i in field_i[ |
|
89 | for i in field_i["allowedValues"]: | |
90 |
field[ |
|
90 | field["values"].append( | |
91 |
{ |
|
91 | {"id": i["id"], "name": i.get("name", i.get("value", ""))} | |
92 | 'name': i.get('name', i.get('value', '')) |
|
92 | ) | |
93 | }) |
|
93 | if field["id"] == "assignee": | |
94 |
|
|
94 | field["values"] = assignees | |
95 | field['values'] = assignees |
|
95 | issue["fields"].append(field) | |
96 | issue['fields'].append(field) |
|
|||
97 | parsed_metadata.append(issue) |
|
96 | parsed_metadata.append(issue) | |
98 | return parsed_metadata |
|
97 | return parsed_metadata | |
99 |
|
98 | |||
100 | def get_metadata(self, request): |
|
99 | def get_metadata(self, request): | |
101 | # cache_region = request.registry.cache_regions.redis_sec_30 |
|
100 | # cache_region = request.registry.cache_regions.redis_sec_30 | |
102 | # @cache_region.cache_on_arguments('JiraClient.get_metadata') |
|
101 | # @cache_region.cache_on_arguments('JiraClient.get_metadata') | |
103 | def cached(project_name): |
|
102 | def cached(project_name): | |
104 | return self.client.createmeta( |
|
103 | return self.client.createmeta( | |
105 |
projectKeys=project_name, expand= |
|
104 | projectKeys=project_name, expand="projects.issuetypes.fields" | |
|
105 | ) | |||
|
106 | ||||
106 | return cached(self.project) |
|
107 | return cached(self.project) | |
107 |
|
108 | |||
108 | def create_issue(self, form_data, request): |
|
109 | def create_issue(self, form_data, request): | |
109 | issue_types = self.get_issue_types(request) |
|
110 | issue_types = self.get_issue_types(request) | |
110 | payload = { |
|
111 | payload = { | |
111 |
|
|
112 | "project": {"key": form_data["project"]}, | |
112 |
|
|
113 | "summary": form_data["title"], | |
113 |
|
|
114 | "description": form_data["content"], | |
114 |
|
|
115 | "issuetype": {"id": form_data["issue_type"]}, | |
115 |
"priority": { |
|
116 | "priority": {"id": form_data["priority"]}, | |
116 |
"assignee": { |
|
117 | "assignee": {"name": form_data["responsible"]}, | |
117 | } |
|
118 | } | |
118 | for issue_type in issue_types: |
|
119 | for issue_type in issue_types: | |
119 |
if issue_type[ |
|
120 | if issue_type["id"] == form_data["issue_type"]: | |
120 |
for field in issue_type[ |
|
121 | for field in issue_type["fields"]: | |
121 | # set some defaults for other required fields |
|
122 | # set some defaults for other required fields | |
122 |
if field == |
|
123 | if field == "reporter": | |
123 |
payload["reporter"] = { |
|
124 | payload["reporter"] = {"id": self.user_name} | |
124 |
if field[ |
|
125 | if field["required"] and field["id"] not in payload: | |
125 |
if field[ |
|
126 | if field["type"] == "array": | |
126 |
payload[field[ |
|
127 | payload[field["id"]] = [field["values"][0]] | |
127 |
elif field[ |
|
128 | elif field["type"] == "string": | |
128 |
payload[field[ |
|
129 | payload[field["id"]] = "" | |
129 | new_issue = self.client.create_issue(fields=payload) |
|
130 | new_issue = self.client.create_issue(fields=payload) | |
130 |
web_url = self.host_name + |
|
131 | web_url = self.host_name + "/browse/" + new_issue.key | |
131 | to_return = { |
|
132 | to_return = { | |
132 |
|
|
133 | "id": new_issue.id, | |
133 |
|
|
134 | "resource_url": new_issue.self, | |
134 |
|
|
135 | "web_url": web_url, | |
135 | } |
|
136 | } | |
136 | return to_return |
|
137 | return to_return |
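For orientation, a minimal sketch of driving the client above end to end. The host, credentials, and form values are hypothetical placeholders, and the request argument must be a live Pyramid request with the redis cache regions configured, since get_assignees() caches through request.registry:

# Hypothetical values throughout; only the form_data keys mirror what
# create_issue() reads above.
client = JiraClient(
    user_name="reporter",
    password="secret",
    host_name="https://jira.example.com",
    project="PROJ",
)
form_data = {
    "project": "PROJ",
    "title": "Exception in /register",
    "content": "Traceback copied from the AppEnlight report",
    "issue_type": "1",
    "priority": "3",
    "responsible": "reporter",
}
issue = client.create_issue(form_data, request)  # request: a configured Pyramid request
print(issue["web_url"])  # e.g. https://jira.example.com/browse/PROJ-123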
@@ -1,74 +1,65 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

import requests

from appenlight.models.integrations import IntegrationBase, IntegrationException
from appenlight.lib.ext_json import json

_ = str

log = logging.getLogger(__name__)


class NotFoundException(Exception):
    pass


class SlackIntegration(IntegrationBase):
    __mapper_args__ = {"polymorphic_identity": "slack"}
    front_visible = False
    as_alert_channel = True
    supports_report_alerting = True
    action_notification = True
    integration_action = "Message via Slack"

    @classmethod
    def create_client(cls, api_token):
        client = SlackClient(api_token)
        return client


class SlackClient(object):
    def __init__(self, api_url):
        self.api_url = api_url

    def make_request(self, data=None):
        headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"}
        try:
            resp = getattr(requests, "post")(
                self.api_url, data=json.dumps(data), headers=headers, timeout=3
            )
        except Exception as e:
            raise IntegrationException(_("Error communicating with Slack: %s") % (e,))
        if resp.status_code != requests.codes.ok:
            msg = "Error communicating with Slack - status code: %s"
            raise IntegrationException(msg % resp.status_code)
        return resp

    def send(self, payload):
        # make_request() always POSTs to self.api_url and accepts no URL or
        # method arguments, so only the payload is passed here (the original
        # call forwarded a "/rooms/message" path and a method= keyword that
        # make_request() cannot accept)
        return self.make_request(data=payload).json()
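A minimal usage sketch for the client above. The webhook URL is a made-up placeholder, and the {"text": ...} payload follows Slack's incoming-webhook convention rather than anything defined in this file; since send() calls .json() on the response, make_request() is used directly here:

client = SlackIntegration.create_client(
    "https://hooks.slack.com/services/T000/B000/XXXX"  # hypothetical webhook URL
)
# make_request() POSTs the JSON-serialized payload to the configured URL
resp = client.make_request({"text": "AppEnlight: new report group opened"})
print(resp.status_code)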
@@ -1,138 +1,145 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

import requests

from appenlight.models.integrations import IntegrationBase, IntegrationException
from appenlight.models.alert_channel import AlertChannel
from appenlight.lib.ext_json import json

_ = str

log = logging.getLogger(__name__)


class NotFoundException(Exception):
    pass


class WebhooksIntegration(IntegrationBase):
    __mapper_args__ = {"polymorphic_identity": "webhooks"}
    front_visible = False
    as_alert_channel = True
    supports_report_alerting = True
    action_notification = True
    integration_action = "Message via Webhooks"

    @classmethod
    def create_client(cls, url):
        client = WebhooksClient(url)
        return client


class WebhooksClient(object):
    def __init__(self, url):
        self.api_url = url

    def make_request(self, url, method="get", data=None):
        headers = {
            "Content-Type": "application/json",
            "User-Agent": "appenlight-webhooks",
        }
        try:
            if data:
                data = json.dumps(data)
            resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3)
        except Exception as e:
            raise IntegrationException(
                _("Error communicating with Webhooks: {}").format(e)
            )
        if resp.status_code > 299:
            raise IntegrationException(
                "Error communicating with Webhooks - status code: {}".format(
                    resp.status_code
                )
            )
        return resp

    def send_to_hook(self, payload):
        return self.make_request(self.api_url, method="post", data=payload).json()


class WebhooksAlertChannel(AlertChannel):
    __mapper_args__ = {"polymorphic_identity": "webhooks"}

    def notify_reports(self, **kwargs):
        """
        Notify user of individual reports

        kwargs:
            application: application that the event applies for,
            user: user that should be notified
            request: request object
            since_when: reports are newer than this time value,
            reports: list of reports to render

        """
        template_vars = self.get_notification_basic_vars(kwargs)
        payload = []
        include_keys = (
            "id",
            "http_status",
            "report_type",
            "resource_name",
            "front_url",
            "resource_id",
            "error",
            "url_path",
            "tags",
            "duration",
        )

        for occurences, report in kwargs["reports"]:
            r_dict = report.last_report_ref.get_dict(
                kwargs["request"], include_keys=include_keys
            )
            r_dict["group"]["occurences"] = occurences
            payload.append(r_dict)
        client = WebhooksIntegration.create_client(
            self.integration.config["reports_webhook"]
        )
        client.send_to_hook(payload)

    def notify_alert(self, **kwargs):
        """
        Notify user of report or uptime threshold events based on events alert type

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        payload = {
            "alert_action": kwargs["event"].unified_alert_action(),
            "alert_name": kwargs["event"].unified_alert_name(),
            "event_time": kwargs["event"].end_date or kwargs["event"].start_date,
            "resource_name": None,
            "resource_id": None,
        }
        if kwargs["event"].values and kwargs["event"].values.get("reports"):
            payload["reports"] = kwargs["event"].values.get("reports", [])
        if "application" in kwargs:
            payload["resource_name"] = kwargs["application"].resource_name
            payload["resource_id"] = kwargs["application"].resource_id

        client = WebhooksIntegration.create_client(
            self.integration.config["alerts_webhook"]
        )
        client.send_to_hook(payload)
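To make the alert flow concrete, a sketch of pushing one alert payload by hand; the URL is a placeholder and the dictionary simply mirrors the keys notify_alert() builds above. The receiving endpoint must answer with a JSON body, because send_to_hook() calls .json() on the response:

client = WebhooksIntegration.create_client("https://hooks.example.com/appenlight")
client.send_to_hook(
    {
        "alert_action": "OPEN",  # illustrative values only
        "alert_name": "error_report_alert",
        "event_time": "2017-05-09T12:00:00",
        "resource_name": "my-app",
        "resource_id": 1,
    }
)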
@@ -1,130 +1,132 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sqlalchemy as sa
import logging
import hashlib

from datetime import datetime
from appenlight.models import Base
from appenlight.lib.utils import convert_es_type
from appenlight.lib.enums import LogLevel
from sqlalchemy.dialects.postgresql import JSON
from ziggurat_foundations.models.base import BaseModel

log = logging.getLogger(__name__)


class Log(Base, BaseModel):
    __tablename__ = "logs"
    __table_args__ = {"implicit_returning": False}

    log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey(
            "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
        ),
        nullable=False,
        index=True,
    )
    log_level = sa.Column(sa.Unicode, nullable=False, index=True, default="INFO")
    message = sa.Column(sa.UnicodeText(), default="")
    timestamp = sa.Column(
        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
    )
    request_id = sa.Column(sa.Unicode())
    namespace = sa.Column(sa.Unicode())
    primary_key = sa.Column(sa.Unicode())

    tags = sa.Column(JSON(), default={})
    permanent = sa.Column(sa.Boolean(), nullable=False, default=False)

    def __str__(self):
        return self.__unicode__().encode("utf8")

    def __unicode__(self):
        return "<Log id:%s, lv:%s, ns:%s >" % (
            self.log_id,
            self.log_level,
            self.namespace,
        )

    def set_data(self, data, resource):
        level = data.get("log_level").upper()
        self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN)
        self.message = data.get("message", "")
        server_name = data.get("server", "").lower() or "unknown"
        self.tags = {"server_name": server_name}
        if data.get("tags"):
            for tag_tuple in data["tags"]:
                self.tags[tag_tuple[0]] = tag_tuple[1]
        self.timestamp = data["date"]
        r_id = data.get("request_id", "")
        if not r_id:
            r_id = ""
        self.request_id = r_id.replace("-", "")
        self.resource_id = resource.resource_id
        self.namespace = data.get("namespace") or ""
        self.permanent = data.get("permanent")
        self.primary_key = data.get("primary_key")
        if self.primary_key is not None:
            self.tags["appenlight_primary_key"] = self.primary_key

    def get_dict(self):
        instance_dict = super(Log, self).get_dict()
        instance_dict["log_level"] = LogLevel.key_from_value(self.log_level)
        instance_dict["resource_name"] = self.application.resource_name
        return instance_dict

    @property
    def delete_hash(self):
        if not self.primary_key:
            return None

        to_hash = "{}_{}_{}".format(self.resource_id, self.primary_key, self.namespace)
        return hashlib.sha1(to_hash.encode("utf8")).hexdigest()

    def es_doc(self):
        tags = {}
        tag_list = []
        for name, value in self.tags.items():
            # replace dot in indexed tag name
            name = name.replace(".", "_")
            tag_list.append(name)
            tags[name] = {
                "values": convert_es_type(value),
                "numeric_values": value
                if (isinstance(value, (int, float)) and not isinstance(value, bool))
                else None,
            }
        return {
            "pg_id": str(self.log_id),
            "delete_hash": self.delete_hash,
            "resource_id": self.resource_id,
            "request_id": self.request_id,
            "log_level": LogLevel.key_from_value(self.log_level),
            "timestamp": self.timestamp,
            "message": self.message if self.message else "",
            "namespace": self.namespace if self.namespace else "",
            "tags": tags,
            "tag_list": tag_list,
        }

    @property
    def partition_id(self):
        if self.permanent:
            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m")
        else:
            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m_%d")
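The partition naming is easiest to see with a concrete timestamp — permanent rows go to monthly partitions, everything else to daily ones (the date below is arbitrary):

from datetime import datetime

row = Log()
row.timestamp = datetime(2017, 5, 9)
row.permanent = False
assert row.partition_id == "rcae_l_2017_05_09"  # daily partition
row.permanent = True
assert row.partition_id == "rcae_l_2017_05"  # monthly partition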
@@ -1,64 +1,68 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSON

from ziggurat_foundations.models.base import BaseModel
from appenlight.lib.utils import convert_es_type
from appenlight.models import Base


class Metric(Base, BaseModel):
    __tablename__ = "metrics"
    __table_args__ = {"implicit_returning": False}

    pkey = sa.Column(sa.BigInteger(), primary_key=True)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("applications.resource_id"),
        nullable=False,
        primary_key=True,
    )
    timestamp = sa.Column(
        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
    )
    tags = sa.Column(JSON(), default={})
    namespace = sa.Column(sa.Unicode(255))

    @property
    def partition_id(self):
        return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d")

    def es_doc(self):
        tags = {}
        tag_list = []
        for name, value in self.tags.items():
            # replace dot in indexed tag name
            name = name.replace(".", "_")
            tag_list.append(name)
            tags[name] = {
                "values": convert_es_type(value),
                "numeric_values": value
                if (isinstance(value, (int, float)) and not isinstance(value, bool))
                else None,
            }

        return {
            "resource_id": self.resource_id,
            "timestamp": self.timestamp,
            "namespace": self.namespace,
            "tags": tags,
            "tag_list": tag_list,
        }
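A quick illustration of the tag normalization in es_doc(): dots in tag names are rewritten so Elasticsearch does not treat them as nested paths, and numeric_values is populated only for real numbers (the tag values are arbitrary):

from datetime import datetime

m = Metric()
m.resource_id = 1
m.timestamp = datetime(2017, 5, 9, 12, 0)
m.namespace = "request_metrics"
m.tags = {"response.time": 0.25, "slow": True}
doc = m.es_doc()
assert "response_time" in doc["tags"]  # dot replaced with underscore
assert doc["tags"]["response_time"]["numeric_values"] == 0.25
assert doc["tags"]["slow"]["numeric_values"] is None  # bools are excluded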
@@ -1,40 +1,40 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sqlalchemy as sa
from ziggurat_foundations.models.base import BaseModel
from sqlalchemy.dialects.postgresql import JSON

from . import Base


class PluginConfig(Base, BaseModel):
    __tablename__ = "plugin_configs"

    id = sa.Column(sa.Integer, primary_key=True)
    plugin_name = sa.Column(sa.Unicode)
    section = sa.Column(sa.Unicode)
    config = sa.Column(JSON, nullable=False)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("resources.resource_id", onupdate="cascade", ondelete="cascade"),
    )
    owner_id = sa.Column(
        sa.Integer(), sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade")
    )

    def __json__(self, request):
        return self.get_dict()
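The __json__ hook is what lets a Pyramid view return these rows directly under renderer="json": Pyramid's JSON renderer calls __json__(request) on objects it cannot serialize natively, which here delegates to get_dict(). A sketch, assuming the usual appenlight DBSession and an illustrative route name:

from pyramid.view import view_config

from appenlight.models import DBSession  # assumed session factory


@view_config(route_name="plugin_configs", renderer="json")  # hypothetical route
def list_plugin_configs(request):
    # each row serializes via PluginConfig.__json__ -> get_dict()
    return DBSession.query(PluginConfig).all()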
NO CONTENT: modified file
The requested commit or file is too big and content was truncated.