@@ -1,49 +1,47 @@
|
1 | repoze.sendmail==4.1 | |
|
1 | repoze.sendmail==4.4.1 | |
|
2 | 2 | pyramid==1.10.2 |
|
3 | pyramid_tm== | |
|
3 | pyramid_tm==2.2.1 | |
|
4 | 4 | pyramid_debugtoolbar |
|
5 | 5 | pyramid_authstack==1.0.1 |
|
6 | SQLAlchemy==1. | |
|
6 | SQLAlchemy==1.2.18 | |
|
7 | 7 | alembic==1.0.8 |
|
8 | 8 | webhelpers2==2.0 |
|
9 | transaction== | |
|
10 | zope.sqlalchemy== | |
|
11 | pyramid_mailer==0.1 | |
|
12 | redis==2.1 | |
|
9 | transaction==2.4.0 | |
|
10 | zope.sqlalchemy==1.1 | |
|
11 | pyramid_mailer==0.15.1 | |
|
12 | redis==3.2.1 | |
|
13 | 13 | redlock-py==1.0.8 |
|
14 | pyramid_jinja2==2. | |
|
15 | psycopg2==2.7.7 | |
|
16 | wtforms==2.1 | |
|
17 | celery== | |
|
18 | formencode==1.3. | |
|
19 | psutil== | |
|
14 | pyramid_jinja2==2.8 | |
|
15 | psycopg2-binary==2.7.7 | |
|
16 | wtforms==2.2.1 | |
|
17 | celery==4.2.1 | |
|
18 | formencode==1.3.1 | |
|
19 | psutil==5.6.1 | |
|
20 | 20 | ziggurat_foundations==0.8.3 |
|
21 | 21 | bcrypt==3.1.6 |
|
22 | 22 | appenlight_client |
|
23 | markdown== | |
|
23 | markdown==3.0.1 | |
|
24 | 24 | colander==1.7 |
|
25 | 25 | defusedxml==0.5.0 |
|
26 | dogpile.cache==0. | |
|
26 | dogpile.cache==0.7.1 | |
|
27 | 27 | pyramid_redis_sessions==1.0.1 |
|
28 | simplejson==3. | |
|
29 | waitress==1. | |
|
28 | simplejson==3.16.0 | |
|
29 | waitress==1.2.1 | |
|
30 | 30 | gunicorn==19.9.0 |
|
31 | requests==2.20.0 | |
|
32 | requests_oauthlib==0.6.1 | |
|
33 | gevent==1.1.1 | |
|
34 | gevent-websocket==0.9.5 | |
|
35 | pygments==2. | |
|
31 | uwsgi==2.0.18 | |
|
32 | requests==2.21.0 | |
|
33 | requests_oauthlib==1.2.0 | |
|
34 | gevent==1.4.0 | |
|
35 | pygments==2.3.1 | |
|
36 | 36 | lxml==4.3.2 |
|
37 | paginate==0.5. | |
|
38 | paginate-sqlalchemy==0. | |
|
37 | paginate==0.5.6 | |
|
38 | paginate-sqlalchemy==0.3.0 | |
|
39 | 39 | elasticsearch>=2.0.0,<3.0.0 |
|
40 | six>=1.10.0 | |
|
41 | 40 | mock==1.0.1 |
|
42 | 41 | itsdangerous==1.1.0 |
|
43 | 42 | camplight==0.9.6 |
|
44 | 43 | jira==1.0.7 |
|
45 | 44 | python-dateutil==2.5.3 |
|
46 | 45 | authomatic==0.1.0.post1 |
|
47 | 46 | cryptography==2.6.1 |
|
48 | webassets==0.11.1 | |
|
49 | 47 |
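
Note on the hunk above: every pin in the requirements file moves forward in lockstep (celery to 4.2.1, redis to 3.2.1, psutil to 5.6.1, SQLAlchemy to 1.2.18), psycopg2 is swapped for the psycopg2-binary wheel, uwsgi is added, and six, gevent-websocket and webassets are dropped. A minimal sketch for sanity-checking such pins against the active environment follows; the check_pins helper is illustrative, not part of the changeset:

    # a minimal sketch, assuming a flat requirements file of "name==version"
    # pins; unpinned or range-pinned entries (pyramid_debugtoolbar,
    # elasticsearch>=2.0.0,<3.0.0) are skipped
    import pkg_resources

    def check_pins(path='requirements.txt'):
        for line in open(path):
            line = line.strip()
            if not line or line.startswith('#') or '==' not in line:
                continue
            name, _, pinned = line.partition('==')
            try:
                installed = pkg_resources.get_distribution(name).version
            except pkg_resources.DistributionNotFound:
                print('%s: pinned %s but not installed' % (name, pinned))
                continue
            if installed != pinned:
                print('%s: pinned %s, installed %s' % (name, pinned, installed))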
@@ -1,171 +1,171 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | from datetime import timedelta |
|
20 | 20 | from celery import Celery |
|
21 | 21 | from celery.bin import Option |
|
22 | 22 | from celery.schedules import crontab |
|
23 | 23 | from celery.signals import worker_init, task_revoked, user_preload_options |
|
24 | 24 | from celery.signals import task_prerun, task_retry, task_failure, task_success |
|
25 | 25 | from kombu.serialization import register |
|
26 | 26 | from pyramid.paster import bootstrap |
|
27 | 27 | from pyramid.request import Request |
|
28 | 28 | from pyramid.scripting import prepare |
|
29 | 29 | from pyramid.settings import asbool |
|
30 | 30 | from pyramid.threadlocal import get_current_request |
|
31 | 31 | |
|
32 | 32 | from appenlight.celery.encoders import json_dumps, json_loads |
|
33 | 33 | from appenlight_client.ext.celery import register_signals |
|
34 | 34 | |
|
35 | 35 | log = logging.getLogger(__name__) |
|
36 | 36 | |
|
37 | 37 | register('date_json', json_dumps, json_loads, |
|
38 | 38 | content_type='application/x-date_json', |
|
39 | 39 | content_encoding='utf-8') |
|
40 | 40 | |
|
41 | 41 | celery = Celery() |
|
42 | 42 | |
|
43 | 43 | celery.user_options['preload'].add( |
|
44 | 44 | Option('--ini', dest='ini', default=None, |
|
45 | 45 | help='Specifies pyramid configuration file location.') |
|
46 | 46 | ) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | @user_preload_options.connect |
|
50 | 50 | def on_preload_parsed(options, **kwargs): |
|
51 | 51 | """ |
|
52 | 52 | This actually configures celery from pyramid config file |
|
53 | 53 | """ |
|
54 | 54 | celery.conf['INI_PYRAMID'] = options['ini'] |
|
55 | 55 | import appenlight_client.client as e_client |
|
56 | 56 | ini_location = options['ini'] |
|
57 | 57 | if not ini_location: |
|
58 | 58 | raise Exception('You need to pass pyramid ini location using ' |
|
59 | 59 | '--ini=filename.ini argument to the worker') |
|
60 | env = bootstrap(ini_location) | |
|
60 | env = bootstrap(ini_location[0]) | |
|
61 | 61 | api_key = env['request'].registry.settings['appenlight.api_key'] |
|
62 | 62 | tr_config = env['request'].registry.settings.get( |
|
63 | 63 | 'appenlight.transport_config') |
|
64 | 64 | CONFIG = e_client.get_config({'appenlight.api_key': api_key}) |
|
65 | 65 | if tr_config: |
|
66 | 66 | CONFIG['appenlight.transport_config'] = tr_config |
|
67 | 67 | APPENLIGHT_CLIENT = e_client.Client(CONFIG) |
|
68 | 68 | # log.addHandler(APPENLIGHT_CLIENT.log_handler) |
|
69 | 69 | register_signals(APPENLIGHT_CLIENT) |
|
70 | 70 | celery.pyramid = env |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | celery_config = { |
|
74 | 74 | 'CELERY_IMPORTS': ["appenlight.celery.tasks", ], |
|
75 | 75 | 'CELERYD_TASK_TIME_LIMIT': 60, |
|
76 | 76 | 'CELERYD_MAX_TASKS_PER_CHILD': 1000, |
|
77 | 77 | 'CELERY_IGNORE_RESULT': True, |
|
78 | 78 | 'CELERY_ACCEPT_CONTENT': ['date_json'], |
|
79 | 79 | 'CELERY_TASK_SERIALIZER': 'date_json', |
|
80 | 80 | 'CELERY_RESULT_SERIALIZER': 'date_json', |
|
81 | 81 | 'BROKER_URL': None, |
|
82 | 82 | 'CELERYD_CONCURRENCY': None, |
|
83 | 83 | 'CELERY_TIMEZONE': None, |
|
84 | 84 | 'CELERYBEAT_SCHEDULE': { |
|
85 | 85 | 'alerting_reports': { |
|
86 | 86 | 'task': 'appenlight.celery.tasks.alerting_reports', |
|
87 | 87 | 'schedule': timedelta(seconds=60) |
|
88 | 88 | }, |
|
89 | 89 | 'close_alerts': { |
|
90 | 90 | 'task': 'appenlight.celery.tasks.close_alerts', |
|
91 | 91 | 'schedule': timedelta(seconds=60) |
|
92 | 92 | } |
|
93 | 93 | } |
|
94 | 94 | } |
|
95 | 95 | celery.config_from_object(celery_config) |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | def configure_celery(pyramid_registry): |
|
99 | 99 | settings = pyramid_registry.settings |
|
100 | 100 | celery_config['BROKER_URL'] = settings['celery.broker_url'] |
|
101 | 101 | celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency'] |
|
102 | 102 | celery_config['CELERY_TIMEZONE'] = settings['celery.timezone'] |
|
103 | 103 | |
|
104 | 104 | notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60)) |
|
105 | 105 | |
|
106 | 106 | celery_config['CELERYBEAT_SCHEDULE']['notifications'] = { |
|
107 | 107 | 'task': 'appenlight.celery.tasks.notifications_reports', |
|
108 | 108 | 'schedule': timedelta(seconds=notifications_seconds) |
|
109 | 109 | } |
|
110 | 110 | |
|
111 | 111 | celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = { |
|
112 | 112 | 'task': 'appenlight.celery.tasks.daily_digest', |
|
113 | 113 | 'schedule': crontab(minute=1, hour='4,12,20') |
|
114 | 114 | } |
|
115 | 115 | |
|
116 | 116 | if asbool(settings.get('celery.always_eager')): |
|
117 | 117 | celery_config['CELERY_ALWAYS_EAGER'] = True |
|
118 | 118 | celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True |
|
119 | 119 | |
|
120 | 120 | for plugin in pyramid_registry.appenlight_plugins.values(): |
|
121 | 121 | if plugin.get('celery_tasks'): |
|
122 | 122 | celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks']) |
|
123 | 123 | if plugin.get('celery_beats'): |
|
124 | 124 | for name, config in plugin['celery_beats']: |
|
125 | 125 | celery_config['CELERYBEAT_SCHEDULE'][name] = config |
|
126 | 126 | celery.config_from_object(celery_config) |
|
127 | 127 | |
|
128 | 128 | |
|
129 | 129 | @task_prerun.connect |
|
130 | 130 | def task_prerun_signal(task_id, task, args, kwargs, **kwaargs): |
|
131 | 131 | if hasattr(celery, 'pyramid'): |
|
132 | 132 | env = celery.pyramid |
|
133 | 133 | env = prepare(registry=env['request'].registry) |
|
134 | 134 | proper_base_url = env['request'].registry.settings['mailing.app_url'] |
|
135 | 135 | tmp_req = Request.blank('/', base_url=proper_base_url) |
|
136 | 136 | # ensure tasks generate url for right domain from config |
|
137 | 137 | env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST'] |
|
138 | 138 | env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT'] |
|
139 | 139 | env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME'] |
|
140 | 140 | env['request'].environ['wsgi.url_scheme'] = \ |
|
141 | 141 | tmp_req.environ['wsgi.url_scheme'] |
|
142 | 142 | get_current_request().tm.begin() |
|
143 | 143 | |
|
144 | 144 | |
|
145 | 145 | @task_success.connect |
|
146 | 146 | def task_success_signal(result, **kwargs): |
|
147 | 147 | get_current_request().tm.commit() |
|
148 | 148 | if hasattr(celery, 'pyramid'): |
|
149 | 149 | celery.pyramid["closer"]() |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | @task_retry.connect |
|
153 | 153 | def task_retry_signal(request, reason, einfo, **kwargs): |
|
154 | 154 | get_current_request().tm.abort() |
|
155 | 155 | if hasattr(celery, 'pyramid'): |
|
156 | 156 | celery.pyramid["closer"]() |
|
157 | 157 | |
|
158 | 158 | |
|
159 | 159 | @task_failure.connect |
|
160 | 160 | def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, |
|
161 | 161 | **kwaargs): |
|
162 | 162 | get_current_request().tm.abort() |
|
163 | 163 | if hasattr(celery, 'pyramid'): |
|
164 | 164 | celery.pyramid["closer"]() |
|
165 | 165 | |
|
166 | 166 | |
|
167 | 167 | @task_revoked.connect |
|
168 | 168 | def task_revoked_signal(request, terminated, signum, expired, **kwaargs): |
|
169 | 169 | get_current_request().tm.abort() |
|
170 | 170 | if hasattr(celery, 'pyramid'): |
|
171 | 171 | celery.pyramid["closer"]() |
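
Note on the celery module above: the one behavioral fix is bootstrap(ini_location[0]). Under the bumped Celery the --ini preload option evidently arrives as a list, so the first element is unwrapped before bootstrapping the Pyramid environment. The signal handlers then tie each task to a Pyramid transaction: prerun begins it, success commits, and retry/failure/revoked abort. A minimal sketch of that lifecycle with assumed names, outside of Celery:

    # a minimal sketch of the per-task transaction lifecycle the
    # task_prerun/task_success/task_failure handlers above implement
    import transaction

    def run_with_tm(task_fn, *args, **kwargs):
        tm = transaction.manager
        tm.begin()              # task_prerun_signal
        try:
            result = task_fn(*args, **kwargs)
        except Exception:
            tm.abort()          # task_retry/_failure/_revoked signals
            raise
        tm.commit()             # task_success_signal
        return result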
@@ -1,183 +1,184 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import copy |
|
18 | 18 | import hashlib |
|
19 | 19 | import inspect |
|
20 | 20 | |
|
21 | from dogpile.cache import make_region | |
|
21 | from dogpile.cache import make_region | |
|
22 | from dogpile.cache.util import compat | |
|
22 | 23 | |
|
23 | 24 | regions = None |
|
24 | 25 | |
|
25 | 26 | |
|
26 | 27 | def key_mangler(key): |
|
27 | 28 | return "appenlight:dogpile:{}".format(key) |
|
28 | 29 | |
|
29 | 30 | |
|
30 | 31 | def hashgen(namespace, fn, to_str=compat.string_type): |
|
31 | 32 | """Return a function that generates a string |
|
32 | 33 | key, based on a given function as well as |
|
33 | 34 | arguments to the returned function itself. |
|
34 | 35 | |
|
35 | 36 | This is used by :meth:`.CacheRegion.cache_on_arguments` |
|
36 | 37 | to generate a cache key from a decorated function. |
|
37 | 38 | |
|
38 | 39 | It can be replaced using the ``function_key_generator`` |
|
39 | 40 | argument passed to :func:`.make_region`. |
|
40 | 41 | |
|
41 | 42 | """ |
|
42 | 43 | |
|
43 | 44 | if namespace is None: |
|
44 | 45 | namespace = '%s:%s' % (fn.__module__, fn.__name__) |
|
45 | 46 | else: |
|
46 | 47 | namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace) |
|
47 | 48 | |
|
48 | 49 | args = inspect.getargspec(fn) |
|
49 | 50 | has_self = args[0] and args[0][0] in ('self', 'cls') |
|
50 | 51 | |
|
51 | 52 | def generate_key(*args, **kw): |
|
52 | 53 | if kw: |
|
53 | 54 | raise ValueError( |
|
54 | 55 | "dogpile.cache's default key creation " |
|
55 | 56 | "function does not accept keyword arguments.") |
|
56 | 57 | if has_self: |
|
57 | 58 | args = args[1:] |
|
58 | 59 | |
|
59 | 60 | return namespace + "|" + hashlib.sha1( |
|
60 | 61 | " ".join(map(to_str, args)).encode('utf8')).hexdigest() |
|
61 | 62 | |
|
62 | 63 | return generate_key |
|
63 | 64 | |
|
64 | 65 | |
|
65 | 66 | class CacheRegions(object): |
|
66 | 67 | def __init__(self, settings): |
|
67 | 68 | config_redis = {"arguments": settings} |
|
68 | 69 | |
|
69 | 70 | self.redis_min_1 = make_region( |
|
70 | 71 | function_key_generator=hashgen, |
|
71 | 72 | key_mangler=key_mangler).configure( |
|
72 | 73 | "dogpile.cache.redis", |
|
73 | 74 | expiration_time=60, |
|
74 | 75 | **copy.deepcopy(config_redis)) |
|
75 | 76 | self.redis_min_5 = make_region( |
|
76 | 77 | function_key_generator=hashgen, |
|
77 | 78 | key_mangler=key_mangler).configure( |
|
78 | 79 | "dogpile.cache.redis", |
|
79 | 80 | expiration_time=300, |
|
80 | 81 | **copy.deepcopy(config_redis)) |
|
81 | 82 | |
|
82 | 83 | self.redis_min_10 = make_region( |
|
83 | 84 | function_key_generator=hashgen, |
|
84 | 85 | key_mangler=key_mangler).configure( |
|
85 | 86 | "dogpile.cache.redis", |
|
86 | 87 | expiration_time=60, |
|
87 | 88 | **copy.deepcopy(config_redis)) |
|
88 | 89 | |
|
89 | 90 | self.redis_min_60 = make_region( |
|
90 | 91 | function_key_generator=hashgen, |
|
91 | 92 | key_mangler=key_mangler).configure( |
|
92 | 93 | "dogpile.cache.redis", |
|
93 | 94 | expiration_time=3600, |
|
94 | 95 | **copy.deepcopy(config_redis)) |
|
95 | 96 | |
|
96 | 97 | self.redis_sec_1 = make_region( |
|
97 | 98 | function_key_generator=hashgen, |
|
98 | 99 | key_mangler=key_mangler).configure( |
|
99 | 100 | "dogpile.cache.redis", |
|
100 | 101 | expiration_time=1, |
|
101 | 102 | **copy.deepcopy(config_redis)) |
|
102 | 103 | |
|
103 | 104 | self.redis_sec_5 = make_region( |
|
104 | 105 | function_key_generator=hashgen, |
|
105 | 106 | key_mangler=key_mangler).configure( |
|
106 | 107 | "dogpile.cache.redis", |
|
107 | 108 | expiration_time=5, |
|
108 | 109 | **copy.deepcopy(config_redis)) |
|
109 | 110 | |
|
110 | 111 | self.redis_sec_30 = make_region( |
|
111 | 112 | function_key_generator=hashgen, |
|
112 | 113 | key_mangler=key_mangler).configure( |
|
113 | 114 | "dogpile.cache.redis", |
|
114 | 115 | expiration_time=30, |
|
115 | 116 | **copy.deepcopy(config_redis)) |
|
116 | 117 | |
|
117 | 118 | self.redis_day_1 = make_region( |
|
118 | 119 | function_key_generator=hashgen, |
|
119 | 120 | key_mangler=key_mangler).configure( |
|
120 | 121 | "dogpile.cache.redis", |
|
121 | 122 | expiration_time=86400, |
|
122 | 123 | **copy.deepcopy(config_redis)) |
|
123 | 124 | |
|
124 | 125 | self.redis_day_7 = make_region( |
|
125 | 126 | function_key_generator=hashgen, |
|
126 | 127 | key_mangler=key_mangler).configure( |
|
127 | 128 | "dogpile.cache.redis", |
|
128 | 129 | expiration_time=86400 * 7, |
|
129 | 130 | **copy.deepcopy(config_redis)) |
|
130 | 131 | |
|
131 | 132 | self.redis_day_30 = make_region( |
|
132 | 133 | function_key_generator=hashgen, |
|
133 | 134 | key_mangler=key_mangler).configure( |
|
134 | 135 | "dogpile.cache.redis", |
|
135 | 136 | expiration_time=86400 * 30, |
|
136 | 137 | **copy.deepcopy(config_redis)) |
|
137 | 138 | |
|
138 | 139 | self.memory_day_1 = make_region( |
|
139 | 140 | function_key_generator=hashgen, |
|
140 | 141 | key_mangler=key_mangler).configure( |
|
141 | 142 | "dogpile.cache.memory", |
|
142 | 143 | expiration_time=86400, |
|
143 | 144 | **copy.deepcopy(config_redis)) |
|
144 | 145 | |
|
145 | 146 | self.memory_sec_1 = make_region( |
|
146 | 147 | function_key_generator=hashgen, |
|
147 | 148 | key_mangler=key_mangler).configure( |
|
148 | 149 | "dogpile.cache.memory", |
|
149 | 150 | expiration_time=1) |
|
150 | 151 | |
|
151 | 152 | self.memory_sec_5 = make_region( |
|
152 | 153 | function_key_generator=hashgen, |
|
153 | 154 | key_mangler=key_mangler).configure( |
|
154 | 155 | "dogpile.cache.memory", |
|
155 | 156 | expiration_time=5) |
|
156 | 157 | |
|
157 | 158 | self.memory_min_1 = make_region( |
|
158 | 159 | function_key_generator=hashgen, |
|
159 | 160 | key_mangler=key_mangler).configure( |
|
160 | 161 | "dogpile.cache.memory", |
|
161 | 162 | expiration_time=60) |
|
162 | 163 | |
|
163 | 164 | self.memory_min_5 = make_region( |
|
164 | 165 | function_key_generator=hashgen, |
|
165 | 166 | key_mangler=key_mangler).configure( |
|
166 | 167 | "dogpile.cache.memory", |
|
167 | 168 | expiration_time=300) |
|
168 | 169 | |
|
169 | 170 | self.memory_min_10 = make_region( |
|
170 | 171 | function_key_generator=hashgen, |
|
171 | 172 | key_mangler=key_mangler).configure( |
|
172 | 173 | "dogpile.cache.memory", |
|
173 | 174 | expiration_time=600) |
|
174 | 175 | |
|
175 | 176 | self.memory_min_60 = make_region( |
|
176 | 177 | function_key_generator=hashgen, |
|
177 | 178 | key_mangler=key_mangler).configure( |
|
178 | 179 | "dogpile.cache.memory", |
|
179 | 180 | expiration_time=3600) |
|
180 | 181 | |
|
181 | 182 | |
|
182 | 183 | def get_region(region): |
|
183 | 184 | return getattr(regions, region) |
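
Note on the cache-regions module above: the only change is the explicit from dogpile.cache.util import compat, which keeps hashgen's to_str=compat.string_type default resolving under the pinned dogpile.cache 0.7.1. A minimal usage sketch of the resulting regions; the module path and Redis URL are assumptions:

    # illustrative only: wire up the regions, then cache a function in
    # the 5-minute Redis region (keys are built by hashgen() and get the
    # "appenlight:dogpile:" prefix from key_mangler)
    from appenlight.lib import cache_regions  # assumed module path

    cache_regions.regions = cache_regions.CacheRegions(
        {'url': 'redis://localhost:6379/0'})

    @cache_regions.get_region('redis_min_5').cache_on_arguments()
    def expensive_lookup(resource_id):
        return {'resource': resource_id}  # recomputed at most every 300 s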
@@ -1,32 +1,32 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from ziggurat_foundations.models.base import BaseModel |
|
19 | from sqlalchemy.dialects.postgres import JSON | |
|
19 | from sqlalchemy.dialects.postgresql import JSON | |
|
20 | 20 | |
|
21 | 21 | from . import Base |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | class Config(Base, BaseModel): |
|
25 | 25 | __tablename__ = 'config' |
|
26 | 26 | |
|
27 | 27 | key = sa.Column(sa.Unicode, primary_key=True) |
|
28 | 28 | section = sa.Column(sa.Unicode, primary_key=True) |
|
29 | 29 | value = sa.Column(JSON, nullable=False) |
|
30 | 30 | |
|
31 | 31 | def __json__(self, request): |
|
32 | 32 | return self.get_dict() |
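
Note on the model above (and the PluginConfig and Tag models below, which get the same one-line fix): the sqlalchemy.dialects.postgres alias is long deprecated in favor of the canonical sqlalchemy.dialects.postgresql name, and the bump to SQLAlchemy 1.2.18 is the occasion to move off it. Illustrative use of the JSON-typed value column; the key and section literals are made up:

    # session/transaction handling elided; Config has a composite
    # (key, section) primary key, so Query.get() takes a tuple
    row = Config(key='global_alerts', section='global',
                 value={'enabled': True})
    DBSession.add(row)
    DBSession.flush()
    fetched = DBSession.query(Config).get(('global_alerts', 'global'))
    assert fetched.value['enabled'] is True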
@@ -1,40 +1,40 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from ziggurat_foundations.models.base import BaseModel |
|
19 | from sqlalchemy.dialects.postgres import JSON | |
|
19 | from sqlalchemy.dialects.postgresql import JSON | |
|
20 | 20 | |
|
21 | 21 | from . import Base |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | class PluginConfig(Base, BaseModel): |
|
25 | 25 | __tablename__ = 'plugin_configs' |
|
26 | 26 | |
|
27 | 27 | id = sa.Column(sa.Integer, primary_key=True) |
|
28 | 28 | plugin_name = sa.Column(sa.Unicode) |
|
29 | 29 | section = sa.Column(sa.Unicode) |
|
30 | 30 | config = sa.Column(JSON, nullable=False) |
|
31 | 31 | resource_id = sa.Column(sa.Integer(), |
|
32 | 32 | sa.ForeignKey('resources.resource_id', |
|
33 | 33 | onupdate='cascade', |
|
34 | 34 | ondelete='cascade')) |
|
35 | 35 | owner_id = sa.Column(sa.Integer(), |
|
36 | 36 | sa.ForeignKey('users.id', onupdate='cascade', |
|
37 | 37 | ondelete='cascade')) |
|
38 | 38 | |
|
39 | 39 | def __json__(self, request): |
|
40 | 40 | return self.get_dict() |
@@ -1,268 +1,268 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import sqlalchemy as sa |
|
19 | 19 | |
|
20 | 20 | from datetime import datetime, timedelta |
|
21 | 21 | |
|
22 | 22 | from pyramid.threadlocal import get_current_request |
|
23 | 23 | from sqlalchemy.dialects.postgresql import JSON |
|
24 | 24 | from ziggurat_foundations.models.base import BaseModel |
|
25 | 25 | |
|
26 | 26 | from appenlight.models import Base, get_db_session, Datastores |
|
27 | 27 | from appenlight.lib.enums import ReportType |
|
28 | 28 | from appenlight.lib.rule import Rule |
|
29 | 29 | from appenlight.lib.redis_keys import REDIS_KEYS |
|
30 | 30 | from appenlight.models.report import REPORT_TYPE_MATRIX |
|
31 | 31 | |
|
32 | 32 | log = logging.getLogger(__name__) |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | class ReportGroup(Base, BaseModel): |
|
36 | 36 | __tablename__ = 'reports_groups' |
|
37 | 37 | __table_args__ = {'implicit_returning': False} |
|
38 | 38 | |
|
39 | 39 | id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True) |
|
40 | 40 | resource_id = sa.Column(sa.Integer(), |
|
41 | 41 | sa.ForeignKey('applications.resource_id', |
|
42 | 42 | onupdate='CASCADE', |
|
43 | 43 | ondelete='CASCADE'), |
|
44 | 44 | nullable=False, |
|
45 | 45 | index=True) |
|
46 | 46 | priority = sa.Column(sa.Integer, nullable=False, index=True, default=5, |
|
47 | 47 | server_default='5') |
|
48 | 48 | first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, |
|
49 | 49 | server_default=sa.func.now()) |
|
50 | 50 | last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, |
|
51 | 51 | server_default=sa.func.now()) |
|
52 | 52 | error = sa.Column(sa.UnicodeText(), index=True) |
|
53 | 53 | grouping_hash = sa.Column(sa.String(40), default='') |
|
54 | 54 | triggered_postprocesses_ids = sa.Column(JSON(), nullable=False, |
|
55 | 55 | default=list) |
|
56 | 56 | report_type = sa.Column(sa.Integer, default=1) |
|
57 | 57 | total_reports = sa.Column(sa.Integer, default=1) |
|
58 | 58 | last_report = sa.Column(sa.Integer) |
|
59 | 59 | occurences = sa.Column(sa.Integer, default=1) |
|
60 | 60 | average_duration = sa.Column(sa.Float, default=0) |
|
61 | 61 | summed_duration = sa.Column(sa.Float, default=0) |
|
62 | 62 | read = sa.Column(sa.Boolean(), index=True, default=False) |
|
63 | 63 | fixed = sa.Column(sa.Boolean(), index=True, default=False) |
|
64 | 64 | notified = sa.Column(sa.Boolean(), index=True, default=False) |
|
65 | 65 | public = sa.Column(sa.Boolean(), index=True, default=False) |
|
66 | 66 | |
|
67 | 67 | reports = sa.orm.relationship('Report', |
|
68 | 68 | lazy='dynamic', |
|
69 | 69 | backref='report_group', |
|
70 | 70 | cascade="all, delete-orphan", |
|
71 | 71 | passive_deletes=True, |
|
72 | 72 | passive_updates=True, ) |
|
73 | 73 | |
|
74 | 74 | comments = sa.orm.relationship('ReportComment', |
|
75 | 75 | lazy='dynamic', |
|
76 | 76 | backref='report', |
|
77 | 77 | cascade="all, delete-orphan", |
|
78 | 78 | passive_deletes=True, |
|
79 | 79 | passive_updates=True, |
|
80 | 80 | order_by="ReportComment.comment_id") |
|
81 | 81 | |
|
82 | 82 | assigned_users = sa.orm.relationship('User', |
|
83 | 83 | backref=sa.orm.backref( |
|
84 | 84 | 'assigned_reports_relation', |
|
85 | 85 | lazy='dynamic', |
|
86 | 86 | order_by=sa.desc( |
|
87 | "reports_groups.id") | |
|
87 | sa.text("reports_groups.id")) | |
|
88 | 88 | ), |
|
89 | 89 | passive_deletes=True, |
|
90 | 90 | passive_updates=True, |
|
91 | 91 | secondary='reports_assignments', |
|
92 | 92 | order_by="User.user_name") |
|
93 | 93 | |
|
94 | 94 | stats = sa.orm.relationship('ReportStat', |
|
95 | 95 | lazy='dynamic', |
|
96 | 96 | backref='report', |
|
97 | 97 | passive_deletes=True, |
|
98 | 98 | passive_updates=True, ) |
|
99 | 99 | |
|
100 | 100 | last_report_ref = sa.orm.relationship('Report', |
|
101 | 101 | uselist=False, |
|
102 | 102 | primaryjoin="ReportGroup.last_report " |
|
103 | 103 | "== Report.id", |
|
104 | 104 | foreign_keys="Report.id", |
|
105 | 105 | cascade="all, delete-orphan", |
|
106 | 106 | passive_deletes=True, |
|
107 | 107 | passive_updates=True, ) |
|
108 | 108 | |
|
109 | 109 | def __repr__(self): |
|
110 | 110 | return '<ReportGroup id:{}>'.format(self.id) |
|
111 | 111 | |
|
112 | 112 | def get_report(self, report_id=None, public=False): |
|
113 | 113 | """ |
|
114 | 114 | Gets report with specific id or latest report if id was not specified |
|
115 | 115 | """ |
|
116 | 116 | from .report import Report |
|
117 | 117 | |
|
118 | 118 | if not report_id: |
|
119 | 119 | return self.last_report_ref |
|
120 | 120 | else: |
|
121 | 121 | return self.reports.filter(Report.id == report_id).first() |
|
122 | 122 | |
|
123 | 123 | def get_public_url(self, request, _app_url=None): |
|
124 | 124 | url = request.route_url('/', _app_url=_app_url) |
|
125 | 125 | return (url + 'ui/report/%s') % self.id |
|
126 | 126 | |
|
127 | 127 | def run_postprocessing(self, report): |
|
128 | 128 | """ |
|
129 | 129 | Alters report group priority based on postprocessing configuration |
|
130 | 130 | """ |
|
131 | 131 | request = get_current_request() |
|
132 | 132 | get_db_session(None, self).flush() |
|
133 | 133 | for action in self.application.postprocess_conf: |
|
134 | 134 | get_db_session(None, self).flush() |
|
135 | 135 | rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX) |
|
136 | 136 | report_dict = report.get_dict(request) |
|
137 | 137 | # if was not processed yet |
|
138 | 138 | if (rule_obj.match(report_dict) and |
|
139 | 139 | action.pkey not in self.triggered_postprocesses_ids): |
|
140 | 140 | action.postprocess(self) |
|
141 | 141 | # this way sqla can track mutation of list |
|
142 | 142 | self.triggered_postprocesses_ids = \ |
|
143 | 143 | self.triggered_postprocesses_ids + [action.pkey] |
|
144 | 144 | |
|
145 | 145 | get_db_session(None, self).flush() |
|
146 | 146 | # do not go out of bounds |
|
147 | 147 | if self.priority < 1: |
|
148 | 148 | self.priority = 1 |
|
149 | 149 | if self.priority > 10: |
|
150 | 150 | self.priority = 10 |
|
151 | 151 | |
|
152 | 152 | def get_dict(self, request): |
|
153 | 153 | instance_dict = super(ReportGroup, self).get_dict() |
|
154 | 154 | instance_dict['server_name'] = self.get_report().tags.get( |
|
155 | 155 | 'server_name') |
|
156 | 156 | instance_dict['view_name'] = self.get_report().tags.get('view_name') |
|
157 | 157 | instance_dict['resource_name'] = self.application.resource_name |
|
158 | 158 | instance_dict['report_type'] = self.get_report().report_type |
|
159 | 159 | instance_dict['url_path'] = self.get_report().url_path |
|
160 | 160 | instance_dict['front_url'] = self.get_report().get_public_url(request) |
|
161 | 161 | del instance_dict['triggered_postprocesses_ids'] |
|
162 | 162 | return instance_dict |
|
163 | 163 | |
|
164 | 164 | def es_doc(self): |
|
165 | 165 | return { |
|
166 | 166 | '_id': str(self.id), |
|
167 | 167 | 'pg_id': str(self.id), |
|
168 | 168 | 'resource_id': self.resource_id, |
|
169 | 169 | 'error': self.error, |
|
170 | 170 | 'fixed': self.fixed, |
|
171 | 171 | 'public': self.public, |
|
172 | 172 | 'read': self.read, |
|
173 | 173 | 'priority': self.priority, |
|
174 | 174 | 'occurences': self.occurences, |
|
175 | 175 | 'average_duration': self.average_duration, |
|
176 | 176 | 'summed_duration': self.summed_duration, |
|
177 | 177 | 'first_timestamp': self.first_timestamp, |
|
178 | 178 | 'last_timestamp': self.last_timestamp |
|
179 | 179 | } |
|
180 | 180 | |
|
181 | 181 | def set_notification_info(self, notify_10=False, notify_100=False): |
|
182 | 182 | """ |
|
183 | 183 | Update redis notification maps for notification job |
|
184 | 184 | """ |
|
185 | 185 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
186 | 186 | # global app counter |
|
187 | 187 | key = REDIS_KEYS['counters']['reports_per_type'].format( |
|
188 | 188 | self.report_type, current_time) |
|
189 | 189 | redis_pipeline = Datastores.redis.pipeline() |
|
190 | 190 | redis_pipeline.incr(key) |
|
191 | 191 | redis_pipeline.expire(key, 3600 * 24) |
|
192 | 192 | # detailed app notification for alerts and notifications |
|
193 | 193 | redis_pipeline.sadd( |
|
194 | 194 | REDIS_KEYS['apps_that_had_reports'], self.resource_id) |
|
195 | 195 | redis_pipeline.sadd( |
|
196 | 196 | REDIS_KEYS['apps_that_had_reports_alerting'], self.resource_id) |
|
197 | 197 | # only notify for exceptions here |
|
198 | 198 | if self.report_type == ReportType.error: |
|
199 | 199 | redis_pipeline.sadd( |
|
200 | 200 | REDIS_KEYS['apps_that_had_reports'], self.resource_id) |
|
201 | 201 | redis_pipeline.sadd( |
|
202 | 202 | REDIS_KEYS['apps_that_had_error_reports_alerting'], |
|
203 | 203 | self.resource_id) |
|
204 | 204 | key = REDIS_KEYS['counters']['report_group_occurences'].format(self.id) |
|
205 | 205 | redis_pipeline.incr(key) |
|
206 | 206 | redis_pipeline.expire(key, 3600 * 24) |
|
207 | 207 | key = REDIS_KEYS['counters']['report_group_occurences_alerting'].format( |
|
208 | 208 | self.id) |
|
209 | 209 | redis_pipeline.incr(key) |
|
210 | 210 | redis_pipeline.expire(key, 3600 * 24) |
|
211 | 211 | |
|
212 | 212 | if notify_10: |
|
213 | 213 | key = REDIS_KEYS['counters'][ |
|
214 | 214 | 'report_group_occurences_10th'].format(self.id) |
|
215 | 215 | redis_pipeline.setex(key, 3600 * 24, 1) |
|
216 | 216 | if notify_100: |
|
217 | 217 | key = REDIS_KEYS['counters'][ |
|
218 | 218 | 'report_group_occurences_100th'].format(self.id) |
|
219 | 219 | redis_pipeline.setex(key, 3600 * 24, 1) |
|
220 | 220 | |
|
221 | 221 | key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
222 | 222 | self.report_type, self.resource_id) |
|
223 | 223 | redis_pipeline.sadd(key, self.id) |
|
224 | 224 | redis_pipeline.expire(key, 3600 * 24) |
|
225 | 225 | key = REDIS_KEYS['reports_to_notify_per_type_per_app_alerting'].format( |
|
226 | 226 | self.report_type, self.resource_id) |
|
227 | 227 | redis_pipeline.sadd(key, self.id) |
|
228 | 228 | redis_pipeline.expire(key, 3600 * 24) |
|
229 | 229 | redis_pipeline.execute() |
|
230 | 230 | |
|
231 | 231 | @property |
|
232 | 232 | def partition_id(self): |
|
233 | 233 | return 'rcae_r_%s' % self.first_timestamp.strftime('%Y_%m') |
|
234 | 234 | |
|
235 | 235 | def partition_range(self): |
|
236 | 236 | start_date = self.first_timestamp.date().replace(day=1) |
|
237 | 237 | end_date = start_date + timedelta(days=40) |
|
238 | 238 | end_date = end_date.replace(day=1) |
|
239 | 239 | return start_date, end_date |
|
240 | 240 | |
|
241 | 241 | |
|
242 | 242 | def after_insert(mapper, connection, target): |
|
243 | 243 | if not hasattr(target, '_skip_ft_index'): |
|
244 | 244 | data = target.es_doc() |
|
245 | 245 | data.pop('_id', None) |
|
246 | 246 | Datastores.es.index(target.partition_id, 'report_group', |
|
247 | 247 | data, id=target.id) |
|
248 | 248 | |
|
249 | 249 | |
|
250 | 250 | def after_update(mapper, connection, target): |
|
251 | 251 | if not hasattr(target, '_skip_ft_index'): |
|
252 | 252 | data = target.es_doc() |
|
253 | 253 | data.pop('_id', None) |
|
254 | 254 | Datastores.es.index(target.partition_id, 'report_group', |
|
255 | 255 | data, id=target.id) |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | def after_delete(mapper, connection, target): |
|
259 | 259 | query = {"query": {'term': {'group_id': target.id}}} |
|
260 | 260 | # delete by query |
|
261 | 261 | Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report'), body=query) |
|
262 | 262 | query = {"query": {'term': {'pg_id': target.id}}} |
|
263 | 263 | Datastores.es.transport.perform_request("DELETE", '/{}/{}/_query'.format(target.partition_id, 'report_group'), body=query) |
|
264 | 264 | |
|
265 | 265 | |
|
266 | 266 | sa.event.listen(ReportGroup, 'after_insert', after_insert) |
|
267 | 267 | sa.event.listen(ReportGroup, 'after_update', after_update) |
|
268 | 268 | sa.event.listen(ReportGroup, 'after_delete', after_delete) |
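
Note on the ReportGroup hunk above: the single change wraps the backref's textual order_by in sa.text(). SQLAlchemy deprecated passing plain strings where SQL expressions are expected, and releases after the pinned 1.2 line warn about or reject them outright, so the explicit wrapper future-proofs the relationship. The pattern in isolation:

    # the pattern behind the backref fix above
    import sqlalchemy as sa

    # before (deprecated): order_by=sa.desc("reports_groups.id")
    order_by = sa.desc(sa.text("reports_groups.id"))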
@@ -1,37 +1,37 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from datetime import datetime |
|
19 | 19 | from ziggurat_foundations.models.base import BaseModel |
|
20 | from sqlalchemy.dialects.postgres import JSON | |
|
20 | from sqlalchemy.dialects.postgresql import JSON | |
|
21 | 21 | |
|
22 | 22 | from . import Base |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class Tag(Base, BaseModel): |
|
26 | 26 | __tablename__ = 'tags' |
|
27 | 27 | |
|
28 | 28 | id = sa.Column(sa.Integer, primary_key=True) |
|
29 | 29 | resource_id = sa.Column(sa.Integer, |
|
30 | 30 | sa.ForeignKey('resources.resource_id')) |
|
31 | 31 | name = sa.Column(sa.Unicode(512), nullable=False) |
|
32 | 32 | value = sa.Column(JSON, nullable=False) |
|
33 | 33 | first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, |
|
34 | 34 | server_default=sa.func.now()) |
|
35 | 35 | last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, |
|
36 | 36 | server_default=sa.func.now()) |
|
37 | 37 | times_seen = sa.Column(sa.Integer, nullable=False, default=0) |
@@ -1,193 +1,192 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import os |
|
19 | 19 | import pkg_resources |
|
20 | 20 | |
|
21 | 21 | from datetime import datetime, timedelta |
|
22 | 22 | |
|
23 | 23 | import psutil |
|
24 | 24 | import redis |
|
25 | 25 | |
|
26 | 26 | from pyramid.view import view_config |
|
27 | 27 | from appenlight.models import DBSession |
|
28 | 28 | from appenlight.models import Datastores |
|
29 | 29 | from appenlight.lib.redis_keys import REDIS_KEYS |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def bytes2human(total): |
|
33 | 33 | giga = 1024.0 ** 3 |
|
34 | 34 | mega = 1024.0 ** 2 |
|
35 | 35 | kilo = 1024.0 |
|
36 | 36 | if giga <= total: |
|
37 | 37 | return '{:0.1f}G'.format(total / giga) |
|
38 | 38 | elif mega <= total: |
|
39 | 39 | return '{:0.1f}M'.format(total / mega) |
|
40 | 40 | else: |
|
41 | 41 | return '{:0.1f}K'.format(total / kilo) |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | log = logging.getLogger(__name__) |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | @view_config(route_name='section_view', |
|
48 | 48 | match_param=['section=admin_section', 'view=system'], |
|
49 | 49 | renderer='json', permission='root_administration') |
|
50 | 50 | def system(request): |
|
51 | 51 | current_time = datetime.utcnow(). \ |
|
52 | 52 | replace(second=0, microsecond=0) - timedelta(minutes=1) |
|
53 | 53 | # global app counter |
|
54 | 54 | processed_reports = request.registry.redis_conn.get( |
|
55 | 55 | REDIS_KEYS['counters']['reports_per_minute'].format(current_time)) |
|
56 | 56 | processed_reports = int(processed_reports) if processed_reports else 0 |
|
57 | 57 | processed_logs = request.registry.redis_conn.get( |
|
58 | 58 | REDIS_KEYS['counters']['logs_per_minute'].format(current_time)) |
|
59 | 59 | processed_logs = int(processed_logs) if processed_logs else 0 |
|
60 | 60 | processed_metrics = request.registry.redis_conn.get( |
|
61 | 61 | REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)) |
|
62 | 62 | processed_metrics = int(processed_metrics) if processed_metrics else 0 |
|
63 | 63 | |
|
64 | 64 | waiting_reports = 0 |
|
65 | 65 | waiting_logs = 0 |
|
66 | 66 | waiting_metrics = 0 |
|
67 | 67 | waiting_other = 0 |
|
68 | 68 | |
|
69 | 69 | if 'redis' in request.registry.settings['celery.broker_type']: |
|
70 | 70 | redis_client = redis.StrictRedis.from_url( |
|
71 | 71 | request.registry.settings['celery.broker_url']) |
|
72 | 72 | waiting_reports = redis_client.llen('reports') |
|
73 | 73 | waiting_logs = redis_client.llen('logs') |
|
74 | 74 | waiting_metrics = redis_client.llen('metrics') |
|
75 | 75 | waiting_other = redis_client.llen('default') |
|
76 | 76 | |
|
77 | 77 | # process |
|
78 | 78 | def replace_inf(val): |
|
79 | 79 | return val if val != psutil.RLIM_INFINITY else 'unlimited' |
|
80 | 80 | |
|
81 | 81 | p = psutil.Process() |
|
82 | 82 | fd = p.rlimit(psutil.RLIMIT_NOFILE) |
|
83 | 83 | memlock = p.rlimit(psutil.RLIMIT_MEMLOCK) |
|
84 | 84 | self_info = { |
|
85 | 85 | 'fds': {'soft': replace_inf(fd[0]), |
|
86 | 86 | 'hard': replace_inf(fd[1])}, |
|
87 | 87 | 'memlock': {'soft': replace_inf(memlock[0]), |
|
88 | 88 | 'hard': replace_inf(memlock[1])}, |
|
89 | 89 | } |
|
90 | 90 | |
|
91 | 91 | # disks |
|
92 | 92 | disks = [] |
|
93 | 93 | for part in psutil.disk_partitions(all=False): |
|
94 | 94 | if os.name == 'nt': |
|
95 | 95 | if 'cdrom' in part.opts or part.fstype == '': |
|
96 | 96 | continue |
|
97 | 97 | usage = psutil.disk_usage(part.mountpoint) |
|
98 | 98 | disks.append({ |
|
99 | 99 | 'device': part.device, |
|
100 | 100 | 'total': bytes2human(usage.total), |
|
101 | 101 | 'used': bytes2human(usage.used), |
|
102 | 102 | 'free': bytes2human(usage.free), |
|
103 | 103 | 'percentage': int(usage.percent), |
|
104 | 104 | 'mountpoint': part.mountpoint, |
|
105 | 105 | 'fstype': part.fstype |
|
106 | 106 | }) |
|
107 | 107 | |
|
108 | 108 | # memory |
|
109 | 109 | memory_v = psutil.virtual_memory() |
|
110 | 110 | memory_s = psutil.swap_memory() |
|
111 | 111 | |
|
112 | 112 | memory = { |
|
113 | 113 | 'total': bytes2human(memory_v.total), |
|
114 | 114 | 'available': bytes2human(memory_v.available), |
|
115 | 115 | 'percentage': memory_v.percent, |
|
116 | 116 | 'used': bytes2human(memory_v.used), |
|
117 | 117 | 'free': bytes2human(memory_v.free), |
|
118 | 118 | 'active': bytes2human(memory_v.active), |
|
119 | 119 | 'inactive': bytes2human(memory_v.inactive), |
|
120 | 120 | 'buffers': bytes2human(memory_v.buffers), |
|
121 | 121 | 'cached': bytes2human(memory_v.cached), |
|
122 | 122 | 'swap_total': bytes2human(memory_s.total), |
|
123 | 123 | 'swap_used': bytes2human(memory_s.used) |
|
124 | 124 | } |
|
125 | 125 | |
|
126 | 126 | # load |
|
127 | 127 | system_load = os.getloadavg() |
|
128 | 128 | |
|
129 | 129 | # processes |
|
130 | 130 | min_mem = 1024 * 1024 * 40 # 40MB |
|
131 | 131 | process_info = [] |
|
132 | 132 | for p in psutil.process_iter(): |
|
133 | mem_used = p. | |
|
133 | mem_used = p.memory_info().rss | |
|
134 | 134 | if mem_used < min_mem: |
|
135 | 135 | continue |
|
136 | 136 | process_info.append({'owner': p.username(), |
|
137 | 137 | 'pid': p.pid, |
|
138 | 'cpu': round(p. | |
|
139 | 'mem_percentage': round(p. | |
|
140 | 1), | |
|
138 | 'cpu': round(p.cpu_percent(interval=0), 1), | |
|
139 | 'mem_percentage': round(p.memory_percent(),1), | |
|
141 | 140 | 'mem_usage': bytes2human(mem_used), |
|
142 | 141 | 'name': p.name(), |
|
143 | 142 | 'command': ' '.join(p.cmdline()) |
|
144 | 143 | }) |
|
145 | 144 | process_info = sorted(process_info, key=lambda x: x['mem_percentage'], |
|
146 | 145 | reverse=True) |
|
147 | 146 | |
|
148 | 147 | # pg tables |
|
149 | 148 | |
|
150 | 149 | db_size_query = ''' |
|
151 | 150 | SELECT tablename, pg_total_relation_size(tablename::text) size |
|
152 | 151 | FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND |
|
153 | 152 | tablename NOT LIKE 'sql_%' ORDER BY size DESC;''' |
|
154 | 153 | |
|
155 | 154 | db_tables = [] |
|
156 | 155 | for row in DBSession.execute(db_size_query): |
|
157 | 156 | db_tables.append({"size_human": bytes2human(row.size), |
|
158 | 157 | "table_name": row.tablename}) |
|
159 | 158 | |
|
160 | 159 | # es indices |
|
161 | 160 | es_indices = [] |
|
162 | 161 | result = Datastores.es.indices.stats(metric=['store, docs']) |
|
163 | 162 | for ix, stats in result['indices'].items(): |
|
164 | 163 | size = stats['primaries']['store']['size_in_bytes'] |
|
165 | 164 | es_indices.append({'name': ix, |
|
166 | 165 | 'size': size, |
|
167 | 166 | 'size_human': bytes2human(size)}) |
|
168 | 167 | |
|
169 | 168 | # packages |
|
170 | 169 | |
|
171 | 170 | packages = ({'name': p.project_name, 'version': p.version} |
|
172 | 171 | for p in pkg_resources.working_set) |
|
173 | 172 | |
|
174 | 173 | return {'db_tables': db_tables, |
|
175 | 174 | 'es_indices': sorted(es_indices, |
|
176 | 175 | key=lambda x: x['size'], reverse=True), |
|
177 | 176 | 'process_info': process_info, |
|
178 | 177 | 'system_load': system_load, |
|
179 | 178 | 'disks': disks, |
|
180 | 179 | 'memory': memory, |
|
181 | 180 | 'packages': sorted(packages, key=lambda x: x['name'].lower()), |
|
182 | 181 | 'current_time': current_time, |
|
183 | 182 | 'queue_stats': { |
|
184 | 183 | 'processed_reports': processed_reports, |
|
185 | 184 | 'processed_logs': processed_logs, |
|
186 | 185 | 'processed_metrics': processed_metrics, |
|
187 | 186 | 'waiting_reports': waiting_reports, |
|
188 | 187 | 'waiting_logs': waiting_logs, |
|
189 | 188 | 'waiting_metrics': waiting_metrics, |
|
190 | 189 | 'waiting_other': waiting_other |
|
191 | 190 | }, |
|
192 | 191 | 'self_info': self_info |
|
193 | 192 | } |
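
Note on the admin system view above: the removed lines (truncated in this view) called an older psutil process API, and the replacements use the current accessors (memory_info().rss, cpu_percent(interval=0), memory_percent()); interval=0 makes the CPU reading non-blocking, returning usage measured since the previous call. A minimal sketch of that per-process API; the NoSuchProcess guard is an added suggestion, not part of the changeset:

    # psutil 5.x per-process sampling, mirroring the loop above
    import psutil

    for p in psutil.process_iter():
        try:
            sample = {'pid': p.pid,
                      'rss': p.memory_info().rss,
                      'cpu': round(p.cpu_percent(interval=0), 1),
                      'mem_pct': round(p.memory_percent(), 1)}
        except psutil.NoSuchProcess:
            continue  # process exited between listing and inspection
        print(sample)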
@@ -1,760 +1,759 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import copy |
|
18 | 18 | import json |
|
19 | 19 | import logging |
|
20 | import six | |
|
21 | 20 | |
|
22 | 21 | from datetime import datetime, timedelta |
|
23 | 22 | |
|
24 | 23 | import colander |
|
25 | 24 | from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity |
|
26 | 25 | from pyramid.view import view_config |
|
27 | 26 | from webob.multidict import MultiDict |
|
28 | 27 | from zope.sqlalchemy import mark_changed |
|
29 | 28 | from ziggurat_foundations.permissions import ANY_PERMISSION |
|
30 | 29 | |
|
31 | 30 | import appenlight.forms as forms |
|
32 | 31 | from appenlight.models import DBSession |
|
33 | 32 | from appenlight.models.resource import Resource |
|
34 | 33 | from appenlight.models.application import Application |
|
35 | 34 | from appenlight.models.application_postprocess_conf import \ |
|
36 | 35 | ApplicationPostprocessConf |
|
37 | 36 | from ziggurat_foundations.models.services.user import UserService |
|
38 | 37 | from ziggurat_foundations.models.services.resource import ResourceService |
|
39 | 38 | from ziggurat_foundations.models.services.user_resource_permission import UserResourcePermissionService |
|
40 | 39 | from appenlight.models.user_resource_permission import UserResourcePermission |
|
41 | 40 | from appenlight.models.group_resource_permission import GroupResourcePermission |
|
42 | 41 | from appenlight.models.services.application import ApplicationService |
|
43 | 42 | from appenlight.models.services.application_postprocess_conf import \ |
|
44 | 43 | ApplicationPostprocessConfService |
|
45 | 44 | from appenlight.models.services.group import GroupService |
|
46 | 45 | from appenlight.models.services.group_resource_permission import \ |
|
47 | 46 | GroupResourcePermissionService |
|
48 | 47 | from appenlight.models.services.request_metric import RequestMetricService |
|
49 | 48 | from appenlight.models.services.report_group import ReportGroupService |
|
50 | 49 | from appenlight.models.services.slow_call import SlowCallService |
|
51 | 50 | from appenlight.lib import helpers as h |
|
52 | 51 | from appenlight.lib.utils import build_filter_settings_from_query_dict |
|
53 | 52 | from appenlight.security import RootFactory |
|
54 | 53 | from appenlight.models.report import REPORT_TYPE_MATRIX |
|
55 | 54 | from appenlight.validators import build_rule_schema |
|
56 | 55 | |
|
57 | 56 | _ = str |
|
58 | 57 | |
|
59 | 58 | log = logging.getLogger(__name__) |
|
60 | 59 | |
|
61 | 60 | |
|
62 | 61 | def app_not_found(request, id): |
|
63 | 62 | """ |
|
64 | 63 | Redirects on non found and sets a flash message |
|
65 | 64 | """ |
|
66 | 65 | request.session.flash(_('Application not found'), 'warning') |
|
67 | 66 | return HTTPFound( |
|
68 | 67 | location=request.route_url('applications', action='index')) |
|
69 | 68 | |
|
70 | 69 | |
|
71 | 70 | @view_config(route_name='applications_no_id', |
|
72 | 71 | renderer='json', request_method="GET", permission='authenticated') |
|
73 | 72 | def applications_list(request): |
|
74 | 73 | """ |
|
75 | 74 | Applications list |
|
76 | 75 | |
|
77 | 76 | if query params contain ?type=foo, it will list applications |
|
78 | 77 | with one of those permissions for user, |
|
79 | 78 | otherwise only list of owned applications will |
|
80 | 79 | be returned |
|
81 | 80 | |
|
82 | 81 | appending ?root_list while being administration will allow to list all |
|
83 | 82 | applications in the system |
|
84 | 83 | |
|
85 | 84 | """ |
|
86 | 85 | is_root = request.has_permission('root_administration', |
|
87 | 86 | RootFactory(request)) |
|
88 | 87 | if is_root and request.GET.get('root_list'): |
|
89 | 88 | resources = Resource.all().order_by(Resource.resource_name) |
|
90 | 89 | resource_type = request.GET.get('resource_type', 'application') |
|
91 | 90 | if resource_type: |
|
92 | 91 | resources = resources.filter( |
|
93 | 92 | Resource.resource_type == resource_type) |
|
94 | 93 | else: |
|
95 | 94 | permissions = request.params.getall('permission') |
|
96 | 95 | if permissions: |
|
97 | 96 | resources = UserService.resources_with_perms( |
|
98 | 97 | request.user, |
|
99 | 98 | permissions, |
|
100 | 99 | resource_types=[request.GET.get('resource_type', |
|
101 | 100 | 'application')]) |
|
102 | 101 | else: |
|
103 | 102 | resources = request.user.resources.filter( |
|
104 | 103 | Application.resource_type == request.GET.get( |
|
105 | 104 | 'resource_type', |
|
106 | 105 | 'application')) |
|
107 | 106 | return [r.get_dict(include_keys=['resource_id', 'resource_name', 'domains', |
|
108 | 107 | 'owner_user_name', 'owner_group_name']) |
|
109 | 108 | for |
|
110 | 109 | r in resources] |
|
111 | 110 | |
|
112 | 111 | |
|
113 | 112 | @view_config(route_name='applications', renderer='json', |
|
114 | 113 | request_method="GET", permission='view') |
|
115 | 114 | def application_GET(request): |
|
116 | 115 | resource = request.context.resource |
|
117 | 116 | include_sensitive_info = False |
|
118 | 117 | if request.has_permission('edit'): |
|
119 | 118 | include_sensitive_info = True |
|
120 | 119 | resource_dict = resource.get_dict( |
|
121 | 120 | include_perms=include_sensitive_info, |
|
122 | 121 | include_processing_rules=include_sensitive_info) |
|
123 | 122 | return resource_dict |
|
124 | 123 | |
|
125 | 124 | |
|
126 | 125 | @view_config(route_name='applications_no_id', request_method="POST", |
|
127 | 126 | renderer='json', permission='create_resources') |
|
128 | 127 | def application_create(request): |
|
129 | 128 | """ |
|
130 | 129 | Creates new application instances |
|
131 | 130 | """ |
|
132 | 131 | user = request.user |
|
133 | 132 | form = forms.ApplicationCreateForm(MultiDict(request.unsafe_json_body), |
|
134 | 133 | csrf_context=request) |
|
135 | 134 | if form.validate(): |
|
136 | 135 | session = DBSession() |
|
137 | 136 | resource = Application() |
|
138 | 137 | DBSession.add(resource) |
|
139 | 138 | form.populate_obj(resource) |
|
140 | 139 | resource.api_key = resource.generate_api_key() |
|
141 | 140 | user.resources.append(resource) |
|
142 | 141 | request.session.flash(_('Application created')) |
|
143 | 142 | DBSession.flush() |
|
144 | 143 | mark_changed(session) |
|
145 | 144 | else: |
|
146 | 145 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
147 | 146 | |
|
148 | 147 | return resource.get_dict() |
|
149 | 148 | |
|
150 | 149 | |
|
151 | 150 | @view_config(route_name='applications', request_method="PATCH", |
|
152 | 151 | renderer='json', permission='edit') |
|
153 | 152 | def application_update(request): |
|
154 | 153 | """ |
|
155 | 154 | Updates main application configuration |
|
156 | 155 | """ |
|
157 | 156 | resource = request.context.resource |
|
158 | 157 | if not resource: |
|
159 | 158 | return app_not_found() |
|
160 | 159 | |
|
161 | 160 | # disallow setting permanent storage by non-admins |
|
162 | 161 | # use default/non-resource based context for this check |
|
163 | 162 | req_dict = copy.copy(request.unsafe_json_body) |
|
164 | 163 | if not request.has_permission('root_administration', RootFactory(request)): |
|
165 | 164 | req_dict['allow_permanent_storage'] = '' |
|
166 | 165 | if not req_dict.get('uptime_url'): |
|
167 | 166 | # needed cause validator is still triggered by default |
|
168 | 167 | req_dict.pop('uptime_url', '') |
|
169 | 168 | application_form = forms.ApplicationUpdateForm(MultiDict(req_dict), |
|
170 | 169 | csrf_context=request) |
|
171 | 170 | if application_form.validate(): |
|
172 | 171 | application_form.populate_obj(resource) |
|
173 | 172 | request.session.flash(_('Application updated')) |
|
174 | 173 | else: |
|
175 | 174 | return HTTPUnprocessableEntity(body=application_form.errors_json) |
|
176 | 175 | |
|
177 | 176 | include_sensitive_info = False |
|
178 | 177 | if request.has_permission('edit'): |
|
179 | 178 | include_sensitive_info = True |
|
180 | 179 | resource_dict = resource.get_dict( |
|
181 | 180 | include_perms=include_sensitive_info, |
|
182 | 181 | include_processing_rules=include_sensitive_info) |
|
183 | 182 | return resource_dict |
|
184 | 183 | |
|
185 | 184 | |
|
186 | 185 | @view_config(route_name='applications_property', match_param='key=api_key', |
|
187 | 186 | request_method="POST", renderer='json', |
|
188 | 187 | permission='delete') |
|
189 | 188 | def application_regenerate_key(request): |
|
190 | 189 | """ |
|
191 | 190 | Regenerates API keys for application |
|
192 | 191 | """ |
|
193 | 192 | resource = request.context.resource |
|
194 | 193 | |
|
195 | 194 | form = forms.CheckPasswordForm(MultiDict(request.unsafe_json_body), |
|
196 | 195 | csrf_context=request) |
|
197 | 196 | form.password.user = request.user |
|
198 | 197 | |
|
199 | 198 | if form.validate(): |
|
200 | 199 | resource.api_key = resource.generate_api_key() |
|
201 | 200 | resource.public_key = resource.generate_api_key() |
|
202 | 201 | msg = 'API keys regenerated - please update your application config.' |
|
203 | 202 | request.session.flash(_(msg)) |
|
204 | 203 | else: |
|
205 | 204 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
206 | 205 | |
|
207 | 206 | if request.has_permission('edit'): |
|
208 | 207 | include_sensitive_info = True |
|
209 | 208 | resource_dict = resource.get_dict( |
|
210 | 209 | include_perms=include_sensitive_info, |
|
211 | 210 | include_processing_rules=include_sensitive_info) |
|
212 | 211 | return resource_dict |
|
213 | 212 | |
|
214 | 213 | |
|
215 | 214 | @view_config(route_name='applications_property', |
|
216 | 215 | match_param='key=delete_resource', |
|
217 | 216 | request_method="PATCH", renderer='json', permission='delete') |
|
218 | 217 | def application_remove(request): |
|
219 | 218 | """ |
|
220 | 219 | Removes application resources |
|
221 | 220 | """ |
|
222 | 221 | resource = request.context.resource |
|
223 | 222 | # we need polymorphic object here, to properly launch sqlalchemy events |
|
224 | 223 | resource = ApplicationService.by_id(resource.resource_id) |
|
225 | 224 | form = forms.CheckPasswordForm(MultiDict(request.safe_json_body or {}), |
|
226 | 225 | csrf_context=request) |
|
227 | 226 | form.password.user = request.user |
|
228 | 227 | if form.validate(): |
|
229 | 228 | DBSession.delete(resource) |
|
230 | 229 | request.session.flash(_('Application removed')) |
|
231 | 230 | else: |
|
232 | 231 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
233 | 232 | |
|
234 | 233 | return True |
|
235 | 234 | |
|
236 | 235 | |
|
237 | 236 | @view_config(route_name='applications_property', match_param='key=owner', |
|
238 | 237 | request_method="PATCH", renderer='json', permission='delete') |
|
239 | 238 | def application_ownership_transfer(request): |
|
240 | 239 | """ |
|
241 | 240 | Allows application owner to transfer application ownership to other user |
|
242 | 241 | """ |
|
243 | 242 | resource = request.context.resource |
|
244 | 243 | form = forms.ChangeApplicationOwnerForm( |
|
245 | 244 | MultiDict(request.safe_json_body or {}), csrf_context=request) |
|
246 | 245 | form.password.user = request.user |
|
247 | 246 | if form.validate(): |
|
248 | 247 | user = UserService.by_user_name(form.user_name.data) |
|
249 | 248 | user.resources.append(resource) |
|
250 | 249 | # remove integrations to not leak security data of external applications |
|
251 | 250 | for integration in resource.integrations[:]: |
|
252 | 251 | resource.integrations.remove(integration) |
|
253 | 252 | request.session.flash(_('Application transferred'))
|
254 | 253 | else: |
|
255 | 254 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
256 | 255 | return True |
|
257 | 256 | |
|
258 | 257 | |
|
259 | 258 | @view_config(route_name='applications_property', |
|
260 | 259 | match_param='key=postprocessing_rules', renderer='json', |
|
261 | 260 | request_method='POST', permission='edit') |
|
262 | 261 | def applications_postprocess_POST(request): |
|
263 | 262 | """ |
|
264 | 263 | Creates a new postprocessing rule for an application
|
265 | 264 | """ |
|
266 | 265 | resource = request.context.resource |
|
267 | 266 | conf = ApplicationPostprocessConf() |
|
268 | 267 | conf.do = 'postprocess' |
|
269 | 268 | conf.new_value = '1' |
|
270 | 269 | resource.postprocess_conf.append(conf) |
|
271 | 270 | DBSession.flush() |
|
272 | 271 | return conf.get_dict() |
|
273 | 272 | |
|
274 | 273 | |
|
275 | 274 | @view_config(route_name='applications_property', |
|
276 | 275 | match_param='key=postprocessing_rules', renderer='json', |
|
277 | 276 | request_method='PATCH', permission='edit') |
|
278 | 277 | def applications_postprocess_PATCH(request): |
|
279 | 278 | """ |
|
280 | 279 | Updates an existing postprocessing rule for an application
|
281 | 280 | """ |
|
282 | 281 | json_body = request.unsafe_json_body |
|
283 | 282 | |
|
284 | 283 | schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX) |
|
285 | 284 | try: |
|
286 | 285 | schema.deserialize(json_body['rule']) |
|
287 | 286 | except colander.Invalid as exc: |
|
288 | 287 | return HTTPUnprocessableEntity(body=json.dumps(exc.asdict())) |
|
289 | 288 | |
|
290 | 289 | resource = request.context.resource |
|
291 | 290 | conf = ApplicationPostprocessConfService.by_pkey_and_resource_id( |
|
292 | 291 | json_body['pkey'], resource.resource_id) |
|
293 | 292 | conf.rule = request.unsafe_json_body['rule'] |
|
294 | 293 | # for now hardcode int since we don't support anything else yet
|
295 | 294 | conf.new_value = int(request.unsafe_json_body['new_value']) |
|
296 | 295 | return conf.get_dict() |
|
297 | 296 | |
|
298 | 297 | |
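
The PATCH handler above leans on colander for rule validation. Below is
a toy schema standing in for build_rule_schema(), showing the
deserialize/Invalid round trip the view relies on; the field names are
illustrative, since the real schema is built dynamically from
REPORT_TYPE_MATRIX:

    import json
    import colander

    class RuleSchema(colander.MappingSchema):
        field = colander.SchemaNode(colander.String())
        op = colander.SchemaNode(colander.String())
        value = colander.SchemaNode(colander.Integer())

    schema = RuleSchema()
    try:
        # colander coerces '500' to the integer 500 on success
        clean = schema.deserialize({'field': 'http_status', 'op': 'ge',
                                    'value': '500'})
        print(clean)
    except colander.Invalid as exc:
        # the same dict shape the view serializes into the 422 body
        print(json.dumps(exc.asdict()))
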
|
299 | 298 | @view_config(route_name='applications_property', |
|
300 | 299 | match_param='key=postprocessing_rules', renderer='json', |
|
301 | 300 | request_method='DELETE', permission='edit') |
|
302 | 301 | def applications_postprocess_DELETE(request): |
|
303 | 302 | """ |
|
304 | 303 | Removes an application postprocessing rule
|
305 | 304 | """ |
|
306 | 305 | form = forms.ReactorForm(request.POST, csrf_context=request) |
|
307 | 306 | resource = request.context.resource |
|
308 | 307 | if form.validate(): |
|
309 | 308 | for postprocess_conf in resource.postprocess_conf: |
|
310 | 309 | if postprocess_conf.pkey == int(request.GET['pkey']): |
|
311 | 310 | # remove rule |
|
312 | 311 | DBSession.delete(postprocess_conf) |
|
313 | 312 | return True |
|
314 | 313 | |
|
315 | 314 | |
|
316 | 315 | @view_config(route_name='applications_property', |
|
317 | 316 | match_param='key=report_graphs', renderer='json', |
|
318 | 317 | permission='view') |
|
319 | 318 | @view_config(route_name='applications_property', |
|
320 | 319 | match_param='key=slow_report_graphs', renderer='json', |
|
321 | 320 | permission='view') |
|
322 | 321 | def get_application_report_stats(request): |
|
323 | 322 | query_params = request.GET.mixed() |
|
324 | 323 | query_params['resource'] = (request.context.resource.resource_id,) |
|
325 | 324 | |
|
326 | 325 | filter_settings = build_filter_settings_from_query_dict(request, |
|
327 | 326 | query_params) |
|
328 | 327 | if not filter_settings.get('end_date'): |
|
329 | 328 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
330 | 329 | filter_settings['end_date'] = end_date |
|
331 | 330 | |
|
332 | 331 | if not filter_settings.get('start_date'): |
|
333 | 332 | delta = timedelta(hours=1) |
|
334 | 333 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
335 | 334 | |
|
336 | 335 | result = ReportGroupService.get_report_stats(request, filter_settings) |
|
337 | 336 | return result |
|
338 | 337 | |
|
339 | 338 | |
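
The same defaulting pattern recurs in most dashboard views below: when
the query string carries no explicit range, the window falls back to
the last full hour. Isolated for clarity:

    from datetime import datetime, timedelta

    filter_settings = {}  # pretend the query string carried no dates
    if not filter_settings.get('end_date'):
        filter_settings['end_date'] = datetime.utcnow().replace(
            microsecond=0, second=0)
    if not filter_settings.get('start_date'):
        filter_settings['start_date'] = (
            filter_settings['end_date'] - timedelta(hours=1))
    print(filter_settings['end_date'] - filter_settings['start_date'])  # 1:00:00
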
|
340 | 339 | @view_config(route_name='applications_property', |
|
341 | 340 | match_param='key=metrics_graphs', renderer='json', |
|
342 | 341 | permission='view') |
|
343 | 342 | def metrics_graphs(request): |
|
344 | 343 | """ |
|
345 | 344 | Handles metric dashboard graphs |
|
346 | 345 | Returns information for time/tier breakdown |
|
347 | 346 | """ |
|
348 | 347 | query_params = request.GET.mixed() |
|
349 | 348 | query_params['resource'] = (request.context.resource.resource_id,) |
|
350 | 349 | |
|
351 | 350 | filter_settings = build_filter_settings_from_query_dict(request, |
|
352 | 351 | query_params) |
|
353 | 352 | |
|
354 | 353 | if not filter_settings.get('end_date'): |
|
355 | 354 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
356 | 355 | filter_settings['end_date'] = end_date |
|
357 | 356 | |
|
358 | 357 | delta = timedelta(hours=1) |
|
359 | 358 | if not filter_settings.get('start_date'): |
|
360 | 359 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
361 | 360 | if filter_settings['end_date'] <= filter_settings['start_date']: |
|
362 | 361 | filter_settings['end_date'] = filter_settings['start_date'] |
|
363 | 362 | |
|
364 | 363 | delta = filter_settings['end_date'] - filter_settings['start_date'] |
|
365 | 364 | if delta < h.time_deltas.get('12h')['delta']: |
|
366 | 365 | divide_by_min = 1 |
|
367 | 366 | elif delta <= h.time_deltas.get('3d')['delta']: |
|
368 | 367 | divide_by_min = 5.0 |
|
369 | 368 | elif delta >= h.time_deltas.get('2w')['delta']: |
|
370 | 369 | divide_by_min = 60.0 * 24 |
|
371 | 370 | else: |
|
372 | 371 | divide_by_min = 60.0 |
|
373 | 372 | |
|
374 | 373 | results = RequestMetricService.get_metrics_stats( |
|
375 | 374 | request, filter_settings) |
|
376 | 375 | # because requests are PER SECOND / we divide 1 min stats by 60 |
|
377 | 376 | # requests are normalized to 1 min average |
|
378 | 377 | # results are average seconds time spent per request in specific area |
|
379 | 378 | for point in results: |
|
380 | 379 | if point['requests']: |
|
381 | 380 | point['main'] = (point['main'] - point['sql'] - |
|
382 | 381 | point['nosql'] - point['remote'] - |
|
383 | 382 | point['tmpl'] - |
|
384 | 383 | point['custom']) / point['requests'] |
|
385 | 384 | point['sql'] = point['sql'] / point['requests'] |
|
386 | 385 | point['nosql'] = point['nosql'] / point['requests'] |
|
387 | 386 | point['remote'] = point['remote'] / point['requests'] |
|
388 | 387 | point['tmpl'] = point['tmpl'] / point['requests'] |
|
389 | 388 | point['custom'] = point['custom'] / point['requests'] |
|
390 | 389 | point['requests_2'] = point['requests'] / 60.0 / divide_by_min |
|
391 | 390 | |
|
392 | 391 | selected_types = ['main', 'sql', 'nosql', 'remote', 'tmpl', 'custom'] |
|
393 | 392 | |
|
394 | 393 | for point in results: |
|
395 | 394 | for stat_type in selected_types: |
|
396 | 395 | point[stat_type] = round(point.get(stat_type, 0), 3) |
|
397 | 396 | |
|
398 | 397 | return results |
|
399 | 398 | |
|
400 | 399 | |
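
A worked instance of the per-request normalization above, assuming a
single aggregated point covering a 5-minute bucket (divide_by_min =
5.0); the numbers are made up:

    point = {'main': 300.0, 'sql': 60.0, 'nosql': 0.0, 'remote': 30.0,
             'tmpl': 15.0, 'custom': 0.0, 'requests': 150}
    divide_by_min = 5.0

    # average time spent per request in "own" application code
    own = (point['main'] - point['sql'] - point['nosql'] -
           point['remote'] - point['tmpl'] - point['custom'])
    print(round(own / point['requests'], 3))         # 1.3 seconds/request
    # normalize the bucket's request count to requests per second
    print(point['requests'] / 60.0 / divide_by_min)  # 0.5 req/s
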
|
401 | 400 | @view_config(route_name='applications_property', |
|
402 | 401 | match_param='key=response_graphs', renderer='json', |
|
403 | 402 | permission='view') |
|
404 | 403 | def response_graphs(request): |
|
405 | 404 | """ |
|
406 | 405 | Handles dashboard information for avg. response time split by today,

407 | 406 | 2 days ago and a week ago
|
408 | 407 | """ |
|
409 | 408 | query_params = request.GET.mixed() |
|
410 | 409 | query_params['resource'] = (request.context.resource.resource_id,) |
|
411 | 410 | |
|
412 | 411 | filter_settings = build_filter_settings_from_query_dict(request, |
|
413 | 412 | query_params) |
|
414 | 413 | |
|
415 | 414 | if not filter_settings.get('end_date'): |
|
416 | 415 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
417 | 416 | filter_settings['end_date'] = end_date |
|
418 | 417 | |
|
419 | 418 | delta = timedelta(hours=1) |
|
420 | 419 | if not filter_settings.get('start_date'): |
|
421 | 420 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
422 | 421 | |
|
423 | 422 | result_now = RequestMetricService.get_metrics_stats( |
|
424 | 423 | request, filter_settings) |
|
425 | 424 | |
|
426 | 425 | filter_settings_2d = filter_settings.copy() |
|
427 | 426 | filter_settings_2d['start_date'] = filter_settings['start_date'] - \ |
|
428 | 427 | timedelta(days=2) |
|
429 | 428 | filter_settings_2d['end_date'] = filter_settings['end_date'] - \ |
|
430 | 429 | timedelta(days=2) |
|
431 | 430 | result_2d = RequestMetricService.get_metrics_stats( |
|
432 | 431 | request, filter_settings_2d) |
|
433 | 432 | |
|
434 | 433 | filter_settings_7d = filter_settings.copy() |
|
435 | 434 | filter_settings_7d['start_date'] = filter_settings['start_date'] - \ |
|
436 | 435 | timedelta(days=7) |
|
437 | 436 | filter_settings_7d['end_date'] = filter_settings['end_date'] - \ |
|
438 | 437 | timedelta(days=7) |
|
439 | 438 | result_7d = RequestMetricService.get_metrics_stats( |
|
440 | 439 | request, filter_settings_7d) |
|
441 | 440 | |
|
442 | 441 | plot_data = [] |
|
443 | 442 | |
|
444 | 443 | for item in result_now: |
|
445 | 444 | point = {'x': item['x'], 'today': 0, 'days_ago_2': 0, |
|
446 | 445 | 'days_ago_7': 0} |
|
447 | 446 | if item['requests']: |
|
448 | 447 | point['today'] = round(item['main'] / item['requests'], 3) |
|
449 | 448 | plot_data.append(point) |
|
450 | 449 | |
|
451 | 450 | for i, item in enumerate(result_2d[:len(plot_data)]): |
|
452 | 451 | plot_data[i]['days_ago_2'] = 0 |
|
453 | 452 | point = result_2d[i] |
|
454 | 453 | if point['requests']: |
|
455 | 454 | plot_data[i]['days_ago_2'] = round(point['main'] / |
|
456 | 455 | point['requests'], 3) |
|
457 | 456 | |
|
458 | 457 | for i, item in enumerate(result_7d[:len(plot_data)]): |
|
459 | 458 | plot_data[i]['days_ago_7'] = 0 |
|
460 | 459 | point = result_7d[i] |
|
461 | 460 | if point['requests']: |
|
462 | 461 | plot_data[i]['days_ago_7'] = round(point['main'] / |
|
463 | 462 | point['requests'], 3) |
|
464 | 463 | |
|
465 | 464 | return plot_data |
|
466 | 465 | |
|
467 | 466 | |
|
468 | 467 | @view_config(route_name='applications_property', |
|
469 | 468 | match_param='key=requests_graphs', renderer='json', |
|
470 | 469 | permission='view') |
|
471 | 470 | def requests_graphs(request): |
|
472 | 471 | """ |
|
473 | 472 | Handles dashboard information for the requests-per-second

474 | 473 | graph over a given time interval
|
475 | 474 | """ |
|
476 | 475 | query_params = request.GET.mixed() |
|
477 | 476 | query_params['resource'] = (request.context.resource.resource_id,) |
|
478 | 477 | |
|
479 | 478 | filter_settings = build_filter_settings_from_query_dict(request, |
|
480 | 479 | query_params) |
|
481 | 480 | |
|
482 | 481 | if not filter_settings.get('end_date'): |
|
483 | 482 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
484 | 483 | filter_settings['end_date'] = end_date |
|
485 | 484 | |
|
486 | 485 | delta = timedelta(hours=1) |
|
487 | 486 | if not filter_settings.get('start_date'): |
|
488 | 487 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
489 | 488 | |
|
490 | 489 | result_now = RequestMetricService.get_metrics_stats( |
|
491 | 490 | request, filter_settings) |
|
492 | 491 | |
|
493 | 492 | delta = filter_settings['end_date'] - filter_settings['start_date'] |
|
494 | 493 | if delta < h.time_deltas.get('12h')['delta']: |
|
495 | 494 | seconds = h.time_deltas['1m']['minutes'] * 60.0 |
|
496 | 495 | elif delta <= h.time_deltas.get('3d')['delta']: |
|
497 | 496 | seconds = h.time_deltas['5m']['minutes'] * 60.0 |
|
498 | 497 | elif delta >= h.time_deltas.get('2w')['delta']: |
|
499 | 498 | seconds = h.time_deltas['24h']['minutes'] * 60.0 |
|
500 | 499 | else: |
|
501 | 500 | seconds = h.time_deltas['1h']['minutes'] * 60.0 |
|
502 | 501 | |
|
503 | 502 | for item in result_now: |
|
504 | 503 | if item['requests']: |
|
505 | 504 | item['requests'] = round(item['requests'] / seconds, 3) |
|
506 | 505 | return result_now |
|
507 | 506 | |
|
508 | 507 | |
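
The bucket-size selection above, restated with hypothetical
h.time_deltas entries (the real mapping lives in AppEnlight's helpers);
a 6-hour range lands in the 1-minute-bucket branch:

    from datetime import timedelta

    time_deltas = {'12h': timedelta(hours=12), '3d': timedelta(days=3),
                   '2w': timedelta(weeks=2)}
    delta = timedelta(hours=6)
    if delta < time_deltas['12h']:
        seconds = 60.0              # 1-minute buckets
    elif delta <= time_deltas['3d']:
        seconds = 5 * 60.0          # 5-minute buckets
    elif delta >= time_deltas['2w']:
        seconds = 24 * 60 * 60.0    # daily buckets
    else:
        seconds = 60 * 60.0         # hourly buckets
    print(round(150 / seconds, 3))  # 150 requests in one bucket -> 2.5 req/s
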
|
509 | 508 | @view_config(route_name='applications_property', |
|
510 | 509 | match_param='key=apdex_stats', renderer='json', |
|
511 | 510 | permission='view') |
|
512 | 511 | def get_apdex_stats(request): |
|
513 | 512 | """ |
|
514 | 513 | Calculates and returns the per-server APDEX score for the dashboard

515 | 514 | server stats (upper-right stats boxes)
|
516 | 515 | """ |
|
517 | 516 | query_params = request.GET.mixed() |
|
518 | 517 | query_params['resource'] = (request.context.resource.resource_id,) |
|
519 | 518 | |
|
520 | 519 | filter_settings = build_filter_settings_from_query_dict(request, |
|
521 | 520 | query_params) |
|
522 | 521 | # make sure we only have one resource here so we don't produce

523 | 522 | # weird results when the wrong app is picked in the app selector
|
524 | 523 | filter_settings['resource'] = [filter_settings['resource'][0]] |
|
525 | 524 | |
|
526 | 525 | if not filter_settings.get('end_date'): |
|
527 | 526 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
528 | 527 | filter_settings['end_date'] = end_date |
|
529 | 528 | |
|
530 | 529 | delta = timedelta(hours=1) |
|
531 | 530 | if not filter_settings.get('start_date'): |
|
532 | 531 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
533 | 532 | |
|
534 | 533 | return RequestMetricService.get_apdex_stats(request, filter_settings) |
|
535 | 534 | |
|
536 | 535 | |
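
For orientation, the textbook Apdex formula such stats are built on;
the actual thresholds and aggregation live in RequestMetricService, so
the numbers here are purely illustrative:

    satisfied, tolerating, total = 170, 20, 200
    apdex = (satisfied + tolerating / 2.0) / total
    print(round(apdex, 2))  # 0.9
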
|
537 | 536 | @view_config(route_name='applications_property', match_param='key=slow_calls', |
|
538 | 537 | renderer='json', permission='view') |
|
539 | 538 | def get_slow_calls(request): |
|
540 | 539 | """ |
|
541 | 540 | Returns information on time-consuming calls in a specific time interval
|
542 | 541 | """ |
|
543 | 542 | query_params = request.GET.mixed() |
|
544 | 543 | query_params['resource'] = (request.context.resource.resource_id,) |
|
545 | 544 | |
|
546 | 545 | filter_settings = build_filter_settings_from_query_dict(request, |
|
547 | 546 | query_params) |
|
548 | 547 | |
|
549 | 548 | if not filter_settings.get('end_date'): |
|
550 | 549 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
551 | 550 | filter_settings['end_date'] = end_date |
|
552 | 551 | |
|
553 | 552 | delta = timedelta(hours=1) |
|
554 | 553 | if not filter_settings.get('start_date'): |
|
555 | 554 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
556 | 555 | |
|
557 | 556 | return SlowCallService.get_time_consuming_calls(request, filter_settings) |
|
558 | 557 | |
|
559 | 558 | |
|
560 | 559 | @view_config(route_name='applications_property', |
|
561 | 560 | match_param='key=requests_breakdown', |
|
562 | 561 | renderer='json', permission='view') |
|
563 | 562 | def get_requests_breakdown(request): |
|
564 | 563 | """ |
|
565 | 564 | Used on the dashboard to get information about which views are most

566 | 565 | used in a given time interval
|
567 | 566 | """ |
|
568 | 567 | query_params = request.GET.mixed() |
|
569 | 568 | query_params['resource'] = (request.context.resource.resource_id,) |
|
570 | 569 | |
|
571 | 570 | filter_settings = build_filter_settings_from_query_dict(request, |
|
572 | 571 | query_params) |
|
573 | 572 | if not filter_settings.get('end_date'): |
|
574 | 573 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
575 | 574 | filter_settings['end_date'] = end_date |
|
576 | 575 | |
|
577 | 576 | if not filter_settings.get('start_date'): |
|
578 | 577 | delta = timedelta(hours=1) |
|
579 | 578 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
580 | 579 | |
|
581 | 580 | series = RequestMetricService.get_requests_breakdown( |
|
582 | 581 | request, filter_settings) |
|
583 | 582 | |
|
584 | 583 | results = [] |
|
585 | 584 | for row in series: |
|
586 | 585 | d_row = {'avg_response': round(row['main'] / row['requests'], 3), |
|
587 | 586 | 'requests': row['requests'], |
|
588 | 587 | 'main': row['main'], |
|
589 | 588 | 'view_name': row['key'], |
|
590 | 589 | 'latest_details': row['latest_details'], |
|
591 | 590 | 'percentage': round(row['percentage'] * 100, 1)} |
|
592 | 591 | |
|
593 | 592 | results.append(d_row) |
|
594 | 593 | |
|
595 | 594 | return results |
|
596 | 595 | |
|
597 | 596 | |
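
Shape of one row produced above, with illustrative numbers showing how
avg_response and percentage are derived:

    row = {'main': 12.0, 'requests': 40, 'key': '/report/list',
           'latest_details': [], 'percentage': 0.25}
    d_row = {'avg_response': round(row['main'] / row['requests'], 3),  # 0.3 s
             'view_name': row['key'],
             'percentage': round(row['percentage'] * 100, 1)}          # 25.0 %
    print(d_row)
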
|
598 | 597 | @view_config(route_name='applications_property', |
|
599 | 598 | match_param='key=trending_reports', renderer='json', |
|
600 | 599 | permission='view') |
|
601 | 600 | def trending_reports(request): |
|
602 | 601 | """ |
|
603 | 602 | Returns trending exception/slow reports for a specific time interval
|
604 | 603 | """ |
|
605 | 604 | query_params = request.GET.mixed().copy() |
|
606 | 605 | # pop report type to rewrite it to tag later |
|
607 | 606 | report_type = query_params.pop('report_type', None) |
|
608 | 607 | if report_type: |
|
609 | 608 | query_params['type'] = report_type |
|
610 | 609 | |
|
611 | 610 | query_params['resource'] = (request.context.resource.resource_id,) |
|
612 | 611 | |
|
613 | 612 | filter_settings = build_filter_settings_from_query_dict(request, |
|
614 | 613 | query_params) |
|
615 | 614 | |
|
616 | 615 | if not filter_settings.get('end_date'): |
|
617 | 616 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
618 | 617 | filter_settings['end_date'] = end_date |
|
619 | 618 | |
|
620 | 619 | if not filter_settings.get('start_date'): |
|
621 | 620 | delta = timedelta(hours=1) |
|
622 | 621 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
623 | 622 | |
|
624 | 623 | results = ReportGroupService.get_trending(request, filter_settings) |
|
625 | 624 | |
|
626 | 625 | trending = [] |
|
627 | 626 | for occurences, group in results: |
|
628 | 627 | report_group = group.get_dict(request) |
|
629 | 628 | # show the occurrences in the time range instead of the global ones
|
630 | 629 | report_group['occurences'] = occurences |
|
631 | 630 | trending.append(report_group) |
|
632 | 631 | |
|
633 | 632 | return trending |
|
634 | 633 | |
|
635 | 634 | |
|
636 | 635 | @view_config(route_name='applications_property', |
|
637 | 636 | match_param='key=integrations', |
|
638 | 637 | renderer='json', permission='view') |
|
639 | 638 | def integrations(request): |
|
640 | 639 | """ |
|
641 | 640 | Integration list for given application |
|
642 | 641 | """ |
|
643 | 642 | application = request.context.resource |
|
644 | 643 | return {'resource': application} |
|
645 | 644 | |
|
646 | 645 | |
|
647 | 646 | @view_config(route_name='applications_property', |
|
648 | 647 | match_param='key=user_permissions', renderer='json', |
|
649 | 648 | permission='owner', request_method='POST') |
|
650 | 649 | def user_resource_permission_create(request): |
|
651 | 650 | """ |
|
652 | 651 | Sets new permissions on a resource for a user
|
653 | 652 | """ |
|
654 | 653 | resource = request.context.resource |
|
655 | 654 | user_name = request.unsafe_json_body.get('user_name') |
|
656 | 655 | user = UserService.by_user_name(user_name) |
|
657 | 656 | if not user: |
|
658 | 657 | user = UserService.by_email(user_name) |
|
659 | 658 | if not user: |
|
660 | 659 | return False |
|
661 | 660 | |
|
662 | 661 | for perm_name in request.unsafe_json_body.get('permissions', []): |
|
663 | 662 | permission = UserResourcePermissionService.by_resource_user_and_perm( |
|
664 | 663 | user.id, perm_name, resource.resource_id) |
|
665 | 664 | if not permission: |
|
666 | 665 | permission = UserResourcePermission(perm_name=perm_name, |
|
667 | 666 | user_id=user.id) |
|
668 | 667 | resource.user_permissions.append(permission) |
|
669 | 668 | DBSession.flush() |
|
670 | 669 | perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user) |
|
671 | 670 | if p.type == 'user'] |
|
672 | 671 | result = {'user_name': user.user_name, |
|
673 | 672 | 'permissions': list(set(perms))} |
|
674 | 673 | return result |
|
675 | 674 | |
|
676 | 675 | |
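
A hedged client-side sketch of the permission endpoint above; the URL
shape, authentication, and field values are assumptions for
illustration only:

    import requests

    url = 'https://appenlight.example.com/applications/1/user_permissions'
    resp = requests.post(url, json={'user_name': 'some_user',
                                    'permissions': ['view']})
    # expected echo: {'user_name': 'some_user', 'permissions': ['view']}
    print(resp.json())
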
|
677 | 676 | @view_config(route_name='applications_property', |
|
678 | 677 | match_param='key=user_permissions', renderer='json', |
|
679 | 678 | permission='owner', request_method='DELETE') |
|
680 | 679 | def user_resource_permission_delete(request): |
|
681 | 680 | """ |
|
682 | 681 | Removes a user's permission from a specific resource
|
683 | 682 | """ |
|
684 | 683 | resource = request.context.resource |
|
685 | 684 | |
|
686 | 685 | user = UserService.by_user_name(request.GET.get('user_name')) |
|
687 | 686 | if not user: |
|
688 | 687 | return False |
|
689 | 688 | |
|
690 | 689 | for perm_name in request.GET.getall('permissions'): |
|
691 | 690 | permission = UserResourcePermissionService.by_resource_user_and_perm( |
|
692 | 691 | user.id, perm_name, resource.resource_id) |
|
693 | 692 | resource.user_permissions.remove(permission) |
|
694 | 693 | DBSession.flush() |
|
695 | 694 | perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user) |
|
696 | 695 | if p.type == 'user'] |
|
697 | 696 | result = {'user_name': user.user_name, |
|
698 | 697 | 'permissions': list(set(perms))} |
|
699 | 698 | return result |
|
700 | 699 | |
|
701 | 700 | |
|
702 | 701 | @view_config(route_name='applications_property', |
|
703 | 702 | match_param='key=group_permissions', renderer='json', |
|
704 | 703 | permission='owner', request_method='POST') |
|
705 | 704 | def group_resource_permission_create(request): |
|
706 | 705 | """ |
|
707 | 706 | Sets new permissions on a resource for a group
|
708 | 707 | """ |
|
709 | 708 | resource = request.context.resource |
|
710 | 709 | group = GroupService.by_id(request.unsafe_json_body.get('group_id')) |
|
711 | 710 | if not group: |
|
712 | 711 | return False |
|
713 | 712 | |
|
714 | 713 | for perm_name in request.unsafe_json_body.get('permissions', []): |
|
715 | 714 | permission = GroupResourcePermissionService.by_resource_group_and_perm( |
|
716 | 715 | group.id, perm_name, resource.resource_id) |
|
717 | 716 | if not permission: |
|
718 | 717 | permission = GroupResourcePermission(perm_name=perm_name, |
|
719 | 718 | group_id=group.id) |
|
720 | 719 | resource.group_permissions.append(permission) |
|
721 | 720 | DBSession.flush() |
|
722 | 721 | perm_tuples = ResourceService.groups_for_perm( |
|
723 | 722 | resource, |
|
724 | 723 | ANY_PERMISSION, |
|
725 | 724 | limit_group_permissions=True, |
|
726 | 725 | group_ids=[group.id]) |
|
727 | 726 | perms = [p.perm_name for p in perm_tuples if p.type == 'group'] |
|
728 | 727 | result = {'group': group.get_dict(), |
|
729 | 728 | 'permissions': list(set(perms))} |
|
730 | 729 | return result |
|
731 | 730 | |
|
732 | 731 | |
|
733 | 732 | @view_config(route_name='applications_property', |
|
734 | 733 | match_param='key=group_permissions', renderer='json', |
|
735 | 734 | permission='owner', request_method='DELETE') |
|
736 | 735 | def group_resource_permission_delete(request): |
|
737 | 736 | """ |
|
738 | 737 | Removes a group's permission from a specific resource
|
739 | 738 | """ |
|
740 | 739 | form = forms.ReactorForm(request.POST, csrf_context=request) |
|
741 | 740 | form.validate() |
|
742 | 741 | resource = request.context.resource |
|
743 | 742 | group = GroupService.by_id(request.GET.get('group_id')) |
|
744 | 743 | if not group: |
|
745 | 744 | return False |
|
746 | 745 | |
|
747 | 746 | for perm_name in request.GET.getall('permissions'): |
|
748 | 747 | permission = GroupResourcePermissionService.by_resource_group_and_perm( |
|
749 | 748 | group.id, perm_name, resource.resource_id) |
|
750 | 749 | resource.group_permissions.remove(permission) |
|
751 | 750 | DBSession.flush() |
|
752 | 751 | perm_tuples = ResourceService.groups_for_perm( |
|
753 | 752 | resource, |
|
754 | 753 | ANY_PERMISSION, |
|
755 | 754 | limit_group_permissions=True, |
|
756 | 755 | group_ids=[group.id]) |
|
757 | 756 | perms = [p.perm_name for p in perm_tuples if p.type == 'group'] |
|
758 | 757 | result = {'group': group.get_dict(), |
|
759 | 758 | 'permissions': list(set(perms))} |
|
760 | 759 | return result |
@@ -1,446 +1,445 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import copy |
|
18 | 18 | import logging |
|
19 | 19 | import datetime |
|
20 | 20 | import time |
|
21 | 21 | import random |
|
22 | 22 | import redis |
|
23 | import six | |
|
24 | 23 | import pyramid.renderers |
|
25 | 24 | import requests |
|
26 | 25 | |
|
27 | 26 | from ziggurat_foundations.models.services.user import UserService |
|
28 | 27 | |
|
29 | 28 | import appenlight.celery.tasks |
|
30 | 29 | from pyramid.view import view_config |
|
31 | 30 | from pyramid_mailer.message import Message |
|
32 | 31 | from appenlight_client.timing import time_trace |
|
33 | 32 | from appenlight.models import DBSession, Datastores |
|
34 | 33 | from appenlight.models.user import User |
|
35 | 34 | from appenlight.models.report_group import ReportGroup |
|
36 | 35 | from appenlight.models.event import Event |
|
37 | 36 | from appenlight.models.services.report_group import ReportGroupService |
|
38 | 37 | from appenlight.models.services.event import EventService |
|
39 | 38 | from appenlight.lib.enums import ReportType |
|
40 | 39 | |
|
41 | 40 | log = logging.getLogger(__name__) |
|
42 | 41 | |
|
43 | 42 | GLOBAL_REQ = None |
|
44 | 43 | |
|
45 | 44 | |
|
46 | 45 | @view_config(route_name='test', match_param='action=mail', |
|
47 | 46 | renderer='string', permission='root_administration') |
|
48 | 47 | def mail(request): |
|
49 | 48 | """ |
|
50 | 49 | Test email communication |
|
51 | 50 | """ |
|
52 | 51 | request.environ['HTTP_HOST'] = 'appenlight.com' |
|
53 | 52 | request.environ['wsgi.url_scheme'] = 'https' |
|
54 | 53 | renderer_vars = {"title": "You have just registered on AppEnlight", |
|
55 | 54 | "username": "test", |
|
56 | 55 | "email": "grzegΕΌΓ³Εka", |
|
57 | 56 | 'firstname': 'dupa'} |
|
58 | 57 | # return vars |
|
59 | 58 | html = pyramid.renderers.render('/email_templates/registered.jinja2', |
|
60 | 59 | renderer_vars, |
|
61 | 60 | request=request) |
|
62 | 61 | message = Message(subject="hello world %s" % random.randint(1, 9999), |
|
63 | 62 | sender="info@appenlight.com", |
|
64 | 63 | recipients=["ergo14@gmail.com"], |
|
65 | 64 | html=html) |
|
66 | 65 | request.registry.mailer.send(message) |
|
67 | 66 | return html |
|
68 | 67 | return vars |
|
69 | 68 | |
|
70 | 69 | |
|
71 | 70 | @view_config(route_name='test', match_param='action=alerting', |
|
72 | 71 | renderer='appenlight:templates/tests/alerting.jinja2', |
|
73 | 72 | permission='root_administration') |
|
74 | 73 | def alerting_test(request): |
|
75 | 74 | """ |
|
76 | 75 | Allows to test send data on various registered alerting channels |
|
77 | 76 | """ |
|
78 | 77 | applications = UserService.resources_with_perms(request.user, ['view'], resource_types=['application']) |
|
79 | 78 | # what we can select in total |
|
80 | 79 | all_possible_app_ids = [app.resource_id for app in applications] |
|
81 | 80 | resource = applications[0] |
|
82 | 81 | |
|
83 | 82 | alert_channels = [] |
|
84 | 83 | for channel in request.user.alert_channels: |
|
85 | 84 | alert_channels.append(channel.get_dict()) |
|
86 | 85 | |
|
87 | 86 | cname = request.params.get('channel_name') |
|
88 | 87 | cvalue = request.params.get('channel_value') |
|
89 | 88 | event_name = request.params.get('event_name') |
|
90 | 89 | if cname and cvalue: |
|
91 | 90 | for channel in request.user.alert_channels: |
|
92 | 91 | if (channel.channel_value == cvalue and |
|
93 | 92 | channel.channel_name == cname): |
|
94 | 93 | break |
|
95 | 94 | if event_name in ['error_report_alert', 'slow_report_alert']: |
|
96 | 95 | # opened |
|
97 | 96 | new_event = Event(resource_id=resource.resource_id, |
|
98 | 97 | event_type=Event.types[event_name], |
|
99 | 98 | start_date=datetime.datetime.utcnow(), |
|
100 | 99 | status=Event.statuses['active'], |
|
101 | 100 | values={'reports': 5, |
|
102 | 101 | 'threshold': 10} |
|
103 | 102 | ) |
|
104 | 103 | channel.notify_alert(resource=resource, |
|
105 | 104 | event=new_event, |
|
106 | 105 | user=request.user, |
|
107 | 106 | request=request) |
|
108 | 107 | |
|
109 | 108 | # closed |
|
110 | 109 | ev_type = Event.types[event_name.replace('open', 'close')] |
|
111 | 110 | new_event = Event(resource_id=resource.resource_id, |
|
112 | 111 | event_type=ev_type, |
|
113 | 112 | start_date=datetime.datetime.utcnow(), |
|
114 | 113 | status=Event.statuses['closed'], |
|
115 | 114 | values={'reports': 5, |
|
116 | 115 | 'threshold': 10}) |
|
117 | 116 | channel.notify_alert(resource=resource, |
|
118 | 117 | event=new_event, |
|
119 | 118 | user=request.user, |
|
120 | 119 | request=request) |
|
121 | 120 | elif event_name == 'notify_reports': |
|
122 | 121 | report = ReportGroupService.by_app_ids(all_possible_app_ids) \ |
|
123 | 122 | .filter(ReportGroup.report_type == ReportType.error).first() |
|
124 | 123 | confirmed_reports = [(5, report), (1, report)] |
|
125 | 124 | channel.notify_reports(resource=resource, |
|
126 | 125 | user=request.user, |
|
127 | 126 | request=request, |
|
128 | 127 | since_when=datetime.datetime.utcnow(), |
|
129 | 128 | reports=confirmed_reports) |
|
130 | 129 | confirmed_reports = [(5, report)] |
|
131 | 130 | channel.notify_reports(resource=resource, |
|
132 | 131 | user=request.user, |
|
133 | 132 | request=request, |
|
134 | 133 | since_when=datetime.datetime.utcnow(), |
|
135 | 134 | reports=confirmed_reports) |
|
136 | 135 | elif event_name == 'notify_uptime': |
|
137 | 136 | new_event = Event(resource_id=resource.resource_id, |
|
138 | 137 | event_type=Event.types['uptime_alert'], |
|
139 | 138 | start_date=datetime.datetime.utcnow(), |
|
140 | 139 | status=Event.statuses['active'], |
|
141 | 140 | values={"status_code": 500, |
|
142 | 141 | "tries": 2, |
|
143 | 142 | "response_time": 0}) |
|
144 | 143 | channel.notify_uptime_alert(resource=resource, |
|
145 | 144 | event=new_event, |
|
146 | 145 | user=request.user, |
|
147 | 146 | request=request) |
|
148 | 147 | elif event_name == 'chart_alert': |
|
149 | 148 | event = EventService.by_type_and_status( |
|
150 | 149 | event_types=(Event.types['chart_alert'],), |
|
151 | 150 | status_types=(Event.statuses['active'],)).first() |
|
152 | 151 | channel.notify_chart_alert(resource=event.resource, |
|
153 | 152 | event=event, |
|
154 | 153 | user=request.user, |
|
155 | 154 | request=request) |
|
156 | 155 | elif event_name == 'daily_digest': |
|
157 | 156 | since_when = datetime.datetime.utcnow() - datetime.timedelta( |
|
158 | 157 | hours=8) |
|
159 | 158 | filter_settings = {'resource': [resource.resource_id], |
|
160 | 159 | 'tags': [{'name': 'type', |
|
161 | 160 | 'value': ['error'], 'op': None}], |
|
162 | 161 | 'type': 'error', 'start_date': since_when} |
|
163 | 162 | |
|
164 | 163 | reports = ReportGroupService.get_trending( |
|
165 | 164 | request, filter_settings=filter_settings, limit=50) |
|
166 | 165 | channel.send_digest(resource=resource, |
|
167 | 166 | user=request.user, |
|
168 | 167 | request=request, |
|
169 | 168 | since_when=datetime.datetime.utcnow(), |
|
170 | 169 | reports=reports) |
|
171 | 170 | |
|
172 | 171 | return {'alert_channels': alert_channels, |
|
173 | 172 | 'applications': dict([(app.resource_id, app.resource_name) |
|
174 | 173 | for app in applications.all()])} |
|
175 | 174 | |
|
176 | 175 | |
|
177 | 176 | @view_config(route_name='test', match_param='action=error', |
|
178 | 177 | renderer='string', permission='root_administration') |
|
179 | 178 | def error(request): |
|
180 | 179 | """ |
|
181 | 180 | Raises an internal error with some test data for testing purposes |
|
182 | 181 | """ |
|
183 | 182 | request.environ['appenlight.message'] = 'test message' |
|
184 | 183 | request.environ['appenlight.extra']['dupa'] = 'dupa' |
|
185 | 184 | request.environ['appenlight.extra']['message'] = 'message' |
|
186 | 185 | request.environ['appenlight.tags']['action'] = 'test_error' |
|
187 | 186 | request.environ['appenlight.tags']['count'] = 5 |
|
188 | 187 | log.debug(chr(960)) |
|
189 | 188 | log.debug('debug') |
|
190 | 189 | log.info(chr(960)) |
|
191 | 190 | log.info('INFO') |
|
192 | 191 | log.warning('warning') |
|
193 | 192 | |
|
194 | 193 | @time_trace(name='error.foobar', min_duration=0.1) |
|
195 | 194 | def fooobar(): |
|
196 | 195 | time.sleep(0.12) |
|
197 | 196 | return 1 |
|
198 | 197 | |
|
199 | 198 | fooobar() |
|
200 | 199 | |
|
201 | 200 | def foobar(somearg): |
|
202 | 201 | raise Exception('test') |
|
203 | 202 | |
|
204 | 203 | client = redis.StrictRedis() |
|
205 | 204 | client.setex('testval', 10, 'foo') |
|
206 | 205 | request.environ['appenlight.force_send'] = 1 |
|
207 | 206 | |
|
208 | 207 | # stats, result = get_local_storage(local_timing).get_thread_stats() |
|
209 | 208 | # import pprint |
|
210 | 209 | # pprint.pprint(stats) |
|
211 | 210 | # pprint.pprint(result) |
|
212 | 211 | # print 'entries', len(result) |
|
213 | 212 | request.environ['appenlight.username'] = 'ErgO' |
|
214 | 213 | raise Exception(chr(960) + '%s' % random.randint(1, 5)) |
|
215 | 214 | return {} |
|
216 | 215 | |
|
217 | 216 | |
|
218 | 217 | @view_config(route_name='test', match_param='action=task', |
|
219 | 218 | renderer='string', permission='root_administration') |
|
220 | 219 | def test_task(request): |
|
221 | 220 | """ |
|
222 | 221 | Test erroneous celery task |
|
223 | 222 | """ |
|
224 | 223 | import appenlight.celery.tasks |
|
225 | 224 | |
|
226 | 225 | appenlight.celery.tasks.test_exception_task.delay() |
|
227 | 226 | return 'task sent' |
|
228 | 227 | |
|
229 | 228 | |
|
230 | 229 | @view_config(route_name='test', match_param='action=task_retry', |
|
231 | 230 | renderer='string', permission='root_administration') |
|
232 | 231 | def test_task_retry(request): |
|
233 | 232 | """ |
|
234 | 233 | Test erroneous celery task |
|
235 | 234 | """ |
|
236 | 235 | import appenlight.celery.tasks |
|
237 | 236 | |
|
238 | 237 | appenlight.celery.tasks.test_retry_exception_task.delay() |
|
239 | 238 | return 'task sent' |
|
240 | 239 | |
|
241 | 240 | |
|
242 | 241 | @view_config(route_name='test', match_param='action=celery_emails', |
|
243 | 242 | renderer='string', permission='root_administration') |
|
244 | 243 | def test_celery_emails(request): |
|
245 | 244 | import appenlight.celery.tasks |
|
246 | 245 | appenlight.celery.tasks.alerting.delay() |
|
247 | 246 | return 'task sent' |
|
248 | 247 | |
|
249 | 248 | |
|
250 | 249 | @view_config(route_name='test', match_param='action=daily_digest', |
|
251 | 250 | renderer='string', permission='root_administration') |
|
252 | 251 | def test_celery_daily_digest(request): |
|
253 | 252 | import appenlight.celery.tasks |
|
254 | 253 | appenlight.celery.tasks.daily_digest.delay() |
|
255 | 254 | return 'task sent' |
|
256 | 255 | |
|
257 | 256 | |
|
258 | 257 | @view_config(route_name='test', match_param='action=celery_alerting', |
|
259 | 258 | renderer='string', permission='root_administration') |
|
260 | 259 | def test_celery_alerting(request): |
|
261 | 260 | import appenlight.celery.tasks |
|
262 | 261 | appenlight.celery.tasks.alerting() |
|
263 | 262 | return 'task sent' |
|
264 | 263 | |
|
265 | 264 | |
|
266 | 265 | @view_config(route_name='test', match_param='action=logging', |
|
267 | 266 | renderer='string', permission='root_administration') |
|
268 | 267 | def logs(request): |
|
269 | 268 | """ |
|
270 | 269 | Test some in-app logging |
|
271 | 270 | """ |
|
272 | 271 | log.debug(chr(960)) |
|
273 | 272 | log.debug('debug') |
|
274 | 273 | log.info(chr(960)) |
|
275 | 274 | log.info('INFO') |
|
276 | 275 | log.warning('Matched GET /\xc4\x85\xc5\xbc\xc4\x87' |
|
277 | 276 | '\xc4\x99\xc4\x99\xc4\x85/summary') |
|
278 | 277 | log.warning('XXXXMatched GET /\xc4\x85\xc5\xbc\xc4' |
|
279 | 278 | '\x87\xc4\x99\xc4\x99\xc4\x85/summary') |
|
280 | 279 | log.warning('DUPA /ążćęęą')
|
281 | 280 | log.warning("g\u017ceg\u017c\u00f3\u0142ka") |
|
282 | 281 | log.error('TEST Lorem ipsum2', |
|
283 | 282 | extra={'user': 'ergo', 'commit': 'sog8ds0g7sdih12hh1j512h5k'}) |
|
284 | 283 | log.fatal('TEST Lorem ipsum3') |
|
285 | 284 | log.warning('TEST Lorem ipsum', |
|
286 | 285 | extra={"action": 'purchase', |
|
287 | 286 | "price": random.random() * 100, |
|
288 | 287 | "quantity": random.randint(1, 99)}) |
|
289 | 288 | log.warning('test_pkey', |
|
290 | 289 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
291 | 290 | 'ae_primary_key': 1, |
|
292 | 291 | "quantity": random.randint(1, 99)}) |
|
293 | 292 | log.warning('test_pkey2', |
|
294 | 293 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
295 | 294 | 'ae_primary_key': 'b', |
|
296 | 295 | 'ae_permanent': 't', |
|
297 | 296 | "quantity": random.randint(1, 99)}) |
|
298 | 297 | log.warning('test_pkey3', |
|
299 | 298 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
300 | 299 | 'ae_primary_key': 1, |
|
301 | 300 | "quantity": random.randint(1, 99)}) |
|
302 | 301 | log.warning('test_pkey4', |
|
303 | 302 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
304 | 303 | 'ae_primary_key': 'b', |
|
305 | 304 | 'ae_permanent': True, |
|
306 | 305 | "quantity": random.randint(1, 99)}) |
|
307 | 306 | request.environ['appenlight.force_send'] = 1 |
|
308 | 307 | return {} |
|
309 | 308 | |
|
310 | 309 | |
|
311 | 310 | @view_config(route_name='test', match_param='action=transaction', |
|
312 | 311 | renderer='string', permission='root_administration') |
|
313 | 312 | def transaction_test(request): |
|
314 | 313 | """ |
|
315 | 314 | Test transactions |
|
316 | 315 | """ |
|
317 | 316 | try: |
|
318 | 317 | result = DBSession.execute("SELECT 1/0") |
|
319 | 318 | except: |
|
320 | 319 | request.tm.abort() |
|
321 | 320 | result = DBSession.execute("SELECT 1") |
|
322 | 321 | return 'OK' |
|
323 | 322 | |
|
324 | 323 | |
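
The abort-and-continue pattern above, restated with the transaction
package's thread-local manager instead of pyramid_tm's request.tm (a
sketch reusing this file's DBSession import):

    import transaction
    from appenlight.models import DBSession

    try:
        DBSession.execute("SELECT 1/0")  # division by zero dooms the txn
    except Exception:
        transaction.abort()              # explicit rollback
    DBSession.execute("SELECT 1")        # the session is usable again
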
|
325 | 324 | @view_config(route_name='test', match_param='action=slow_request', |
|
326 | 325 | renderer='string', permission='root_administration') |
|
327 | 326 | def slow_request(request): |
|
328 | 327 | """ |
|
329 | 328 | Test a request that has some slow entries - including nested calls |
|
330 | 329 | """ |
|
331 | 330 | users = DBSession.query(User).all() |
|
332 | 331 | import random |
|
333 | 332 | some_val = random.random() |
|
334 | 333 | import threading |
|
335 | 334 | t_id = id(threading.currentThread()) |
|
336 | 335 | log.warning('slow_log %s %s ' % (some_val, t_id)) |
|
337 | 336 | log.critical('tid %s' % t_id) |
|
338 | 337 | |
|
339 | 338 | @time_trace(name='baz_func %s' % some_val, min_duration=0.1) |
|
340 | 339 | def baz(arg): |
|
341 | 340 | time.sleep(0.32) |
|
342 | 341 | return arg |
|
343 | 342 | |
|
344 | 343 | requests.get('http://ubuntu.com') |
|
345 | 344 | |
|
346 | 345 | @time_trace(name='foo_func %s %s' % (some_val, t_id), min_duration=0.1) |
|
347 | 346 | def foo(arg): |
|
348 | 347 | time.sleep(0.52) |
|
349 | 348 | log.warning('foo_func %s %s' % (some_val, t_id)) |
|
350 | 349 | requests.get('http://ubuntu.com?test=%s' % some_val) |
|
351 | 350 | return bar(arg) |
|
352 | 351 | |
|
353 | 352 | @time_trace(name='bar_func %s %s' % (some_val, t_id), min_duration=0.1) |
|
354 | 353 | def bar(arg): |
|
355 | 354 | log.warning('bar_func %s %s' % (some_val, t_id)) |
|
356 | 355 | time.sleep(1.52) |
|
357 | 356 | baz(arg) |
|
358 | 357 | baz(arg) |
|
359 | 358 | return baz(arg) |
|
360 | 359 | |
|
361 | 360 | foo('a') |
|
362 | 361 | return {} |
|
363 | 362 | |
|
364 | 363 | |
|
365 | 364 | @view_config(route_name='test', match_param='action=styling', |
|
366 | 365 | renderer='appenlight:templates/tests/styling.jinja2', |
|
367 | 366 | permission='__no_permission_required__') |
|
368 | 367 | def styling(request): |
|
369 | 368 | """ |
|
370 | 369 | Some styling test page |
|
371 | 370 | """ |
|
372 | 371 | _ = str |
|
373 | 372 | request.session.flash(_( |
|
374 | 373 | 'Your password got updated. ' |
|
375 | 374 | 'Next time log in with your new credentials.')) |
|
376 | 375 | request.session.flash(_( |
|
377 | 376 | 'Something went wrong when we ' |
|
378 | 377 | 'tried to authorize you via external provider'), |
|
379 | 378 | 'warning') |
|
380 | 379 | request.session.flash(_( |
|
381 | 380 | 'Unfortunately there was a problem ' |
|
382 | 381 | 'processing your payment, please try again later.'), |
|
383 | 382 | 'error') |
|
384 | 383 | return {} |
|
385 | 384 | |
|
386 | 385 | |
|
387 | 386 | @view_config(route_name='test', match_param='action=js_error', |
|
388 | 387 | renderer='appenlight:templates/tests/js_error.jinja2', |
|
389 | 388 | permission='__no_permission_required__') |
|
390 | 389 | def js(request): |
|
391 | 390 | """ |
|
392 | 391 | Used for testing javasctipt client for error catching |
|
393 | 392 | """ |
|
394 | 393 | return {} |
|
395 | 394 | |
|
396 | 395 | |
|
397 | 396 | @view_config(route_name='test', match_param='action=js_log', |
|
398 | 397 | renderer='appenlight:templates/tests/js_log.jinja2', |
|
399 | 398 | permission='__no_permission_required__') |
|
400 | 399 | def js_log(request): |
|
401 | 400 | """ |
|
402 | 401 | Used for testing javasctipt client for logging |
|
403 | 402 | """ |
|
404 | 403 | return {} |
|
405 | 404 | |
|
406 | 405 | |
|
407 | 406 | @view_config(route_name='test', match_param='action=log_requests', |
|
408 | 407 | renderer='string', |
|
409 | 408 | permission='__no_permission_required__') |
|
410 | 409 | def log_requests(request): |
|
411 | 410 | """ |
|
412 | 411 | Util view for printing json requests |
|
413 | 412 | Utility view for printing JSON requests
|
414 | 413 | return {} |
|
415 | 414 | |
|
416 | 415 | |
|
417 | 416 | @view_config(route_name='test', match_param='action=url', renderer='string', |
|
418 | 417 | permission='__no_permission_required__') |
|
419 | 418 | def url_test(request):
|
420 | 419 | """ |
|
421 | 420 | I have no fucking clue why I needed that ;-) |
|
422 | 421 | """ |
|
423 | 422 | return request.route_url('reports', _app_url='https://appenlight.com') |
|
424 | 423 | |
|
425 | 424 | |
|
426 | 425 | class TestClass(object): |
|
427 | 426 | """ |
|
428 | 427 | Used to test if class-based view name resolution works correctly |
|
429 | 428 | """ |
|
430 | 429 | |
|
431 | 430 | def __init__(self, request): |
|
432 | 431 | self.request = request |
|
433 | 432 | |
|
434 | 433 | @view_config(route_name='test', match_param='action=test_a', |
|
435 | 434 | renderer='string', permission='root_administration') |
|
436 | 435 | @view_config(route_name='test', match_param='action=test_c', |
|
437 | 436 | renderer='string', permission='root_administration') |
|
438 | 437 | @view_config(route_name='test', match_param='action=test_d', |
|
439 | 438 | renderer='string', permission='root_administration') |
|
440 | 439 | def test_a(self): |
|
441 | 440 | return 'ok' |
|
442 | 441 | |
|
443 | 442 | @view_config(route_name='test', match_param='action=test_b', |
|
444 | 443 | renderer='string', permission='root_administration') |
|
445 | 444 | def test_b(self): |
|
446 | 445 | return 'ok' |