Auto status change to "Under Review"
Show More
@@ -0,0 +1,30 b'' | |||||
|
1 | """connect resources to alert_channels | |||
|
2 | ||||
|
3 | Revision ID: e9fcfbdd9498 | |||
|
4 | Revises: 55b6e612672f | |||
|
5 | Create Date: 2018-02-28 13:52:50.717217 | |||
|
6 | ||||
|
7 | """ | |||
|
8 | ||||
|
9 | # revision identifiers, used by Alembic. | |||
|
10 | revision = 'e9fcfbdd9498' | |||
|
11 | down_revision = '55b6e612672f' | |||
|
12 | ||||
|
13 | from alembic import op | |||
|
14 | import sqlalchemy as sa | |||
|
15 | ||||
|
16 | ||||
|
def upgrade():
    """Create the ``channels_resources`` many-to-many table that links an
    alert channel to the resources (applications) it should fire for.

    Both foreign keys cascade on update and delete so link rows vanish
    automatically together with their parent channel or resource.
    """
    op.create_table(
        'channels_resources',
        sa.Column('channel_pkey', sa.Integer,
                  sa.ForeignKey('alert_channels.pkey',
                                ondelete='CASCADE', onupdate='CASCADE')),
        sa.Column('resource_id', sa.Integer,
                  sa.ForeignKey('resources.resource_id',
                                ondelete='CASCADE', onupdate='CASCADE'))
    )
|
27 | ||||
|
28 | ||||
|
def downgrade():
    """Drop the ``channels_resources`` table created by :func:`upgrade`."""
    op.drop_table('channels_resources')
@@ -1,291 +1,305 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import logging |
|
17 | import logging | |
18 | import sqlalchemy as sa |
|
18 | import sqlalchemy as sa | |
19 | import urllib.request, urllib.parse, urllib.error |
|
19 | import urllib.request, urllib.parse, urllib.error | |
20 | from datetime import timedelta |
|
20 | from datetime import timedelta | |
21 | from appenlight.models import Base |
|
21 | from appenlight.models import Base | |
22 | from appenlight.lib.utils.date_utils import convert_date |
|
22 | from appenlight.lib.utils.date_utils import convert_date | |
23 | from sqlalchemy.dialects.postgresql import JSON |
|
23 | from sqlalchemy.dialects.postgresql import JSON | |
24 | from ziggurat_foundations.models.base import BaseModel |
|
24 | from ziggurat_foundations.models.base import BaseModel | |
25 |
|
25 | |||
26 | log = logging.getLogger(__name__) |
|
26 | log = logging.getLogger(__name__) | |
27 |
|
27 | |||
# m2m association table: links AlertChannel rows to the
# AlertChannelAction rows (alert rules) attached to them.
channel_rules_m2m_table = sa.Table(
    'channels_actions', Base.metadata,
    sa.Column('channel_pkey', sa.Integer,
              sa.ForeignKey('alert_channels.pkey')),
    sa.Column('action_pkey', sa.Integer,
              sa.ForeignKey('alert_channels_actions.pkey'))
)
36 |
|
36 | |||
|
37 | channel_resources_m2m_table = sa.Table( | |||
|
38 | 'channels_resources', Base.metadata, | |||
|
39 | sa.Column('channel_pkey', sa.Integer, | |||
|
40 | sa.ForeignKey('alert_channels.pkey')), | |||
|
41 | sa.Column('resource_id', sa.Integer, | |||
|
42 | sa.ForeignKey('resources.resource_id')) | |||
|
43 | ) | |||
|
44 | ||||
# strftime format used when embedding start/end dates into dashboard URLs
DATE_FRMT = '%Y-%m-%dT%H:%M'
38 |
|
46 | |||
39 |
|
47 | |||
class AlertChannel(Base, BaseModel):
    """
    Stores information about possible alerting options
    (email, integrations, ...) owned by a user.
    """
    __tablename__ = 'alert_channels'
    __possible_channel_names__ = ['email']
    # single-table polymorphism: concrete channel classes discriminate
    # on the channel_name column
    __mapper_args__ = {
        'polymorphic_on': 'channel_name',
        'polymorphic_identity': 'integration'
    }

    owner_id = sa.Column(sa.Unicode(30),
                         sa.ForeignKey('users.id', onupdate='CASCADE',
                                       ondelete='CASCADE'))
    channel_name = sa.Column(sa.Unicode(25), nullable=False)
    channel_value = sa.Column(sa.Unicode(80), nullable=False, default='')
    channel_json_conf = sa.Column(JSON(), nullable=False, default='')
    channel_validated = sa.Column(sa.Boolean, nullable=False,
                                  default=False)
    send_alerts = sa.Column(sa.Boolean, nullable=False,
                            default=True)
    daily_digest = sa.Column(sa.Boolean, nullable=False,
                             default=True)
    integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'),
                               nullable=True)
    pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)

    channel_actions = sa.orm.relationship('AlertChannelAction',
                                          cascade="all",
                                          passive_deletes=True,
                                          passive_updates=True,
                                          secondary=channel_rules_m2m_table,
                                          backref='channels')
    # FIX: "all, delete-orphan" is rejected by SQLAlchemy on relationships
    # that use a ``secondary`` (many-to-many) table -- it raises
    # ArgumentError at mapper configuration -- so plain "all" is used here,
    # matching the sibling ``channel_actions`` relationship above.
    # NOTE(review): backref='resources' exposes the channel list on the
    # Resource class under the name ``Resource.resources`` -- confirm that
    # attribute name is intended (``channels`` would read more naturally).
    resources = sa.orm.relationship('Resource',
                                    cascade="all",
                                    passive_deletes=True,
                                    passive_updates=True,
                                    secondary=channel_resources_m2m_table,
                                    backref='resources')

    @property
    def channel_visible_value(self):
        """Human-readable label for UI lists: the bound integration's
        resource name when present, otherwise the raw channel value."""
        if self.integration:
            return '{}: {}'.format(
                self.channel_name,
                self.integration.resource.resource_name
            )

        return '{}: {}'.format(
            self.channel_name,
            self.channel_value
        )

    def get_dict(self, exclude_keys=None, include_keys=None,
                 extended_info=True):
        """
        Returns dictionary with required information that will be consumed by
        angular

        :param exclude_keys: keys to strip from the resulting dict
        :param include_keys: when given, only these keys are kept
        :param extended_info: include serialized channel actions
        """
        instance_dict = super(AlertChannel, self).get_dict(exclude_keys,
                                                           include_keys)
        exclude_keys_list = exclude_keys or []
        include_keys_list = include_keys or []

        instance_dict['supports_report_alerting'] = True
        instance_dict['channel_visible_value'] = self.channel_visible_value

        if extended_info:
            instance_dict['actions'] = [
                rule.get_dict(extended_info=True) for
                rule in self.channel_actions]

        # raw JSON configuration never leaves the server
        del instance_dict['channel_json_conf']

        if self.integration:
            instance_dict[
                'supports_report_alerting'] = \
                self.integration.supports_report_alerting
        # apply include/exclude filtering on the final payload
        d = {}
        for k in instance_dict.keys():
            if (k not in exclude_keys_list and
                    (k in include_keys_list or not include_keys)):
                d[k] = instance_dict[k]
        return d

    def __repr__(self):
        return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name,
                                                     self.channel_value,
                                                     self.user_name,)

    def send_digest(self, **kwargs):
        """
        This should implement daily top error report notifications
        """
        log.warning('send_digest NOT IMPLEMENTED')

    def notify_reports(self, **kwargs):
        """
        This should implement notification of reports that occured in 1 min
        interval
        """
        log.warning('notify_reports NOT IMPLEMENTED')

    def notify_alert(self, **kwargs):
        """
        Notify user of report/uptime/chart threshold events based on events alert
        type

        Kwargs:
            application: application that the event applies for,
            event: event that is notified,
            user: user that should be notified
            request: request object

        """
        # dispatch on the unified event name to the matching handler
        alert_name = kwargs['event'].unified_alert_name()
        if alert_name in ['slow_report_alert', 'error_report_alert']:
            self.notify_report_alert(**kwargs)
        elif alert_name == 'uptime_alert':
            self.notify_uptime_alert(**kwargs)
        elif alert_name == 'chart_alert':
            self.notify_chart_alert(**kwargs)

    def notify_chart_alert(self, **kwargs):
        """
        This should implement report open/close alerts notifications
        """
        log.warning('notify_chart_alert NOT IMPLEMENTED')

    def notify_report_alert(self, **kwargs):
        """
        This should implement report open/close alerts notifications
        """
        log.warning('notify_report_alert NOT IMPLEMENTED')

    def notify_uptime_alert(self, **kwargs):
        """
        This should implement uptime open/close alerts notifications
        """
        log.warning('notify_uptime_alert NOT IMPLEMENTED')

    def get_notification_basic_vars(self, kwargs):
        """
        Sets most common variables used later for rendering notifications for
        channel

        NOTE: mutates the caller-supplied ``kwargs`` dict when an 'event'
        key is present (copies the event start date into 'since_when').
        """
        if 'event' in kwargs:
            kwargs['since_when'] = kwargs['event'].start_date

        # window embedded into dashboard links: 1 minute before the event
        # up to 4 minutes after it
        url_start_date = kwargs.get('since_when') - timedelta(minutes=1)
        url_end_date = kwargs.get('since_when') + timedelta(minutes=4)
        tmpl_vars = {
            "timestamp": kwargs['since_when'],
            "user": kwargs['user'],
            "since_when": kwargs.get('since_when'),
            "url_start_date": url_start_date,
            "url_end_date": url_end_date
        }
        tmpl_vars["resource_name"] = kwargs['resource'].resource_name
        tmpl_vars["resource"] = kwargs['resource']

        if 'event' in kwargs:
            tmpl_vars['event_values'] = kwargs['event'].values
            tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name()
            tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action()
        return tmpl_vars

    def report_alert_notification_vars(self, kwargs):
        """Build template variables for error/slow report alert
        notifications, including the dashboard destination URL."""
        tmpl_vars = self.get_notification_basic_vars(kwargs)
        reports = kwargs.get('reports', [])
        tmpl_vars["reports"] = reports
        tmpl_vars["confirmed_total"] = len(reports)

        tmpl_vars["report_type"] = "error reports"
        tmpl_vars["url_report_type"] = 'report/list'

        alert_type = tmpl_vars.get('alert_type', '')
        if 'slow_report' in alert_type:
            tmpl_vars["report_type"] = "slow reports"
            tmpl_vars["url_report_type"] = 'report/list_slow'

        app_url = kwargs['request'].registry.settings['_mail_url']

        destination_url = kwargs['request'].route_url('/',
                                                      _app_url=app_url)
        if alert_type:
            destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
                tmpl_vars["url_report_type"],
                tmpl_vars['resource'].resource_id,
                tmpl_vars['url_start_date'].strftime(DATE_FRMT),
                tmpl_vars['url_end_date'].strftime(DATE_FRMT)
            )
        else:
            destination_url += 'ui/{}?resource={}'.format(
                tmpl_vars["url_report_type"],
                tmpl_vars['resource'].resource_id
            )
        tmpl_vars["destination_url"] = destination_url

        return tmpl_vars

    def uptime_alert_notification_vars(self, kwargs):
        """Build template variables for uptime alert notifications,
        including a human-readable failure reason."""
        tmpl_vars = self.get_notification_basic_vars(kwargs)
        app_url = kwargs['request'].registry.settings['_mail_url']
        destination_url = kwargs['request'].route_url('/', _app_url=app_url)
        destination_url += 'ui/{}?resource={}'.format(
            'uptime',
            tmpl_vars['resource'].resource_id)
        tmpl_vars['destination_url'] = destination_url

        reason = ''
        e_values = tmpl_vars.get('event_values')

        # response_time == 0 marks a timed-out check in the event payload
        if e_values and e_values.get('response_time') == 0:
            reason += ' Response time was slower than 20 seconds.'
        elif e_values:
            code = e_values.get('status_code')
            reason += ' Response status code: %s.' % code

        tmpl_vars['reason'] = reason
        return tmpl_vars

    def chart_alert_notification_vars(self, kwargs):
        """Build template variables for chart alert notifications: resolves
        human-readable labels for matched values and the logs URL."""
        tmpl_vars = self.get_notification_basic_vars(kwargs)
        tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name']
        tmpl_vars['action_name'] = tmpl_vars['event_values'].get(
            'action_name') or ''
        matched_values = tmpl_vars['event_values']['matched_step_values']
        tmpl_vars['readable_values'] = []
        for key, value in list(matched_values['values'].items()):
            matched_label = matched_values['labels'].get(key)
            if matched_label:
                tmpl_vars['readable_values'].append({
                    'label': matched_label['human_label'],
                    'value': value
                })
        tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'],
                                              key=lambda x: x['label'])
        start_date = convert_date(tmpl_vars['event_values']['start_interval'])
        end_date = None
        if tmpl_vars['event_values'].get('end_interval'):
            end_date = convert_date(tmpl_vars['event_values']['end_interval'])

        app_url = kwargs['request'].registry.settings['_mail_url']
        destination_url = kwargs['request'].route_url('/', _app_url=app_url)
        to_encode = {
            'resource': tmpl_vars['event_values']['resource'],
            'start_date': start_date.strftime(DATE_FRMT),
        }
        if end_date:
            to_encode['end_date'] = end_date.strftime(DATE_FRMT)

        destination_url += 'ui/{}?{}'.format(
            'logs',
            urllib.parse.urlencode(to_encode)
        )
        tmpl_vars['destination_url'] = destination_url
        return tmpl_vars
@@ -1,155 +1,160 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors | |
4 | # |
|
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 | # you may not use this file except in compliance with the License. |
|
6 | # you may not use this file except in compliance with the License. | |
7 | # You may obtain a copy of the License at |
|
7 | # You may obtain a copy of the License at | |
8 | # |
|
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # |
|
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software |
|
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
12 | # distributed under the License is distributed on an "AS IS" BASIS, | |
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 | # See the License for the specific language governing permissions and |
|
14 | # See the License for the specific language governing permissions and | |
15 | # limitations under the License. |
|
15 | # limitations under the License. | |
16 |
|
16 | |||
17 | import sqlalchemy as sa |
|
17 | import sqlalchemy as sa | |
18 | import logging |
|
18 | import logging | |
19 |
|
19 | |||
20 | from datetime import datetime |
|
20 | from datetime import datetime | |
21 | from appenlight.models import Base, get_db_session |
|
21 | from appenlight.models import Base, get_db_session | |
22 | from appenlight.models.services.report_stat import ReportStatService |
|
22 | from appenlight.models.services.report_stat import ReportStatService | |
23 | from appenlight.models.resource import Resource |
|
23 | from appenlight.models.resource import Resource | |
24 | from appenlight.models.integrations import IntegrationException |
|
24 | from appenlight.models.integrations import IntegrationException | |
25 | from pyramid.threadlocal import get_current_request |
|
25 | from pyramid.threadlocal import get_current_request | |
26 | from sqlalchemy.dialects.postgresql import JSON |
|
26 | from sqlalchemy.dialects.postgresql import JSON | |
27 | from ziggurat_foundations.models.base import BaseModel |
|
27 | from ziggurat_foundations.models.base import BaseModel | |
28 |
|
28 | |||
29 | log = logging.getLogger(__name__) |
|
29 | log = logging.getLogger(__name__) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | class Event(Base, BaseModel): |
|
32 | class Event(Base, BaseModel): | |
33 | __tablename__ = 'events' |
|
33 | __tablename__ = 'events' | |
34 |
|
34 | |||
35 | types = {'error_report_alert': 1, |
|
35 | types = {'error_report_alert': 1, | |
36 | 'slow_report_alert': 3, |
|
36 | 'slow_report_alert': 3, | |
37 | 'comment': 5, |
|
37 | 'comment': 5, | |
38 | 'assignment': 6, |
|
38 | 'assignment': 6, | |
39 | 'uptime_alert': 7, |
|
39 | 'uptime_alert': 7, | |
40 | 'chart_alert': 9} |
|
40 | 'chart_alert': 9} | |
41 |
|
41 | |||
42 | statuses = {'active': 1, |
|
42 | statuses = {'active': 1, | |
43 | 'closed': 0} |
|
43 | 'closed': 0} | |
44 |
|
44 | |||
45 | id = sa.Column(sa.Integer, primary_key=True) |
|
45 | id = sa.Column(sa.Integer, primary_key=True) | |
46 | start_date = sa.Column(sa.DateTime, default=datetime.utcnow) |
|
46 | start_date = sa.Column(sa.DateTime, default=datetime.utcnow) | |
47 | end_date = sa.Column(sa.DateTime) |
|
47 | end_date = sa.Column(sa.DateTime) | |
48 | status = sa.Column(sa.Integer, default=1) |
|
48 | status = sa.Column(sa.Integer, default=1) | |
49 | event_type = sa.Column(sa.Integer, default=1) |
|
49 | event_type = sa.Column(sa.Integer, default=1) | |
50 | origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), |
|
50 | origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), | |
51 | nullable=True) |
|
51 | nullable=True) | |
52 | target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), |
|
52 | target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), | |
53 | nullable=True) |
|
53 | nullable=True) | |
54 | resource_id = sa.Column(sa.Integer(), |
|
54 | resource_id = sa.Column(sa.Integer(), | |
55 | sa.ForeignKey('resources.resource_id'), |
|
55 | sa.ForeignKey('resources.resource_id'), | |
56 | nullable=True) |
|
56 | nullable=True) | |
57 | target_id = sa.Column(sa.Integer) |
|
57 | target_id = sa.Column(sa.Integer) | |
58 | target_uuid = sa.Column(sa.Unicode(40)) |
|
58 | target_uuid = sa.Column(sa.Unicode(40)) | |
59 | text = sa.Column(sa.UnicodeText()) |
|
59 | text = sa.Column(sa.UnicodeText()) | |
60 | values = sa.Column(JSON(), nullable=False, default=None) |
|
60 | values = sa.Column(JSON(), nullable=False, default=None) | |
61 |
|
61 | |||
62 | def __repr__(self): |
|
62 | def __repr__(self): | |
63 | return '<Event %s, app:%s, %s>' % (self.unified_alert_name(), |
|
63 | return '<Event %s, app:%s, %s>' % (self.unified_alert_name(), | |
64 | self.resource_id, |
|
64 | self.resource_id, | |
65 | self.unified_alert_action()) |
|
65 | self.unified_alert_action()) | |
66 |
|
66 | |||
67 | @property |
|
67 | @property | |
68 | def reverse_types(self): |
|
68 | def reverse_types(self): | |
69 | return dict([(v, k) for k, v in self.types.items()]) |
|
69 | return dict([(v, k) for k, v in self.types.items()]) | |
70 |
|
70 | |||
71 | def unified_alert_name(self): |
|
71 | def unified_alert_name(self): | |
72 | return self.reverse_types[self.event_type] |
|
72 | return self.reverse_types[self.event_type] | |
73 |
|
73 | |||
74 | def unified_alert_action(self): |
|
74 | def unified_alert_action(self): | |
75 | event_name = self.reverse_types[self.event_type] |
|
75 | event_name = self.reverse_types[self.event_type] | |
76 | if self.status == Event.statuses['closed']: |
|
76 | if self.status == Event.statuses['closed']: | |
77 | return "CLOSE" |
|
77 | return "CLOSE" | |
78 | if self.status != Event.statuses['closed']: |
|
78 | if self.status != Event.statuses['closed']: | |
79 | return "OPEN" |
|
79 | return "OPEN" | |
80 | return event_name |
|
80 | return event_name | |
81 |
|
81 | |||
def send_alerts(self, request=None, resource=None, db_session=None):
    """Send this alert event to every applicable user channel.

    :param request: pyramid request; resolved via the threadlocal registry
        when not supplied
    :param resource: the Resource the alert concerns; looked up from
        ``self.resource_id`` when not supplied
    :param db_session: optional SQLAlchemy session override

    Channels are skipped when unvalidated, when alert sending is disabled,
    or when the channel is bound to specific resources that do not include
    this one. ``IntegrationException`` from a channel is logged and does
    not abort delivery to remaining channels.
    """
    db_session = get_db_session(db_session)
    db_session.flush()
    if not resource:
        resource = Resource.by_resource_id(self.resource_id)
    if not request:
        request = get_current_request()
    if not resource:
        return
    users = set([p.user for p in resource.users_for_perm('view')])
    for user in users:
        for channel in user.alert_channels:
            # BUG FIX: the original compared the Resource object itself
            # against a list of integer resource_ids, which is always
            # False for a bound channel — compare ids with ids instead.
            matches_resource = (
                not channel.resources or
                resource.resource_id in
                [r.resource_id for r in channel.resources])
            if (not channel.channel_validated or
                    not channel.send_alerts or
                    not matches_resource):
                continue
            try:
                channel.notify_alert(resource=resource,
                                     event=self,
                                     user=user,
                                     request=request)
            except IntegrationException as e:
                # Best-effort delivery: log and keep notifying others.
                log.warning('%s' % e)
105 |
|
110 | |||
def validate_or_close(self, since_when, db_session=None):
    """Decide whether this alert should stay open or be closed.

    Only report-count alerts (error/slow) are examined: when the number of
    reports since ``since_when`` has dropped below the application's
    configured threshold, the event is closed (which also notifies users).

    :param since_when: lower time bound for counting reports
    :param db_session: unused here; kept for interface symmetry
    """
    alert_types = (Event.types['error_report_alert'],
                   Event.types['slow_report_alert'])
    app = Resource.by_resource_id(self.resource_id)
    if self.event_type not in alert_types:
        return
    total = ReportStatService.count_by_type(
        self.event_type, self.resource_id, since_when)
    # Pick the threshold matching this alert flavor.
    if self.event_type == Event.types['error_report_alert']:
        threshold = app.error_report_threshold
    else:
        threshold = app.slow_report_threshold
    if total < threshold:
        self.close()
122 |
|
127 | |||
def close(self, db_session=None):
    """Close this event and notify affected users.

    Marks the event closed with the current UTC time, logs the closure,
    and dispatches alerts through ``send_alerts``.
    """
    self.status = Event.statuses['closed']
    self.end_date = datetime.utcnow()
    log.warning('ALERT: CLOSE: %s' % self)
    self.send_alerts()
131 |
|
136 | |||
def text_representation(self):
    """Build a short human-readable sentence describing this alert.

    Combines a label derived from the alert type with an opened/closed
    suffix derived from the unified action.
    """
    alert_type = self.unified_alert_name()
    # Table-driven label selection; substring checks match the original
    # if-chain (and, like it, concatenate every matching label).
    labels = (
        ('slow_report', 'Slow report alert'),
        ('error_report', 'Exception report alert'),
        ('uptime_alert', 'Uptime alert'),
        ('chart_alert', 'Metrics value alert'),
    )
    text = ''.join(label for key, label in labels if key in alert_type)

    action = self.unified_alert_action()
    if action == 'OPEN':
        text += ' got opened.'
    elif action == 'CLOSE':
        text += ' got closed.'
    return text
150 |
|
155 | |||
def get_dict(self, request=None):
    """Serialize the event, adding display text and the resource name.

    :param request: accepted for interface compatibility; not used here
    :returns: dict from the parent serializer plus ``text`` and
        ``resource_name`` keys
    """
    data = super(Event, self).get_dict()
    data['text'] = self.text_representation()
    data['resource_name'] = self.resource.resource_name
    return data
General Comments 2
Please use: https://github.com/Appenlight/appenlight to contribute :) Thanks !
You need to be logged in to leave comments.
Login now