# alert_channel.py
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging | ||||
import sqlalchemy as sa | ||||
import urllib.request, urllib.parse, urllib.error | ||||
from datetime import timedelta | ||||
from appenlight.models import Base | ||||
from appenlight.lib.utils.date_utils import convert_date | ||||
from sqlalchemy.dialects.postgresql import JSON | ||||
from ziggurat_foundations.models.base import BaseModel | ||||
log = logging.getLogger(__name__) | ||||
# | ||||
# Association table: many-to-many link between alert channels and the
# alert actions (rules) that route events to them.
channel_rules_m2m_table = sa.Table(
    "channels_actions",
    Base.metadata,
    sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
    sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")),
)

# Association table: many-to-many link between alert channels and the
# resources (applications) they are bound to.
channel_resources_m2m_table = sa.Table(
    "channels_resources",
    Base.metadata,
    sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
    sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")),
)

# Timestamp format used when embedding start/end dates in notification URLs.
DATE_FRMT = "%Y-%m-%dT%H:%M"
r0 | ||||
class AlertChannel(Base, BaseModel):
    """
    Stores information about possible alerting options
    """

    __tablename__ = "alert_channels"
    # Known channel type names; presumably extended by plugins/subclasses —
    # TODO confirm against the rest of the project.
    __possible_channel_names__ = ["email"]
    # Single-table polymorphic hierarchy discriminated on `channel_name`.
    __mapper_args__ = {
        "polymorphic_on": "channel_name",
        "polymorphic_identity": "integration",
    }

    # Owning user; follows id changes and is removed with the user (CASCADE).
    owner_id = sa.Column(
        sa.Unicode(30),
        sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
    )
    # Polymorphic discriminator, e.g. "email".
    channel_name = sa.Column(sa.Unicode(25), nullable=False)
    # Channel target/address (used in the visible label when no integration).
    channel_value = sa.Column(sa.Unicode(80), nullable=False, default="")
    # Per-channel configuration blob; stripped from get_dict() output.
    channel_json_conf = sa.Column(JSON(), nullable=False, default="")
    # Whether the channel target has been confirmed.
    channel_validated = sa.Column(sa.Boolean, nullable=False, default=False)
    # Toggles for immediate alerts and daily digest delivery.
    send_alerts = sa.Column(sa.Boolean, nullable=False, default=True)
    daily_digest = sa.Column(sa.Boolean, nullable=False, default=True)
    # Optional link to an external integration backing this channel.
    integration_id = sa.Column(
        sa.Integer, sa.ForeignKey("integrations.id"), nullable=True
    )
    pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)

    # Alert actions attached to this channel (m2m via channels_actions).
    channel_actions = sa.orm.relationship(
        "AlertChannelAction",
        cascade="all",
        passive_deletes=True,
        passive_updates=True,
        secondary=channel_rules_m2m_table,
        backref="channels",
    )
    # Resources this channel is bound to (m2m via channels_resources).
    # NOTE(review): backref name "resources" on Resource looks like a
    # copy-paste slip (one would expect "channels") — confirm before changing.
    resources = sa.orm.relationship(
        "Resource",
        cascade="all",
        passive_deletes=True,
        passive_updates=True,
        secondary=channel_resources_m2m_table,
        backref="resources",
    )
r0 | ||||
@property | ||||
def channel_visible_value(self): | ||||
if self.integration: | ||||
r153 | return "{}: {}".format( | |||
self.channel_name, self.integration.resource.resource_name | ||||
r0 | ) | |||
r153 | return "{}: {}".format(self.channel_name, self.channel_value) | |||
r0 | ||||
r153 | def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True): | |||
r0 | """ | |||
Returns dictionary with required information that will be consumed by | ||||
angular | ||||
""" | ||||
r153 | instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys) | |||
r0 | exclude_keys_list = exclude_keys or [] | |||
include_keys_list = include_keys or [] | ||||
r153 | instance_dict["supports_report_alerting"] = True | |||
instance_dict["channel_visible_value"] = self.channel_visible_value | ||||
r0 | ||||
if extended_info: | ||||
r153 | instance_dict["actions"] = [ | |||
rule.get_dict(extended_info=True) for rule in self.channel_actions | ||||
] | ||||
r0 | ||||
r153 | del instance_dict["channel_json_conf"] | |||
r0 | ||||
if self.integration: | ||||
instance_dict[ | ||||
r153 | "supports_report_alerting" | |||
] = self.integration.supports_report_alerting | ||||
r0 | d = {} | |||
for k in instance_dict.keys(): | ||||
r153 | if k not in exclude_keys_list and ( | |||
k in include_keys_list or not include_keys | ||||
): | ||||
r0 | d[k] = instance_dict[k] | |||
return d | ||||
def __repr__(self): | ||||
r153 | return "<AlertChannel: (%s,%s), user:%s>" % ( | |||
self.channel_name, | ||||
self.channel_value, | ||||
self.user_name, | ||||
) | ||||
r0 | ||||
    def send_digest(self, **kwargs):
        """
        This should implement daily top error report notifications

        Stub on the base class; concrete channel subclasses are expected
        to override it.
        """
        log.warning("send_digest NOT IMPLEMENTED")
r0 | ||||
    def notify_reports(self, **kwargs):
        """
        This should implement notification of reports that occurred in 1 min
        interval

        Stub on the base class; concrete channel subclasses are expected
        to override it.
        """
        log.warning("notify_reports NOT IMPLEMENTED")
r0 | ||||
def notify_alert(self, **kwargs): | ||||
""" | ||||
Notify user of report/uptime/chart threshold events based on events alert | ||||
type | ||||
Kwargs: | ||||
application: application that the event applies for, | ||||
event: event that is notified, | ||||
user: user that should be notified | ||||
request: request object | ||||
""" | ||||
r153 | alert_name = kwargs["event"].unified_alert_name() | |||
if alert_name in ["slow_report_alert", "error_report_alert"]: | ||||
r0 | self.notify_report_alert(**kwargs) | |||
r153 | elif alert_name == "uptime_alert": | |||
r0 | self.notify_uptime_alert(**kwargs) | |||
r153 | elif alert_name == "chart_alert": | |||
r0 | self.notify_chart_alert(**kwargs) | |||
    def notify_chart_alert(self, **kwargs):
        """
        This should implement chart alert open/close notifications
        (original docstring said "report" — a copy-paste slip; the method
        name and log message indicate chart alerts)

        Stub on the base class; concrete channel subclasses are expected
        to override it.
        """
        log.warning("notify_chart_alert NOT IMPLEMENTED")
r0 | ||||
    def notify_report_alert(self, **kwargs):
        """
        This should implement report open/close alerts notifications

        Stub on the base class; concrete channel subclasses are expected
        to override it.
        """
        log.warning("notify_report_alert NOT IMPLEMENTED")
r0 | ||||
    def notify_uptime_alert(self, **kwargs):
        """
        This should implement uptime open/close alerts notifications

        Stub on the base class; concrete channel subclasses are expected
        to override it.
        """
        log.warning("notify_uptime_alert NOT IMPLEMENTED")
r0 | ||||
def get_notification_basic_vars(self, kwargs): | ||||
""" | ||||
Sets most common variables used later for rendering notifications for | ||||
channel | ||||
""" | ||||
r153 | if "event" in kwargs: | |||
kwargs["since_when"] = kwargs["event"].start_date | ||||
r0 | ||||
r153 | url_start_date = kwargs.get("since_when") - timedelta(minutes=1) | |||
url_end_date = kwargs.get("since_when") + timedelta(minutes=4) | ||||
r0 | tmpl_vars = { | |||
r153 | "timestamp": kwargs["since_when"], | |||
"user": kwargs["user"], | ||||
"since_when": kwargs.get("since_when"), | ||||
r0 | "url_start_date": url_start_date, | |||
r153 | "url_end_date": url_end_date, | |||
r0 | } | |||
r153 | tmpl_vars["resource_name"] = kwargs["resource"].resource_name | |||
tmpl_vars["resource"] = kwargs["resource"] | ||||
r0 | ||||
r153 | if "event" in kwargs: | |||
tmpl_vars["event_values"] = kwargs["event"].values | ||||
tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name() | ||||
tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action() | ||||
r0 | return tmpl_vars | |||
def report_alert_notification_vars(self, kwargs): | ||||
tmpl_vars = self.get_notification_basic_vars(kwargs) | ||||
r153 | reports = kwargs.get("reports", []) | |||
r0 | tmpl_vars["reports"] = reports | |||
tmpl_vars["confirmed_total"] = len(reports) | ||||
tmpl_vars["report_type"] = "error reports" | ||||
r153 | tmpl_vars["url_report_type"] = "report/list" | |||
r0 | ||||
r153 | alert_type = tmpl_vars.get("alert_type", "") | |||
if "slow_report" in alert_type: | ||||
r0 | tmpl_vars["report_type"] = "slow reports" | |||
r153 | tmpl_vars["url_report_type"] = "report/list_slow" | |||
r0 | ||||
r153 | app_url = kwargs["request"].registry.settings["_mail_url"] | |||
r0 | ||||
r153 | destination_url = kwargs["request"].route_url("/", _app_url=app_url) | |||
r0 | if alert_type: | |||
r153 | destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format( | |||
r0 | tmpl_vars["url_report_type"], | |||
r153 | tmpl_vars["resource"].resource_id, | |||
tmpl_vars["url_start_date"].strftime(DATE_FRMT), | ||||
tmpl_vars["url_end_date"].strftime(DATE_FRMT), | ||||
r0 | ) | |||
else: | ||||
r153 | destination_url += "ui/{}?resource={}".format( | |||
tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id | ||||
r0 | ) | |||
tmpl_vars["destination_url"] = destination_url | ||||
return tmpl_vars | ||||
def uptime_alert_notification_vars(self, kwargs): | ||||
tmpl_vars = self.get_notification_basic_vars(kwargs) | ||||
r153 | app_url = kwargs["request"].registry.settings["_mail_url"] | |||
destination_url = kwargs["request"].route_url("/", _app_url=app_url) | ||||
destination_url += "ui/{}?resource={}".format( | ||||
"uptime", tmpl_vars["resource"].resource_id | ||||
) | ||||
tmpl_vars["destination_url"] = destination_url | ||||
reason = "" | ||||
e_values = tmpl_vars.get("event_values") | ||||
if e_values and e_values.get("response_time") == 0: | ||||
reason += " Response time was slower than 20 seconds." | ||||
r0 | elif e_values: | |||
r153 | code = e_values.get("status_code") | |||
reason += " Response status code: %s." % code | ||||
r0 | ||||
r153 | tmpl_vars["reason"] = reason | |||
r0 | return tmpl_vars | |||
def chart_alert_notification_vars(self, kwargs): | ||||
tmpl_vars = self.get_notification_basic_vars(kwargs) | ||||
r153 | tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"] | |||
tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or "" | ||||
matched_values = tmpl_vars["event_values"]["matched_step_values"] | ||||
tmpl_vars["readable_values"] = [] | ||||
for key, value in list(matched_values["values"].items()): | ||||
matched_label = matched_values["labels"].get(key) | ||||
r0 | if matched_label: | |||
r153 | tmpl_vars["readable_values"].append( | |||
{"label": matched_label["human_label"], "value": value} | ||||
) | ||||
tmpl_vars["readable_values"] = sorted( | ||||
tmpl_vars["readable_values"], key=lambda x: x["label"] | ||||
) | ||||
start_date = convert_date(tmpl_vars["event_values"]["start_interval"]) | ||||
r0 | end_date = None | |||
r153 | if tmpl_vars["event_values"].get("end_interval"): | |||
end_date = convert_date(tmpl_vars["event_values"]["end_interval"]) | ||||
r0 | ||||
r153 | app_url = kwargs["request"].registry.settings["_mail_url"] | |||
destination_url = kwargs["request"].route_url("/", _app_url=app_url) | ||||
r0 | to_encode = { | |||
r153 | "resource": tmpl_vars["event_values"]["resource"], | |||
"start_date": start_date.strftime(DATE_FRMT), | ||||
r0 | } | |||
if end_date: | ||||
r153 | to_encode["end_date"] = end_date.strftime(DATE_FRMT) | |||
r0 | ||||
r153 | destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode)) | |||
tmpl_vars["destination_url"] = destination_url | ||||
r0 | return tmpl_vars | |||