report_group.py
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import paginate
import sqlalchemy as sa
import appenlight.lib.helpers as h
from datetime import datetime
from appenlight.models import get_db_session, Datastores
from appenlight.models.report import Report
from appenlight.models.report_group import ReportGroup
from appenlight.models.report_comment import ReportComment
from appenlight.models.user import User
from appenlight.models.services.base import BaseService
from appenlight.lib.enums import ReportType
from appenlight.lib.utils import es_index_name_limiter

log = logging.getLogger(__name__)


class ReportGroupService(BaseService):
    @classmethod
    def get_trending(cls, request, filter_settings, limit=15, db_session=None):
        """
        Returns report groups trending for a specific time interval
        """
        db_session = get_db_session(db_session)
        tags = []
        if filter_settings.get("tags"):
            for tag in filter_settings["tags"]:
                tags.append(
                    {"terms": {"tags.{}.values".format(tag["name"]): tag["value"]}}
                )

        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["reports"],
        )

        if not index_names or not filter_settings["resource"]:
            return []
        es_query = {
            "aggs": {
                "parent_agg": {
                    "aggs": {
                        "groups": {
                            "aggs": {
                                "sub_agg": {
                                    "value_count": {"field": "tags.group_id.values"}
                                }
                            },
                            "filter": {"exists": {"field": "tags.group_id.values"}},
                        }
                    },
                    "terms": {"field": "tags.group_id.values", "size": limit},
                }
            },
            "query": {
                "bool": {
                    "filter": [
                        {
                            "terms": {
                                "resource_id": [filter_settings["resource"][0]]
                            }
                        },
                        {
                            "range": {
                                "timestamp": {
                                    "gte": filter_settings["start_date"],
                                    "lte": filter_settings["end_date"],
                                }
                            }
                        },
                    ]
                }
            },
        }
        if tags:
            es_query["query"]["bool"]["filter"].extend(tags)

        result = Datastores.es.search(
            body=es_query, index=index_names, doc_type="log", size=0
        )
        series = []
        for bucket in result["aggregations"]["parent_agg"]["buckets"]:
            series.append(
                {"key": bucket["key"], "groups": bucket["groups"]["sub_agg"]["value"]}
            )

        report_groups_d = {}
        for g in series:
            report_groups_d[int(g["key"])] = g["groups"] or 0

        query = db_session.query(ReportGroup)
        query = query.filter(ReportGroup.id.in_(list(report_groups_d.keys())))
        query = query.options(sa.orm.joinedload(ReportGroup.last_report_ref))
        results = [(report_groups_d[group.id], group) for group in query]
        return sorted(results, reverse=True, key=lambda x: x[0])

    @classmethod
    def get_search_iterator(
        cls,
        app_ids=None,
        page=1,
        items_per_page=50,
        order_by=None,
        filter_settings=None,
        limit=None,
    ):
        if not app_ids:
            return {}
        if not filter_settings:
            filter_settings = {}
        query = {
            "size": 0,
            "query": {
                "bool": {
                    "must": [],
                    "should": [],
                    "filter": [{"terms": {"resource_id": list(app_ids)}}]
                }
            },
            "aggs": {
                "top_groups": {
                    "terms": {
                        "size": 5000,
                        "field": "_parent",
                        "order": {"newest": "desc"},
                    },
                    "aggs": {
                        "top_reports_hits": {
                            "top_hits": {"size": 1, "sort": {"start_time": "desc"}}
                        },
                        "newest": {"max": {"field": "start_time"}},
                    },
                }
            },
        }
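        # Note: report documents are indexed as children of report_group
        # parents, so the "_parent" terms aggregation above buckets reports by
        # their group; each bucket keeps its newest report via the top_hits /
        # "newest" sub-aggregations, and buckets are ordered newest-first.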
        start_date = filter_settings.get("start_date")
        end_date = filter_settings.get("end_date")
        filter_part = query["query"]["bool"]["filter"]
        date_range = {"range": {"start_time": {}}}
        if start_date:
            date_range["range"]["start_time"]["gte"] = start_date
        if end_date:
            date_range["range"]["start_time"]["lte"] = end_date
        if start_date or end_date:
            filter_part.append(date_range)
        priorities = filter_settings.get("priority")

        for tag in filter_settings.get("tags", []):
            tag_values = [v.lower() for v in tag["value"]]
            key = "tags.%s.values" % tag["name"].replace(".", "_")
            filter_part.append({"terms": {key: tag_values}})
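        # Group-level attributes (priority, occurrence counts, duration,
        # review/fix status) live on the parent report_group document, so the
        # filters below use has_parent queries instead of plain term filters.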
        if priorities:
            filter_part.append(
                {
                    "has_parent": {
                        "parent_type": "report_group",
                        "query": {"terms": {"priority": priorities}},
                    }
                }
            )

        min_occurences = filter_settings.get("min_occurences")
        if min_occurences:
            filter_part.append(
                {
                    "has_parent": {
                        "parent_type": "report_group",
                        "query": {"range": {"occurences": {"gte": min_occurences[0]}}},
                    }
                }
            )

        min_duration = filter_settings.get("min_duration")
        max_duration = filter_settings.get("max_duration")

        request_ids = filter_settings.get("request_id")
        if request_ids:
            filter_part.append({"terms": {"request_id": request_ids}})

        duration_range = {"range": {"average_duration": {}}}
        if min_duration:
            duration_range["range"]["average_duration"]["gte"] = min_duration[0]
        if max_duration:
            duration_range["range"]["average_duration"]["lte"] = max_duration[0]
        if min_duration or max_duration:
            filter_part.append(
                {"has_parent": {"parent_type": "report_group", "query": duration_range}}
            )

        http_status = filter_settings.get("http_status")
        report_type = filter_settings.get("report_type", [ReportType.error])
        # apply the report type filter when no http status filter is given,
        # or when slow reports are among the requested types
        if not http_status or ReportType.slow in report_type:
            filter_part.append({"terms": {"report_type": report_type}})
        if http_status:
            filter_part.append({"terms": {"http_status": http_status}})

        messages = filter_settings.get("message")
        if messages:
            condition = {"match": {"message": " ".join(messages)}}
            query["query"]["bool"]["must"].append(condition)
        errors = filter_settings.get("error")
        if errors:
            condition = {"match": {"error": " ".join(errors)}}
            query["query"]["bool"]["must"].append(condition)
        url_domains = filter_settings.get("url_domain")
        if url_domains:
            condition = {"terms": {"url_domain": url_domains}}
            query["query"]["bool"]["must"].append(condition)
        url_paths = filter_settings.get("url_path")
        if url_paths:
            condition = {"terms": {"url_path": url_paths}}
            query["query"]["bool"]["must"].append(condition)

        if filter_settings.get("report_status"):
            for status in filter_settings.get("report_status"):
                if status == "never_reviewed":
                    filter_part.append(
                        {
                            "has_parent": {
                                "parent_type": "report_group",
                                "query": {"term": {"read": False}},
                            }
                        }
                    )
                elif status == "reviewed":
                    filter_part.append(
                        {
                            "has_parent": {
                                "parent_type": "report_group",
                                "query": {"term": {"read": True}},
                            }
                        }
                    )
                elif status == "public":
                    filter_part.append(
                        {
                            "has_parent": {
                                "parent_type": "report_group",
                                "query": {"term": {"public": True}},
                            }
                        }
                    )
                elif status == "fixed":
                    filter_part.append(
                        {
                            "has_parent": {
                                "parent_type": "report_group",
                                "query": {"term": {"fixed": True}},
                            }
                        }
                    )

        # logging.getLogger('pyelasticsearch').setLevel(logging.DEBUG)
        index_names = es_index_name_limiter(
            filter_settings.get("start_date"),
            filter_settings.get("end_date"),
            ixtypes=["reports"],
        )
        if index_names:
            results = Datastores.es.search(
                body=query,
                index=index_names,
                doc_type=["report", "report_group"],
                size=0,
            )
        else:
            return []
        return results["aggregations"]

    @classmethod
    def get_paginator_by_app_ids(
        cls,
        app_ids=None,
        page=1,
        item_count=None,
        items_per_page=50,
        order_by=None,
        filter_settings=None,
        exclude_columns=None,
        db_session=None,
    ):
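        # Two-phase lookup (descriptive note): get_search_iterator() asks
        # Elasticsearch for the matching groups, the newest report id (pg_id)
        # is pulled out of each "top_groups" bucket, and the Report rows are
        # then fetched with SQLAlchemy and re-sorted to preserve the ES order
        # before being attached to the paginator as `sa_items`.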
        if not filter_settings:
            filter_settings = {}
        results = cls.get_search_iterator(
            app_ids, page, items_per_page, order_by, filter_settings
        )

        ordered_ids = []
        if results:
            for item in results["top_groups"]["buckets"]:
                pg_id = item["top_reports_hits"]["hits"]["hits"][0]["_source"]["pg_id"]
                ordered_ids.append(pg_id)
        log.info(filter_settings)
        paginator = paginate.Page(
            ordered_ids, items_per_page=items_per_page, **filter_settings
        )
        sa_items = ()
        if paginator.items:
            db_session = get_db_session(db_session)
            # latest report detail
            query = db_session.query(Report)
            query = query.options(sa.orm.joinedload(Report.report_group))
            query = query.filter(Report.id.in_(paginator.items))
            if filter_settings.get("order_col"):
                order_col = filter_settings.get("order_col")
                if filter_settings.get("order_dir") == "dsc":
                    sort_on = "desc"
                else:
                    sort_on = "asc"
                if order_col == "when":
                    order_col = "last_timestamp"
                query = query.order_by(
                    getattr(sa, sort_on)(getattr(ReportGroup, order_col))
                )
            sa_items = query.all()
        sorted_instance_list = []
        for i_id in ordered_ids:
            for report in sa_items:
                if str(report.id) == i_id and report not in sorted_instance_list:
                    sorted_instance_list.append(report)
        paginator.sa_items = sorted_instance_list
        return paginator

    @classmethod
    def by_app_ids(cls, app_ids=None, order_by=True, db_session=None):
        db_session = get_db_session(db_session)
        q = db_session.query(ReportGroup)
        if app_ids:
            q = q.filter(ReportGroup.resource_id.in_(app_ids))
        if order_by:
            q = q.order_by(sa.desc(ReportGroup.id))
        return q

    @classmethod
    def by_id(cls, group_id, app_ids=None, db_session=None):
        db_session = get_db_session(db_session)
        q = db_session.query(ReportGroup).filter(ReportGroup.id == int(group_id))
        if app_ids:
            q = q.filter(ReportGroup.resource_id.in_(app_ids))
        return q.first()

    @classmethod
    def by_ids(cls, group_ids=None, db_session=None):
        db_session = get_db_session(db_session)
        query = db_session.query(ReportGroup)
        query = query.filter(ReportGroup.id.in_(group_ids))
        return query

    @classmethod
    def by_hash_and_resource(
        cls, resource_id, grouping_hash, since_when=None, db_session=None
    ):
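        # Looks up an open (not yet fixed) group with the same grouping hash
        # for this resource; presumably used so that an incoming report can be
        # attached to an existing group rather than starting a new one.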
        db_session = get_db_session(db_session)
        q = db_session.query(ReportGroup)
        q = q.filter(ReportGroup.resource_id == resource_id)
        q = q.filter(ReportGroup.grouping_hash == grouping_hash)
        q = q.filter(ReportGroup.fixed == False)
        if since_when:
            q = q.filter(ReportGroup.first_timestamp >= since_when)
        return q.first()

    @classmethod
    def users_commenting(cls, report_group, exclude_user_id=None, db_session=None):
        db_session = get_db_session(None, report_group)
        query = db_session.query(User).distinct()
        query = query.filter(User.id == ReportComment.owner_id)
        query = query.filter(ReportComment.group_id == report_group.id)
        if exclude_user_id:
            query = query.filter(ReportComment.owner_id != exclude_user_id)
        return query

    @classmethod
    def affected_users_count(cls, report_group, db_session=None):
        db_session = get_db_session(db_session)
        query = db_session.query(sa.func.count(Report.username))
        query = query.filter(Report.group_id == report_group.id)
        query = query.filter(Report.username != "")
        query = query.filter(Report.username != None)
        query = query.group_by(Report.username)
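        # count() on the grouped query returns the number of groups, i.e. the
        # number of distinct non-empty usernames seen in this report group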
        return query.count()

    @classmethod
    def top_affected_users(cls, report_group, db_session=None):
        db_session = get_db_session(db_session)
        count_label = sa.func.count(Report.username).label("count")
        query = db_session.query(Report.username, count_label)
        query = query.filter(Report.group_id == report_group.id)
        query = query.filter(Report.username != None)
        query = query.filter(Report.username != "")
        query = query.group_by(Report.username)
        query = query.order_by(sa.desc(count_label))
        query = query.limit(50)
        return query

    @classmethod
    def get_report_stats(cls, request, filter_settings):
        """
        Gets report dashboard graphs.
        Returns information for BAR charts with occurrences/interval information;
        "detailed" means the variant that returns time intervals, non-detailed
        returns the total sum.
        """
        delta = filter_settings["end_date"] - filter_settings["start_date"]
        if delta < h.time_deltas.get("12h")["delta"]:
            interval = "1m"
        elif delta <= h.time_deltas.get("3d")["delta"]:
            interval = "5m"
        elif delta >= h.time_deltas.get("2w")["delta"]:
            interval = "24h"
        else:
            interval = "1h"

        group_id = filter_settings.get("group_id")

        es_query = {
            "aggs": {
                "parent_agg": {
                    "aggs": {
                        "types": {
                            "aggs": {
                                "sub_agg": {"terms": {"field": "tags.type.values"}}
                            },
                            "filter": {
                                "and": [{"exists": {"field": "tags.type.values"}}]
                            },
                        }
                    },
                    "date_histogram": {
                        "extended_bounds": {
                            "max": filter_settings["end_date"],
                            "min": filter_settings["start_date"],
                        },
                        "field": "timestamp",
                        "interval": interval,
                        "min_doc_count": 0,
                    },
                }
            },
            "query": {
                "bool": {
                    "filter": [
                        {
                            "terms": {
                                "resource_id": [filter_settings["resource"][0]]
                            }
                        },
                        {
                            "range": {
                                "timestamp": {
                                    "gte": filter_settings["start_date"],
                                    "lte": filter_settings["end_date"],
                                }
                            }
                        },
                    ]
                }
            },
        }
        if group_id:
            parent_agg = es_query["aggs"]["parent_agg"]
            filters = parent_agg["aggs"]["types"]["filter"]["and"]
            filters.append({"terms": {"tags.group_id.values": [group_id]}})

        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["reports"],
        )

        if not index_names:
            return []
        result = Datastores.es.search(
            body=es_query, index=index_names, doc_type="log", size=0
        )
        series = []
        for bucket in result["aggregations"]["parent_agg"]["buckets"]:
            point = {
                "x": datetime.utcfromtimestamp(int(bucket["key"]) / 1000),
                "report": 0,
                "not_found": 0,
                "slow_report": 0,
            }
            for subbucket in bucket["types"]["sub_agg"]["buckets"]:
                if subbucket["key"] == "slow":
                    point["slow_report"] = subbucket["doc_count"]
                elif subbucket["key"] == "error":
                    point["report"] = subbucket["doc_count"]
                elif subbucket["key"] == "not_found":
                    point["not_found"] = subbucket["doc_count"]
            series.append(point)
        return series