# request_metric.py
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r0 | ||||
from datetime import datetime

import appenlight.lib.helpers as h
from appenlight.models import get_db_session, Datastores
from appenlight.models.services.base import BaseService
from appenlight.lib.enums import ReportType
from appenlight.lib.utils import es_index_name_limiter

# The uptime feature lives in the optional ae_uptime_ce extension.  When it is
# not installed we fall back to None so get_apdex_stats() can detect the
# absence and report uptime as 0 instead of crashing on import.
try:
    from ae_uptime_ce.models.services.uptime_metric import UptimeMetricService
except ImportError:
    UptimeMetricService = None
def check_key(key, stats, uptime, total_seconds):
    """Ensure ``stats[key]`` exists, seeding it with a zeroed metrics entry.

    Mutates *stats* in place.  If *key* is already present the dict is left
    untouched; otherwise a fresh per-server record is created with all
    request/error counters and derived metrics (apdex, rpm, response times)
    set to 0, ``total_minutes`` derived from *total_seconds*, and the given
    *uptime* value stored verbatim.
    """
    # Guard clause: never overwrite an entry that is already being filled in.
    if key in stats:
        return

    entry = {"name": key}
    # Raw request/error counters, all starting from zero.
    for counter in (
        "requests",
        "errors",
        "tolerated_requests",
        "frustrating_requests",
        "satisfying_requests",
    ):
        entry[counter] = 0
    entry["total_minutes"] = total_seconds / 60.0
    entry["uptime"] = uptime
    # Derived metrics, computed later by the caller (see get_apdex_stats).
    for derived in ("apdex", "rpm", "response_time", "avg_response_time"):
        entry[derived] = 0
    stats[key] = entry
r0 | ||||
class RequestMetricService(BaseService):
    """Aggregated request-metric queries backing the dashboard views.

    All heavy lifting is delegated to Elasticsearch via ``Datastores.es``;
    the ``db_session`` parameters are accepted for API symmetry with other
    services.  NOTE(review): the query bodies use the legacy ES DSL
    (``"and"`` filters, ``doc_type``, ``"interval"`` on date_histogram) —
    presumably targeting a pre-5.x cluster; confirm against the deployed
    Elasticsearch version before upgrading.
    """

    @classmethod
    def get_metrics_stats(cls, request, filter_settings, db_session=None):
        """Return per-interval time-series points for the metrics chart.

        Buckets request timing metrics (custom/main/nosql/remote/requests/
        sql/tmpl) into a date histogram between ``filter_settings``
        ``start_date`` and ``end_date`` for the first resource in
        ``filter_settings["resource"]``.  Returns a list of dicts shaped
        ``{"x": datetime, "custom": float, ..., "tmpl": float}``, or ``[]``
        when no matching index exists.
        """
        # Pick a histogram bucket width proportional to the requested span so
        # chart point counts stay manageable.
        delta = filter_settings["end_date"] - filter_settings["start_date"]
        if delta < h.time_deltas.get("12h")["delta"]:
            interval = "1m"
        elif delta <= h.time_deltas.get("3d")["delta"]:
            interval = "5m"
        elif delta >= h.time_deltas.get("2w")["delta"]:
            interval = "24h"
        else:
            # spans between 3 days and 2 weeks fall through to hourly buckets
            interval = "1h"

        filter_settings["namespace"] = ["appenlight.request_metric"]

        # One sub-aggregation per timing category; each is filtered on field
        # existence so documents missing a category don't skew the sums.
        es_query = {
            "aggs": {
                "parent_agg": {
                    "aggs": {
                        "custom": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.custom.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.custom.numeric_values"}
                            },
                        },
                        "main": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.main.numeric_values"}
                                }
                            },
                            "filter": {"exists": {"field": "tags.main.numeric_values"}},
                        },
                        "nosql": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.nosql.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.nosql.numeric_values"}
                            },
                        },
                        "remote": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.remote.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.remote.numeric_values"}
                            },
                        },
                        "requests": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.requests.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.requests.numeric_values"}
                            },
                        },
                        "sql": {
                            "aggs": {
                                "sub_agg": {"sum": {"field": "tags.sql.numeric_values"}}
                            },
                            "filter": {"exists": {"field": "tags.sql.numeric_values"}},
                        },
                        "tmpl": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.tmpl.numeric_values"}
                                }
                            },
                            "filter": {"exists": {"field": "tags.tmpl.numeric_values"}},
                        },
                    },
                    # extended_bounds forces empty buckets (min_doc_count: 0)
                    # across the whole range so the chart has no gaps.
                    "date_histogram": {
                        "extended_bounds": {
                            "max": filter_settings["end_date"],
                            "min": filter_settings["start_date"],
                        },
                        "field": "timestamp",
                        "interval": interval,
                        "min_doc_count": 0,
                    },
                }
            },
            "query": {
                "bool": {
                    "filter": {
                        "and": [
                            {
                                "terms": {
                                    "resource_id": [filter_settings["resource"][0]]
                                }
                            },
                            {
                                "range": {
                                    "timestamp": {
                                        "gte": filter_settings["start_date"],
                                        "lte": filter_settings["end_date"],
                                    }
                                }
                            },
                            {"terms": {"namespace": ["appenlight.request_metric"]}},
                        ]
                    }
                }
            },
        }

        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["metrics"],
        )
        if not index_names:
            return []
        result = Datastores.es.search(
            body=es_query, index=index_names, doc_type="log", size=0
        )

        plot_data = []
        for item in result["aggregations"]["parent_agg"]["buckets"]:
            # Histogram bucket keys are epoch milliseconds; convert to a
            # (naive UTC) datetime for the chart's x axis.
            x_time = datetime.utcfromtimestamp(int(item["key"]) / 1000)
            point = {"x": x_time}
            for key in ["custom", "main", "nosql", "remote", "requests", "sql", "tmpl"]:
                value = item[key]["sub_agg"]["value"]
                # Empty buckets report a falsy value; normalize them to 0.
                point[key] = round(value, 3) if value else 0
            plot_data.append(point)
        return plot_data

    @classmethod
    def get_requests_breakdown(cls, request, filter_settings, db_session=None):
        """Return the top-15 views by share of total request time.

        Three ES round-trips: (1) total ``tags.main`` time in the range,
        (2) per-view sums ordered by percentage of that total (computed via
        an ES ``expression`` script), (3) the five most recent slow reports
        per view for the "latest_details" links.  Returns a list of dicts
        with keys ``key``, ``main``, ``requests``, ``percentage`` and
        ``latest_details``.
        """
        db_session = get_db_session(db_session)
        # fetch total time of all requests in this time range
        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["metrics"],
        )

        if index_names and filter_settings["resource"]:
            es_query = {
                "aggs": {
                    "main": {
                        "aggs": {
                            "sub_agg": {"sum": {"field": "tags.main.numeric_values"}}
                        },
                        "filter": {"exists": {"field": "tags.main.numeric_values"}},
                    }
                },
                "query": {
                    "bool": {
                        "filter": {
                            "and": [
                                {
                                    "terms": {
                                        "resource_id": [filter_settings["resource"][0]]
                                    }
                                },
                                {
                                    "range": {
                                        "timestamp": {
                                            "gte": filter_settings["start_date"],
                                            "lte": filter_settings["end_date"],
                                        }
                                    }
                                },
                                {"terms": {"namespace": ["appenlight.request_metric"]}},
                            ]
                        }
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            total_time_spent = result["aggregations"]["main"]["sub_agg"]["value"]
        else:
            total_time_spent = 0
        # NOTE(review): when total_time_spent is 0 this script divides by
        # zero inside ES — presumably producing Infinity/NaN, which the
        # 'infinity' handling below tries to absorb; confirm on the target
        # ES version.
        script_text = "doc['tags.main.numeric_values'].value / {}".format(
            total_time_spent
        )

        if index_names and filter_settings["resource"]:
            es_query = {
                "aggs": {
                    "parent_agg": {
                        "aggs": {
                            "main": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.main.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.main.numeric_values"}
                                },
                            },
                            # Share of the global total, computed per document
                            # by the expression script built above.
                            "percentage": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {
                                            "lang": "expression",
                                            "script": script_text,
                                        }
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.main.numeric_values"}
                                },
                            },
                            "requests": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.requests.numeric_values"}
                                },
                            },
                        },
                        # Top 15 views, most time-consuming first.
                        "terms": {
                            "field": "tags.view_name.values",
                            "order": {"percentage>sub_agg": "desc"},
                            "size": 15,
                        },
                    }
                },
                "query": {
                    "bool": {
                        "filter": {
                            "and": [
                                {
                                    "terms": {
                                        "resource_id": [filter_settings["resource"][0]]
                                    }
                                },
                                {
                                    "range": {
                                        "timestamp": {
                                            "gte": filter_settings["start_date"],
                                            "lte": filter_settings["end_date"],
                                        }
                                    }
                                },
                            ]
                        }
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            series = result["aggregations"]["parent_agg"]["buckets"]
        else:
            series = []

        # Fetch the most recent slow reports for each of the views found
        # above so the UI can link to concrete examples.
        # NOTE(review): filter_settings["resource"][0] is accessed here
        # without the truthiness guard used above — an empty "resource" list
        # would raise IndexError before the `if index_names and series`
        # check; verify callers always supply at least one resource.
        and_part = [
            {"term": {"resource_id": filter_settings["resource"][0]}},
            {"terms": {"tags.view_name.values": [row["key"] for row in series]}},
            {"term": {"report_type": str(ReportType.slow)}},
        ]
        query = {
            "aggs": {
                "top_reports": {
                    "terms": {"field": "tags.view_name.values", "size": len(series)},
                    "aggs": {
                        "top_calls_hits": {
                            "top_hits": {"sort": {"start_time": "desc"}, "size": 5}
                        }
                    },
                }
            },
            "query": {"bool": {"filter": {"and": and_part}}},
        }
        details = {}
        index_names = es_index_name_limiter(ixtypes=["reports"])
        if index_names and series:
            result = Datastores.es.search(
                body=query, doc_type="report", size=0, index=index_names
            )
            for bucket in result["aggregations"]["top_reports"]["buckets"]:
                details[bucket["key"]] = []
                for hit in bucket["top_calls_hits"]["hits"]["hits"]:
                    details[bucket["key"]].append(
                        {
                            "report_id": hit["_source"]["pg_id"],
                            "group_id": hit["_source"]["group_id"],
                        }
                    )

        # Flatten the aggregation buckets into plain dicts for the template.
        results = []
        for row in series:
            result = {
                "key": row["key"],
                "main": row["main"]["sub_agg"]["value"],
                "requests": row["requests"]["sub_agg"]["value"],
            }
            # es can return 'infinity'
            # NOTE(review): float("Infinity") parses successfully in Python
            # (yielding inf) and raises no ValueError, so this guard only
            # catches non-numeric strings; a None value would raise TypeError
            # instead — confirm what the cluster actually returns here.
            try:
                result["percentage"] = float(row["percentage"]["sub_agg"]["value"])
            except ValueError:
                result["percentage"] = 0

            result["latest_details"] = details.get(row["key"]) or []
            results.append(result)
        return results

    @classmethod
    def get_apdex_stats(cls, request, filter_settings, threshold=1, db_session=None):
        """
        Returns information and calculates APDEX score per server for dashboard
        server information (upper right stats boxes)
        """
        # Apdex t = (Satisfied Count + Tolerated Count / 2) / Total Samples
        # NOTE(review): `threshold` is never referenced in this body — the
        # tolerated/frustrating boundaries are hard-coded to 1s and 4s in the
        # range filters below.
        db_session = get_db_session(db_session)
        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["metrics"],
        )

        # Per-server request counts split into frustrating (>= 4s) and
        # tolerated (1s..4s) buckets, keyed by tags.server_name.values.
        requests_series = []
        if index_names and filter_settings["resource"]:
            es_query = {
                "aggs": {
                    "parent_agg": {
                        "aggs": {
                            "frustrating": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "and": [
                                        {
                                            "range": {
                                                "tags.main.numeric_values": {"gte": "4"}
                                            }
                                        },
                                        {
                                            "exists": {
                                                "field": "tags.requests.numeric_values"
                                            }
                                        },
                                    ]
                                },
                            },
                            "main": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.main.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.main.numeric_values"}
                                },
                            },
                            "requests": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.requests.numeric_values"}
                                },
                            },
                            "tolerated": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "and": [
                                        {
                                            "range": {
                                                "tags.main.numeric_values": {"gte": "1"}
                                            }
                                        },
                                        {
                                            "range": {
                                                "tags.main.numeric_values": {"lt": "4"}
                                            }
                                        },
                                        {
                                            "exists": {
                                                "field": "tags.requests.numeric_values"
                                            }
                                        },
                                    ]
                                },
                            },
                        },
                        # effectively "all servers" — terms aggs need an
                        # explicit size cap in this ES version
                        "terms": {"field": "tags.server_name.values", "size": 999999},
                    }
                },
                "query": {
                    "bool": {
                        "filter": {
                            "and": [
                                {
                                    "terms": {
                                        "resource_id": [filter_settings["resource"][0]]
                                    }
                                },
                                {
                                    "range": {
                                        "timestamp": {
                                            "gte": filter_settings["start_date"],
                                            "lte": filter_settings["end_date"],
                                        }
                                    }
                                },
                                {"terms": {"namespace": ["appenlight.request_metric"]}},
                            ]
                        }
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            for bucket in result["aggregations"]["parent_agg"]["buckets"]:
                requests_series.append(
                    {
                        "frustrating": bucket["frustrating"]["sub_agg"]["value"],
                        "main": bucket["main"]["sub_agg"]["value"],
                        "requests": bucket["requests"]["sub_agg"]["value"],
                        "tolerated": bucket["tolerated"]["sub_agg"]["value"],
                        "key": bucket["key"],
                    }
                )

        since_when = filter_settings["start_date"]
        until = filter_settings["end_date"]

        # total errors
        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["reports"],
        )

        # Per-server error-report occurrence counts over the same range.
        report_series = []
        if index_names and filter_settings["resource"]:
            report_type = ReportType.key_from_value(ReportType.error)
            es_query = {
                "aggs": {
                    "parent_agg": {
                        "aggs": {
                            "errors": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {
                                            "field": "tags.occurences.numeric_values"
                                        }
                                    }
                                },
                                "filter": {
                                    "and": [
                                        {"terms": {"tags.type.values": [report_type]}},
                                        {
                                            "exists": {
                                                "field": "tags.occurences.numeric_values"
                                            }
                                        },
                                    ]
                                },
                            }
                        },
                        "terms": {"field": "tags.server_name.values", "size": 999999},
                    }
                },
                "query": {
                    "bool": {
                        "filter": {
                            "and": [
                                {
                                    "terms": {
                                        "resource_id": [filter_settings["resource"][0]]
                                    }
                                },
                                {
                                    "range": {
                                        "timestamp": {
                                            "gte": filter_settings["start_date"],
                                            "lte": filter_settings["end_date"],
                                        }
                                    }
                                },
                                {"terms": {"namespace": ["appenlight.error"]}},
                            ]
                        }
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            for bucket in result["aggregations"]["parent_agg"]["buckets"]:
                report_series.append(
                    {
                        "key": bucket["key"],
                        "errors": bucket["errors"]["sub_agg"]["value"],
                    }
                )

        stats = {}
        # Uptime comes from the optional ae_uptime_ce extension (see module
        # top); without it every server reports uptime 0.
        if UptimeMetricService is not None:
            uptime = UptimeMetricService.get_uptime_by_app(
                filter_settings["resource"][0], since_when=since_when, until=until
            )
        else:
            uptime = 0

        total_seconds = (until - since_when).total_seconds()

        # Merge the two ES result sets into one per-server dict; check_key
        # seeds a zeroed entry so servers present in only one series still
        # get complete records.
        for stat in requests_series:
            check_key(stat["key"], stats, uptime, total_seconds)
            stats[stat["key"]]["requests"] = int(stat["requests"])
            stats[stat["key"]]["response_time"] = stat["main"]
            stats[stat["key"]]["tolerated_requests"] = stat["tolerated"]
            stats[stat["key"]]["frustrating_requests"] = stat["frustrating"]
        for server in report_series:
            check_key(server["key"], stats, uptime, total_seconds)
            stats[server["key"]]["errors"] = server["errors"]

        server_stats = list(stats.values())
        for stat in server_stats:
            # Satisfied = everything not errored, frustrating, or tolerated;
            # clamped at 0 since the subtraction can go negative when counts
            # overlap.
            stat["satisfying_requests"] = (
                stat["requests"]
                - stat["errors"]
                - stat["frustrating_requests"]
                - stat["tolerated_requests"]
            )
            if stat["satisfying_requests"] < 0:
                stat["satisfying_requests"] = 0

            # Derived metrics only make sense when any requests were seen;
            # otherwise they keep the zeros seeded by check_key.
            if stat["requests"]:
                stat["avg_response_time"] = round(
                    stat["response_time"] / stat["requests"], 3
                )
                qual_requests = (
                    stat["satisfying_requests"] + stat["tolerated_requests"] / 2.0
                )
                stat["apdex"] = round((qual_requests / stat["requests"]) * 100, 2)
                stat["rpm"] = round(stat["requests"] / stat["total_minutes"], 2)

        return sorted(server_stats, key=lambda x: x["name"])