##// END OF EJS Templates
request_metric: fix wrong key

File last commit:
r178:4f132e2e
Show More
request_metric.py
623 lines | 25.1 KiB | text/x-python | PythonLexer
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import appenlight.lib.helpers as h
from appenlight.models import get_db_session, Datastores
from appenlight.models.services.base import BaseService
from appenlight.lib.enums import ReportType
from appenlight.lib.utils import es_index_name_limiter
# Uptime metrics are provided by the optional ae_uptime_ce plugin; when it
# is not installed we fall back to a None sentinel (checked at call sites).
try:
    from ae_uptime_ce.models.services.uptime_metric import UptimeMetricService
except ImportError:
    UptimeMetricService = None
def check_key(key, stats, uptime, total_seconds):
    """Ensure ``stats[key]`` exists, seeding it with a zeroed stats record.

    Mutates *stats* in place: if *key* is absent a default per-server entry is
    created with ``total_minutes`` derived from *total_seconds* and the given
    *uptime*; an existing entry is left untouched.

    :param key: server name used both as dict key and as the ``name`` field
    :param stats: dict of per-server stat records, updated in place
    :param uptime: uptime value stored verbatim in the new record
    :param total_seconds: length of the queried window, converted to minutes
    """
    if key not in stats:
        stats[key] = {
            "name": key,
            "requests": 0,
            "errors": 0,
            "tolerated_requests": 0,
            "frustrating_requests": 0,
            "satisfying_requests": 0,
            "total_minutes": total_seconds / 60.0,
            "uptime": uptime,
            "apdex": 0,
            "rpm": 0,
            "response_time": 0,
            "avg_response_time": 0,
        }
class RequestMetricService(BaseService):
    """Elasticsearch-backed aggregations over ``appenlight.request_metric``
    documents, powering the request-timing dashboard charts and the per-server
    Apdex stat boxes."""

    @classmethod
    def get_metrics_stats(cls, request, filter_settings, db_session=None):
        """Return time-bucketed sums of the timing breakdown metrics.

        Picks a histogram interval from the queried date span (1m up to 24h),
        then sums each timing tag (custom/main/nosql/remote/requests/sql/tmpl)
        per bucket.  Returns a list of plot points ``{"x": datetime, <tag>:
        rounded sum, ...}``; empty list when no matching index exists.
        """
        delta = filter_settings["end_date"] - filter_settings["start_date"]
        # interval granularity scales with the size of the requested window
        if delta < h.time_deltas.get("12h")["delta"]:
            interval = "1m"
        elif delta <= h.time_deltas.get("3d")["delta"]:
            interval = "5m"
        elif delta >= h.time_deltas.get("2w")["delta"]:
            interval = "24h"
        else:
            interval = "1h"

        filter_settings["namespace"] = ["appenlight.request_metric"]

        es_query = {
            "aggs": {
                "parent_agg": {
                    "aggs": {
                        "custom": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.custom.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.custom.numeric_values"}
                            },
                        },
                        "main": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.main.numeric_values"}
                                }
                            },
                            "filter": {"exists": {"field": "tags.main.numeric_values"}},
                        },
                        "nosql": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.nosql.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.nosql.numeric_values"}
                            },
                        },
                        "remote": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.remote.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.remote.numeric_values"}
                            },
                        },
                        "requests": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.requests.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.requests.numeric_values"}
                            },
                        },
                        "sql": {
                            "aggs": {
                                "sub_agg": {"sum": {"field": "tags.sql.numeric_values"}}
                            },
                            "filter": {"exists": {"field": "tags.sql.numeric_values"}},
                        },
                        "tmpl": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.tmpl.numeric_values"}
                                }
                            },
                            "filter": {"exists": {"field": "tags.tmpl.numeric_values"}},
                        },
                    },
                    "date_histogram": {
                        "extended_bounds": {
                            "max": filter_settings["end_date"],
                            "min": filter_settings["start_date"],
                        },
                        "field": "timestamp",
                        "interval": interval,
                        "min_doc_count": 0,
                    },
                }
            },
            "query": {
                "bool": {
                    "filter": [
                        {"terms": {"resource_id": [filter_settings["resource"][0]]}},
                        {
                            "range": {
                                "timestamp": {
                                    "gte": filter_settings["start_date"],
                                    "lte": filter_settings["end_date"],
                                }
                            }
                        },
                        {"terms": {"namespace": ["appenlight.request_metric"]}},
                    ]
                }
            },
        }

        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["metrics"],
        )
        if not index_names:
            return []
        result = Datastores.es.search(
            body=es_query, index=index_names, doc_type="log", size=0
        )

        plot_data = []
        for item in result["aggregations"]["parent_agg"]["buckets"]:
            # bucket key is epoch millis
            x_time = datetime.utcfromtimestamp(int(item["key"]) / 1000)
            point = {"x": x_time}
            for key in ["custom", "main", "nosql", "remote", "requests", "sql", "tmpl"]:
                value = item[key]["sub_agg"]["value"]
                point[key] = round(value, 3) if value else 0
            plot_data.append(point)
        return plot_data

    @classmethod
    def get_requests_breakdown(cls, request, filter_settings, db_session=None):
        """Return the top-15 views by share of total request time.

        Three ES round-trips: total ``main`` time in the window, a per-view
        aggregation ordered by that share (computed via an inline script), and
        the 5 latest slow reports per view.  Each result row carries ``key``,
        ``main``, ``requests``, ``percentage`` and ``latest_details``.
        """
        db_session = get_db_session(db_session)
        # fetch total time of all requests in this time range
        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["metrics"],
        )

        if index_names and filter_settings["resource"]:
            es_query = {
                "aggs": {
                    "main": {
                        "aggs": {
                            "sub_agg": {"sum": {"field": "tags.main.numeric_values"}}
                        },
                        "filter": {"exists": {"field": "tags.main.numeric_values"}},
                    }
                },
                "query": {
                    "bool": {
                        "filter": [
                            {
                                "terms": {
                                    "resource_id": [filter_settings["resource"][0]]
                                }
                            },
                            {
                                "range": {
                                    "timestamp": {
                                        "gte": filter_settings["start_date"],
                                        "lte": filter_settings["end_date"],
                                    }
                                }
                            },
                            {"terms": {"namespace": ["appenlight.request_metric"]}},
                        ]
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            total_time_spent = result["aggregations"]["main"]["sub_agg"]["value"]
        else:
            total_time_spent = 0
        script_text = "doc['tags.main.numeric_values'].value / {}".format(
            total_time_spent
        )
        # guard against division by zero inside the ES script
        if total_time_spent == 0:
            script_text = "0"

        if index_names and filter_settings["resource"]:
            es_query = {
                "aggs": {
                    "parent_agg": {
                        "aggs": {
                            "main": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.main.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.main.numeric_values"}
                                },
                            },
                            "percentage": {
                                "aggs": {"sub_agg": {"sum": {"script": script_text}}},
                                "filter": {
                                    "exists": {"field": "tags.main.numeric_values"}
                                },
                            },
                            "requests": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.requests.numeric_values"}
                                },
                            },
                        },
                        "terms": {
                            "field": "tags.view_name.values.keyword",
                            "order": {"percentage>sub_agg": "desc"},
                            "size": 15,
                        },
                    }
                },
                "query": {
                    "bool": {
                        "filter": [
                            {
                                "terms": {
                                    "resource_id": [filter_settings["resource"][0]]
                                }
                            },
                            {
                                "range": {
                                    "timestamp": {
                                        "gte": filter_settings["start_date"],
                                        "lte": filter_settings["end_date"],
                                    }
                                }
                            },
                        ]
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            series = result["aggregations"]["parent_agg"]["buckets"]
        else:
            series = []

        # latest slow-report details for each of the views found above
        and_part = [
            {"term": {"resource_id": filter_settings["resource"][0]}},
            {"terms": {"tags.view_name.values": [row["key"] for row in series]}},
            {"term": {"report_type": str(ReportType.slow)}},
        ]
        query = {
            "aggs": {
                "top_reports": {
                    "terms": {
                        "field": "tags.view_name.values.keyword",
                        "size": len(series),
                    },
                    "aggs": {
                        "top_calls_hits": {
                            "top_hits": {"sort": {"start_time": "desc"}, "size": 5}
                        }
                    },
                }
            },
            "query": {"bool": {"filter": and_part}},
        }
        details = {}
        index_names = es_index_name_limiter(ixtypes=["reports"])
        if index_names and series:
            result = Datastores.es.search(
                body=query, doc_type="report", size=0, index=index_names
            )
            for bucket in result["aggregations"]["top_reports"]["buckets"]:
                details[bucket["key"]] = []
                for hit in bucket["top_calls_hits"]["hits"]["hits"]:
                    details[bucket["key"]].append(
                        {
                            "report_id": hit["_source"]["report_id"],
                            "group_id": hit["_source"]["group_id"],
                        }
                    )

        results = []
        for row in series:
            result = {
                "key": row["key"],
                "main": row["main"]["sub_agg"]["value"],
                "requests": row["requests"]["sub_agg"]["value"],
            }
            # es can return 'infinity'
            try:
                result["percentage"] = float(row["percentage"]["sub_agg"]["value"])
            except ValueError:
                result["percentage"] = 0

            result["latest_details"] = details.get(row["key"]) or []
            results.append(result)
        return results

    @classmethod
    def get_apdex_stats(cls, request, filter_settings, threshold=1, db_session=None):
        """
        Returns information and calculates APDEX score per server for dashboard
        server information (upper right stats boxes)
        """
        # Apdex t = (Satisfied Count + Tolerated Count / 2) / Total Samples
        db_session = get_db_session(db_session)
        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["metrics"],
        )

        requests_series = []
        if index_names and filter_settings["resource"]:
            es_query = {
                "aggs": {
                    "parent_agg": {
                        "aggs": {
                            # requests whose main time >= 4s count as frustrating
                            "frustrating": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "bool": {
                                        "filter": [
                                            {
                                                "range": {
                                                    "tags.main.numeric_values": {
                                                        "gte": "4"
                                                    }
                                                }
                                            },
                                            {
                                                "exists": {
                                                    "field": "tags.requests.numeric_values"
                                                }
                                            },
                                        ]
                                    }
                                },
                            },
                            "main": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.main.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.main.numeric_values"}
                                },
                            },
                            "requests": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "exists": {"field": "tags.requests.numeric_values"}
                                },
                            },
                            # requests with main time in [1s, 4s) count as tolerated
                            "tolerated": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {"field": "tags.requests.numeric_values"}
                                    }
                                },
                                "filter": {
                                    "bool": {
                                        "filter": [
                                            {
                                                "range": {
                                                    "tags.main.numeric_values": {
                                                        "gte": "1"
                                                    }
                                                }
                                            },
                                            {
                                                "range": {
                                                    "tags.main.numeric_values": {
                                                        "lt": "4"
                                                    }
                                                }
                                            },
                                            {
                                                "exists": {
                                                    "field": "tags.requests.numeric_values"
                                                }
                                            },
                                        ]
                                    }
                                },
                            },
                        },
                        "terms": {
                            "field": "tags.server_name.values.keyword",
                            "size": 999999,
                        },
                    }
                },
                "query": {
                    "bool": {
                        "filter": [
                            {
                                "terms": {
                                    "resource_id": [filter_settings["resource"][0]]
                                }
                            },
                            {
                                "range": {
                                    "timestamp": {
                                        "gte": filter_settings["start_date"],
                                        "lte": filter_settings["end_date"],
                                    }
                                }
                            },
                            {"terms": {"namespace": ["appenlight.request_metric"]}},
                        ]
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            for bucket in result["aggregations"]["parent_agg"]["buckets"]:
                requests_series.append(
                    {
                        "frustrating": bucket["frustrating"]["sub_agg"]["value"],
                        "main": bucket["main"]["sub_agg"]["value"],
                        "requests": bucket["requests"]["sub_agg"]["value"],
                        "tolerated": bucket["tolerated"]["sub_agg"]["value"],
                        "key": bucket["key"],
                    }
                )

        since_when = filter_settings["start_date"]
        until = filter_settings["end_date"]

        # total errors
        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["reports"],
        )

        report_series = []
        if index_names and filter_settings["resource"]:
            report_type = ReportType.key_from_value(ReportType.error)
            es_query = {
                "aggs": {
                    "parent_agg": {
                        "aggs": {
                            "errors": {
                                "aggs": {
                                    "sub_agg": {
                                        "sum": {
                                            # NOTE: 'occurences' spelling matches the
                                            # stored tag name - do not "fix" it
                                            "field": "tags.occurences.numeric_values"
                                        }
                                    }
                                },
                                "filter": {
                                    "bool": {
                                        "filter": [
                                            {
                                                "terms": {
                                                    "tags.type.values": [report_type]
                                                }
                                            },
                                            {
                                                "exists": {
                                                    "field": "tags.occurences.numeric_values"
                                                }
                                            },
                                        ]
                                    }
                                },
                            }
                        },
                        "terms": {
                            "field": "tags.server_name.values.keyword",
                            "size": 999999,
                        },
                    }
                },
                "query": {
                    "bool": {
                        "filter": [
                            {
                                "terms": {
                                    "resource_id": [filter_settings["resource"][0]]
                                }
                            },
                            {
                                "range": {
                                    "timestamp": {
                                        "gte": filter_settings["start_date"],
                                        "lte": filter_settings["end_date"],
                                    }
                                }
                            },
                            {"terms": {"namespace": ["appenlight.error"]}},
                        ]
                    }
                },
            }
            result = Datastores.es.search(
                body=es_query, index=index_names, doc_type="log", size=0
            )
            for bucket in result["aggregations"]["parent_agg"]["buckets"]:
                report_series.append(
                    {
                        "key": bucket["key"],
                        "errors": bucket["errors"]["sub_agg"]["value"],
                    }
                )

        stats = {}
        if UptimeMetricService is not None:
            uptime = UptimeMetricService.get_uptime_by_app(
                filter_settings["resource"][0], since_when=since_when, until=until
            )
        else:
            uptime = 0

        total_seconds = (until - since_when).total_seconds()

        # merge request-metric and error series into one per-server record
        for stat in requests_series:
            check_key(stat["key"], stats, uptime, total_seconds)
            stats[stat["key"]]["requests"] = int(stat["requests"])
            stats[stat["key"]]["response_time"] = stat["main"]
            stats[stat["key"]]["tolerated_requests"] = stat["tolerated"]
            stats[stat["key"]]["frustrating_requests"] = stat["frustrating"]
        for server in report_series:
            check_key(server["key"], stats, uptime, total_seconds)
            stats[server["key"]]["errors"] = server["errors"]

        server_stats = list(stats.values())
        for stat in server_stats:
            # satisfied = everything not errored, tolerated or frustrating
            stat["satisfying_requests"] = (
                stat["requests"]
                - stat["errors"]
                - stat["frustrating_requests"]
                - stat["tolerated_requests"]
            )
            if stat["satisfying_requests"] < 0:
                stat["satisfying_requests"] = 0

            if stat["requests"]:
                stat["avg_response_time"] = round(
                    stat["response_time"] / stat["requests"], 3
                )
                qual_requests = (
                    stat["satisfying_requests"] + stat["tolerated_requests"] / 2.0
                )
                stat["apdex"] = round((qual_requests / stat["requests"]) * 100, 2)
                stat["rpm"] = round(stat["requests"] / stat["total_minutes"], 2)

        return sorted(server_stats, key=lambda x: x["name"])