# -*- coding: utf-8 -*-
# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from appenlight.models import get_db_session, Datastores | ||||
from appenlight.models.report import Report | ||||
from appenlight.models.services.base import BaseService | ||||
from appenlight.lib.utils import es_index_name_limiter | ||||
class SlowCallService(BaseService):
    @classmethod
    def get_time_consuming_calls(cls, request, filter_settings, db_session=None):
        """Return the most time-consuming slow-call statements for a resource.

        Aggregates slow-call log entries stored in Elasticsearch over the
        requested time window, then enriches the top statement hashes with
        their most recent example calls and the report groups those examples
        belong to.

        :param request: pyramid request (unused here; kept for API symmetry)
        :param filter_settings: dict with at least ``resource`` (list of
            resource ids), ``start_date`` and ``end_date`` keys
        :param db_session: optional SQLAlchemy session override
        :return: list of row dicts with statement text/type, duration totals,
            occurrence counts and ``latest_details`` report links
        """
        db_session = get_db_session(db_session)
        # get slow calls from older partitions too
        index_names = es_index_name_limiter(
            start_date=filter_settings["start_date"],
            end_date=filter_settings["end_date"],
            ixtypes=["slow_calls"],
        )
        # Without matching indices or a resource filter there is nothing to do.
        if not index_names or not filter_settings["resource"]:
            return []

        buckets = cls._top_duration_buckets(filter_settings, index_names)
        hashes = [bucket["key"] for bucket in buckets]

        # get queries associated with hashes
        call_results, report_ids = cls._example_calls_for_hashes(
            filter_settings, index_names, hashes
        )
        reports_reversed = cls._report_groups(
            db_session, report_ids, filter_settings
        )
        return cls._build_rows(buckets, call_results, reports_reversed)

    @classmethod
    def _top_duration_buckets(cls, filter_settings, index_names):
        """Query ES for the 15 statement hashes with the largest summed
        duration inside the requested time range; return the agg buckets."""
        es_query = {
            "aggs": {
                "parent_agg": {
                    "aggs": {
                        "duration": {
                            "aggs": {
                                "sub_agg": {
                                    "sum": {"field": "tags.duration.numeric_values"}
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.duration.numeric_values"}
                            },
                        },
                        "total": {
                            "aggs": {
                                "sub_agg": {
                                    "value_count": {
                                        "field": "tags.statement_hash.values"
                                    }
                                }
                            },
                            "filter": {
                                "exists": {"field": "tags.statement_hash.values"}
                            },
                        },
                    },
                    "terms": {
                        "field": "tags.statement_hash.values",
                        "order": {"duration>sub_agg": "desc"},
                        "size": 15,
                    },
                }
            },
            "query": {
                "bool": {
                    "filter": [
                        {
                            "terms": {
                                "resource_id": [filter_settings["resource"][0]]
                            }
                        },
                        {
                            "range": {
                                "timestamp": {
                                    "gte": filter_settings["start_date"],
                                    "lte": filter_settings["end_date"],
                                }
                            }
                        },
                    ]
                }
            },
        }
        result = Datastores.es.search(
            body=es_query, index=index_names, doc_type="log", size=0
        )
        return result["aggregations"]["parent_agg"]["buckets"]

    @classmethod
    def _example_calls_for_hashes(cls, filter_settings, index_names, hashes):
        """Fetch up to 5 most recent example calls for each statement hash.

        :return: ``(call_results, report_ids)`` — a dict mapping statement
            hash to a list of example call ``_source`` docs, and a flat list
            of all report ids referenced by those examples
        """
        calls_query = {
            "aggs": {
                "top_calls": {
                    "terms": {"field": "tags.statement_hash.values", "size": 15},
                    "aggs": {
                        "top_calls_hits": {
                            "top_hits": {"sort": {"timestamp": "desc"}, "size": 5}
                        }
                    },
                }
            },
            "query": {
                "bool": {
                    "filter": [
                        {
                            "terms": {
                                "resource_id": [filter_settings["resource"][0]]
                            }
                        },
                        {"terms": {"tags.statement_hash.values": hashes}},
                        {
                            "range": {
                                "timestamp": {
                                    "gte": filter_settings["start_date"],
                                    "lte": filter_settings["end_date"],
                                }
                            }
                        },
                    ]
                }
            },
        }
        calls = Datastores.es.search(
            body=calls_query, index=index_names, doc_type="log", size=0
        )
        call_results = {}
        report_ids = []
        for bucket in calls["aggregations"]["top_calls"]["buckets"]:
            hits = bucket["top_calls_hits"]["hits"]["hits"]
            call_results[bucket["key"]] = [hit["_source"] for hit in hits]
            report_ids.extend(
                [hit["_source"]["tags"]["report_id"]["values"] for hit in hits]
            )
        return call_results, report_ids

    @classmethod
    def _report_groups(cls, db_session, report_ids, filter_settings):
        """Map each referenced report id to its report group id."""
        if not report_ids:
            return {}
        r_query = db_session.query(Report.group_id, Report.id)
        r_query = r_query.filter(Report.id.in_(report_ids))
        r_query = r_query.filter(Report.start_time >= filter_settings["start_date"])
        return {report.id: report.group_id for report in r_query}

    @classmethod
    def _build_rows(cls, buckets, call_results, reports_reversed):
        """Combine aggregation buckets with example calls into result rows;
        buckets without example calls are skipped."""
        final_results = []
        for item in buckets:
            if item["key"] not in call_results:
                continue
            first_call = call_results[item["key"]][0]
            row = {
                # NOTE: "occurences" (sic) is the established result key;
                # renaming it would break existing consumers.
                "occurences": item["total"]["sub_agg"]["value"],
                "total_duration": round(item["duration"]["sub_agg"]["value"]),
                "statement": first_call["message"],
                "statement_type": first_call["tags"]["type"]["values"],
                "statement_subtype": first_call["tags"]["subtype"]["values"],
                "statement_hash": item["key"],
                "latest_details": [],
            }
            # BUG FIX: the original compared against " remote" (with a
            # leading space), which can never equal a "remote" type tag, so
            # remote calls never had their parameters appended.
            if row["statement_type"] in ["tmpl", "remote"]:
                params = (
                    first_call["tags"]["parameters"]["values"]
                    if "parameters" in first_call["tags"]
                    else ""
                )
                row["statement"] = "{} ({})".format(first_call["message"], params)
            for call in call_results[item["key"]]:
                report_id = call["tags"]["report_id"]["values"]
                group_id = reports_reversed.get(report_id)
                if group_id:
                    row["latest_details"].append(
                        {"group_id": group_id, "report_id": report_id}
                    )
            final_results.append(row)
        return final_results