##// END OF EJS Templates
logs: fix for common values
ergo -
Show More
@@ -1,219 +1,222 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 from datetime import datetime, timedelta
19 19
20 20 from pyramid.view import view_config
21 21 from pyramid.httpexceptions import HTTPUnprocessableEntity
22 22 from appenlight.models import Datastores, Log
23 23 from appenlight.models.services.log import LogService
24 24 from appenlight.lib.utils import (
25 25 build_filter_settings_from_query_dict,
26 26 es_index_name_limiter,
27 27 )
28 28 from appenlight.lib.helpers import gen_pagination_headers
29 29 from appenlight.celery.tasks import logs_cleanup
30 30
# Module-level logger for this views module.
log = logging.getLogger(__name__)

# Redis key template for persisted log-section filters; the "%s" slot is
# filled by the caller. NOTE(review): not referenced anywhere in this
# chunk — presumably used by other views; verify before removing.
section_filters_key = "appenlight:logs:filter:%s"
35 35
@view_config(route_name="logs_no_id", renderer="json", permission="authenticated")
def fetch_logs(request):
    """
    Return a page of log entries fetched from Elasticsearch.

    Filter criteria are read from the query string; pagination headers
    are attached to the response so the client can page through results.
    """
    query_params = request.GET.mixed()
    settings = build_filter_settings_from_query_dict(request, query_params)
    paginator = LogService.get_paginator_by_app_ids(
        app_ids=settings["resource"],
        page=settings["page"],
        filter_settings=settings,
    )
    pagination_headers = gen_pagination_headers(request, paginator)
    request.response.headers.update(pagination_headers)
    return [entry.get_dict() for entry in paginator.sa_items]
54 54
55 55
@view_config(
    route_name="section_view",
    match_param=["section=logs_section", "view=fetch_series"],
    renderer="json",
    permission="authenticated",
)
def logs_fetch_series(request):
    """
    Handles metric dashboard graphs.

    Returns a list of ``{"x": datetime, "logs": count}`` points describing
    log volume over time for the filtered applications, computed over a
    7-day window and cached for 30 seconds.
    """
    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed()
    )
    # Fetch a single matching item only to discover the newest matching
    # log's timestamp; the series window is anchored to it below.
    paginator = LogService.get_paginator_by_app_ids(
        app_ids=filter_settings["resource"],
        page=1,
        filter_settings=filter_settings,
        items_per_page=1,
    )
    now = datetime.utcnow().replace(microsecond=0, second=0)
    delta = timedelta(days=7)
    if paginator.sa_items:
        # Anchor the 7-day window to end at the newest matching entry.
        start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0, second=0)
        filter_settings["start_date"] = start_date - delta
    else:
        # No matching logs — fall back to the last 7 days from "now".
        filter_settings["start_date"] = now - delta
    filter_settings["end_date"] = filter_settings["start_date"] + timedelta(days=7)

    @request.registry.cache_regions.redis_sec_30.cache_on_arguments("logs_graphs")
    def cached(apps, search_params, delta, now):
        # NOTE: the parameters serve only as the cache key; the body reads
        # ``filter_settings`` from the enclosing scope.
        data = LogService.get_time_series_aggregate(
            filter_settings["resource"], filter_settings
        )
        if not data:
            return []
        buckets = data["aggregations"]["events_over_time"]["buckets"]
        return [
            {
                # Elasticsearch date-histogram bucket keys are epoch
                # milliseconds, hence the division by 1000.
                "x": datetime.utcfromtimestamp(item["key"] / 1000),
                "logs": item["doc_count"],
            }
            for item in buckets
        ]

    return cached(filter_settings, request.GET.mixed(), delta, now)
103 103
@view_config(
    route_name="logs_no_id",
    renderer="json",
    request_method="DELETE",
    permission="authenticated",
)
def logs_mass_delete(request):
    """
    Kick off asynchronous deletion of log entries matching the filters.

    Requires a ``resource`` query parameter and refuses the request when
    the permission-checked application does not match it, so a user
    cannot wipe data of an application they did not explicitly target.
    """
    query = request.GET.mixed()
    if "resource" not in query:
        raise HTTPUnprocessableEntity()
    # this might be '' and then colander will not validate the schema
    if not query.get("namespace"):
        query.pop("namespace", None)

    filter_settings = build_filter_settings_from_query_dict(
        request, query, resource_permissions=["update_reports"]
    )
    resource_id = list(filter_settings["resource"])[0]
    # filter settings returns list of all of users applications
    # if app is not matching - normally we would not care as its used for search
    # but here user playing with params would possibly wipe out their whole data
    if int(resource_id) != int(query["resource"]):
        raise HTTPUnprocessableEntity()

    logs_cleanup.delay(resource_id, filter_settings)
    request.session.flash(
        "Log cleanup process started - it may take a while for "
        "everything to get removed"
    )
    return {}
135 135
136 136
@view_config(
    route_name="section_view",
    match_param=("view=common_tags", "section=logs_section"),
    renderer="json",
    permission="authenticated",
)
def common_tags(request):
    """
    Return the most common tag names and namespaces for the selected
    applications, optionally narrowed by date range, log level and
    namespace filters taken from the query string.
    """
    params = request.GET.mixed()
    filter_settings = build_filter_settings_from_query_dict(request, params)

    app_ids = list(filter_settings["resource"])
    conditions = [{"terms": {"resource_id": list(app_ids)}}]
    query = {"query": {"filtered": {"filter": {"and": conditions}}}}

    # constrain by timestamp only when at least one bound was supplied
    ts_bounds = {}
    if filter_settings.get("start_date"):
        ts_bounds["gte"] = filter_settings.get("start_date")
    if filter_settings.get("end_date"):
        ts_bounds["lte"] = filter_settings.get("end_date")
    if ts_bounds:
        conditions.append({"range": {"timestamp": ts_bounds}})

    levels = filter_settings.get("level")
    if levels:
        conditions.append({"terms": {"log_level": levels}})
    namespaces = filter_settings.get("namespace")
    if namespaces:
        conditions.append({"terms": {"namespace": namespaces}})

    index_names = es_index_name_limiter(ixtypes=[params.get("datasource", "logs")])

    def top_keys(field):
        # Run a 50-bucket terms aggregation over ``field`` and return the
        # bucket keys; the same base query is reused for both calls.
        query["aggs"] = {"sub_agg": {"terms": {"field": field, "size": 50}}}
        result = Datastores.es.search(
            body=query, index=index_names, doc_type="log", size=0
        )
        buckets = result["aggregations"]["sub_agg"].get("buckets", [])
        return [bucket["key"] for bucket in buckets]

    return {"tags": top_keys("tag_list"), "namespaces": top_keys("namespace")}
187 187
188 188
@view_config(
    route_name="section_view",
    match_param=("view=common_values", "section=logs_section"),
    renderer="json",
    permission="authenticated",
)
def common_values(request):
    """
    Return the most common values of a single tag for the selected
    applications, optionally narrowed to specific namespaces.

    The tag whose values are aggregated is taken from the first ``tags``
    filter in the query string.

    :raises HTTPUnprocessableEntity: when no tag filter was supplied
        (previously this crashed with an unhandled IndexError).
    """
    config = request.GET.mixed()
    datasource = config.pop("datasource", "logs")
    filter_settings = build_filter_settings_from_query_dict(request, config)
    resources = list(filter_settings["resource"])

    # a tag filter is mandatory - without it there is nothing to aggregate
    tags = filter_settings.get("tags")
    if not tags:
        raise HTTPUnprocessableEntity()
    tag_name = tags[0]["value"][0]

    and_part = [
        {"terms": {"resource_id": resources}},
    ]
    # only constrain by namespace when one was actually requested — an
    # empty "namespace" terms filter would match no documents at all
    # (.get() keeps this consistent with common_tags above)
    if filter_settings.get("namespace"):
        and_part.append({"terms": {"namespace": filter_settings["namespace"]}})
    query = {"query": {"filtered": {"filter": {"and": and_part}}}}
    # tag values are indexed under a per-tag-name field
    query["aggs"] = {
        "sub_agg": {"terms": {"field": "tags.{}.values".format(tag_name), "size": 50}}
    }
    index_names = es_index_name_limiter(ixtypes=[datasource])
    result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
    values_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
    return {"values": [item["key"] for item in values_buckets]}
General Comments 0
You need to be logged in to leave comments. Login now