@@ -1,219 +1,222 @@
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from datetime import datetime, timedelta

from pyramid.view import view_config
from pyramid.httpexceptions import HTTPUnprocessableEntity
from appenlight.models import Datastores, Log
from appenlight.models.services.log import LogService
from appenlight.lib.utils import (
    build_filter_settings_from_query_dict,
    es_index_name_limiter,
)
from appenlight.lib.helpers import gen_pagination_headers
from appenlight.celery.tasks import logs_cleanup

log = logging.getLogger(__name__)

section_filters_key = "appenlight:logs:filter:%s"

@view_config(route_name="logs_no_id", renderer="json", permission="authenticated")
def fetch_logs(request):
    """
    Returns a list of log entries from Elasticsearch
    """

    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed()
    )
    logs_paginator = LogService.get_paginator_by_app_ids(
        app_ids=filter_settings["resource"],
        page=filter_settings["page"],
        filter_settings=filter_settings,
    )
    headers = gen_pagination_headers(request, logs_paginator)
    request.response.headers.update(headers)

    return [l.get_dict() for l in logs_paginator.sa_items]

@view_config(
    route_name="section_view",
    match_param=["section=logs_section", "view=fetch_series"],
    renderer="json",
    permission="authenticated",
)
def logs_fetch_series(request):
    """
    Handles metric dashboard graphs
    Returns information for time/tier breakdown
    """
    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed()
    )
    paginator = LogService.get_paginator_by_app_ids(
        app_ids=filter_settings["resource"],
        page=1,
        filter_settings=filter_settings,
        items_per_page=1,
    )
    now = datetime.utcnow().replace(microsecond=0, second=0)
    delta = timedelta(days=7)
    if paginator.sa_items:
        start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0, second=0)
        filter_settings["start_date"] = start_date - delta
    else:
        filter_settings["start_date"] = now - delta
    filter_settings["end_date"] = filter_settings["start_date"] + timedelta(days=7)

    @request.registry.cache_regions.redis_sec_30.cache_on_arguments("logs_graphs")
    def cached(apps, search_params, delta, now):
        # the arguments only serve to build the cache key; the query itself
        # closes over filter_settings from the enclosing scope
        data = LogService.get_time_series_aggregate(
            filter_settings["resource"], filter_settings
        )
        if not data:
            return []
        buckets = data["aggregations"]["events_over_time"]["buckets"]
        return [
            {
                "x": datetime.utcfromtimestamp(item["key"] / 1000),
                "logs": item["doc_count"],
            }
            for item in buckets
        ]

    return cached(filter_settings, request.GET.mixed(), delta, now)

@view_config(
    route_name="logs_no_id",
    renderer="json",
    request_method="DELETE",
    permission="authenticated",
)
def logs_mass_delete(request):
    params = request.GET.mixed()
    if "resource" not in params:
        raise HTTPUnprocessableEntity()
    # this might be '' and then colander will not validate the schema
    if not params.get("namespace"):
        params.pop("namespace", None)
    filter_settings = build_filter_settings_from_query_dict(
        request, params, resource_permissions=["update_reports"]
    )

    resource_id = list(filter_settings["resource"])[0]
    # filter settings return a list of all of the user's applications;
    # if the app doesn't match we normally would not care, since the list
    # is only used for search - but here a user playing with params could
    # wipe out their whole dataset
    if int(resource_id) != int(params["resource"]):
        raise HTTPUnprocessableEntity()

    logs_cleanup.delay(resource_id, filter_settings)
    msg = (
        "Log cleanup process started - it may take a while for "
        "everything to get removed"
    )
    request.session.flash(msg)
    return {}

@view_config(
    route_name="section_view",
    match_param=("view=common_tags", "section=logs_section"),
    renderer="json",
    permission="authenticated",
)
def common_tags(request):
    config = request.GET.mixed()
    filter_settings = build_filter_settings_from_query_dict(request, config)

    resources = list(filter_settings["resource"])
    query = {
        "query": {
            "filtered": {
                "filter": {"and": [{"terms": {"resource_id": list(resources)}}]}
            }
        }
    }
    start_date = filter_settings.get("start_date")
    end_date = filter_settings.get("end_date")
    filter_part = query["query"]["filtered"]["filter"]["and"]

    date_range = {"range": {"timestamp": {}}}
    if start_date:
        date_range["range"]["timestamp"]["gte"] = start_date
    if end_date:
        date_range["range"]["timestamp"]["lte"] = end_date
    if start_date or end_date:
        filter_part.append(date_range)

    levels = filter_settings.get("level")
    if levels:
        filter_part.append({"terms": {"log_level": levels}})
    namespaces = filter_settings.get("namespace")
    if namespaces:
        filter_part.append({"terms": {"namespace": namespaces}})

    # tags
    query["aggs"] = {"sub_agg": {"terms": {"field": "tag_list", "size": 50}}}
    index_names = es_index_name_limiter(ixtypes=[config.get("datasource", "logs")])
    result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
    tag_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
    # namespaces
    query["aggs"] = {"sub_agg": {"terms": {"field": "namespace", "size": 50}}}
    result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
    namespaces_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
    return {
        "tags": [item["key"] for item in tag_buckets],
        "namespaces": [item["key"] for item in namespaces_buckets],
    }

@view_config(
    route_name="section_view",
    match_param=("view=common_values", "section=logs_section"),
    renderer="json",
    permission="authenticated",
)
def common_values(request):
    config = request.GET.mixed()
    datasource = config.pop("datasource", "logs")
    filter_settings = build_filter_settings_from_query_dict(request, config)
    resources = list(filter_settings["resource"])
    tag_name = filter_settings["tags"][0]["value"][0]
+
+    and_part = [
+        {"terms": {"resource_id": list(resources)}},
+    ]
+    if filter_settings["namespace"]:
+        and_part.append({"terms": {"namespace": filter_settings["namespace"]}})
    query = {
        "query": {
            "filtered": {
                "filter": {
-                    "and": [
-                        {"terms": {"resource_id": list(resources)}},
-                        {"terms": {"namespace": filter_settings["namespace"]}},
-                    ]
+                    "and": and_part
                }
            }
        }
    }
    query["aggs"] = {
        "sub_agg": {"terms": {"field": "tags.{}.values".format(tag_name), "size": 50}}
    }
    index_names = es_index_name_limiter(ixtypes=[datasource])
    result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
    values_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
    return {"values": [item["key"] for item in values_buckets]}