##// END OF EJS Templates
bump requirements
ergo -
Show More
@@ -1,49 +1,47 b''
1 repoze.sendmail==4.1
1 repoze.sendmail==4.4.1
2 pyramid==1.10.2
2 pyramid==1.10.2
3 pyramid_tm==0.12
3 pyramid_tm==2.2.1
4 pyramid_debugtoolbar
4 pyramid_debugtoolbar
5 pyramid_authstack==1.0.1
5 pyramid_authstack==1.0.1
6 SQLAlchemy==1.0.12
6 SQLAlchemy==1.2.18
7 alembic==1.0.8
7 alembic==1.0.8
8 webhelpers2==2.0
8 webhelpers2==2.0
9 transaction==1.4.3
9 transaction==2.4.0
10 zope.sqlalchemy==0.7.6
10 zope.sqlalchemy==1.1
11 pyramid_mailer==0.14.1
11 pyramid_mailer==0.15.1
12 redis==2.10.5
12 redis==3.2.1
13 redlock-py==1.0.8
13 redlock-py==1.0.8
14 pyramid_jinja2==2.6.2
14 pyramid_jinja2==2.8
15 psycopg2==2.7.7
15 psycopg2-binary==2.7.7
16 wtforms==2.1
16 wtforms==2.2.1
17 celery==3.1.23
17 celery==4.2.1
18 formencode==1.3.0
18 formencode==1.3.1
19 psutil==2.1.2
19 psutil==5.6.1
20 ziggurat_foundations==0.8.3
20 ziggurat_foundations==0.8.3
21 bcrypt==3.1.6
21 bcrypt==3.1.6
22 appenlight_client
22 appenlight_client
23 markdown==2.5
23 markdown==3.0.1
24 colander==1.7
24 colander==1.7
25 defusedxml==0.5.0
25 defusedxml==0.5.0
26 dogpile.cache==0.5.7
26 dogpile.cache==0.7.1
27 pyramid_redis_sessions==1.0.1
27 pyramid_redis_sessions==1.0.1
28 simplejson==3.8.2
28 simplejson==3.16.0
29 waitress==1.0
29 waitress==1.2.1
30 gunicorn==19.9.0
30 gunicorn==19.9.0
31 requests==2.20.0
31 uwsgi==2.0.18
32 requests_oauthlib==0.6.1
32 requests==2.21.0
33 gevent==1.1.1
33 requests_oauthlib==1.2.0
34 gevent-websocket==0.9.5
34 gevent==1.4.0
35 pygments==2.1.3
35 pygments==2.3.1
36 lxml==4.3.2
36 lxml==4.3.2
37 paginate==0.5.4
37 paginate==0.5.6
38 paginate-sqlalchemy==0.2.0
38 paginate-sqlalchemy==0.3.0
39 elasticsearch>=2.0.0,<3.0.0
39 elasticsearch>=2.0.0,<3.0.0
40 six>=1.10.0
41 mock==1.0.1
40 mock==1.0.1
42 itsdangerous==1.1.0
41 itsdangerous==1.1.0
43 camplight==0.9.6
42 camplight==0.9.6
44 jira==1.0.7
43 jira==1.0.7
45 python-dateutil==2.5.3
44 python-dateutil==2.5.3
46 authomatic==0.1.0.post1
45 authomatic==0.1.0.post1
47 cryptography==2.6.1
46 cryptography==2.6.1
48 webassets==0.11.1
49
47
@@ -1,171 +1,171 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import logging
17 import logging
18
18
19 from datetime import timedelta
19 from datetime import timedelta
20 from celery import Celery
20 from celery import Celery
21 from celery.bin import Option
21 from celery.bin import Option
22 from celery.schedules import crontab
22 from celery.schedules import crontab
23 from celery.signals import worker_init, task_revoked, user_preload_options
23 from celery.signals import worker_init, task_revoked, user_preload_options
24 from celery.signals import task_prerun, task_retry, task_failure, task_success
24 from celery.signals import task_prerun, task_retry, task_failure, task_success
25 from kombu.serialization import register
25 from kombu.serialization import register
26 from pyramid.paster import bootstrap
26 from pyramid.paster import bootstrap
27 from pyramid.request import Request
27 from pyramid.request import Request
28 from pyramid.scripting import prepare
28 from pyramid.scripting import prepare
29 from pyramid.settings import asbool
29 from pyramid.settings import asbool
30 from pyramid.threadlocal import get_current_request
30 from pyramid.threadlocal import get_current_request
31
31
32 from appenlight.celery.encoders import json_dumps, json_loads
32 from appenlight.celery.encoders import json_dumps, json_loads
33 from appenlight_client.ext.celery import register_signals
33 from appenlight_client.ext.celery import register_signals
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37 register('date_json', json_dumps, json_loads,
37 register('date_json', json_dumps, json_loads,
38 content_type='application/x-date_json',
38 content_type='application/x-date_json',
39 content_encoding='utf-8')
39 content_encoding='utf-8')
40
40
41 celery = Celery()
41 celery = Celery()
42
42
43 celery.user_options['preload'].add(
43 celery.user_options['preload'].add(
44 Option('--ini', dest='ini', default=None,
44 Option('--ini', dest='ini', default=None,
45 help='Specifies pyramid configuration file location.')
45 help='Specifies pyramid configuration file location.')
46 )
46 )
47
47
48
48
49 @user_preload_options.connect
49 @user_preload_options.connect
50 def on_preload_parsed(options, **kwargs):
50 def on_preload_parsed(options, **kwargs):
51 """
51 """
52 This actually configures celery from pyramid config file
52 This actually configures celery from pyramid config file
53 """
53 """
54 celery.conf['INI_PYRAMID'] = options['ini']
54 celery.conf['INI_PYRAMID'] = options['ini']
55 import appenlight_client.client as e_client
55 import appenlight_client.client as e_client
56 ini_location = options['ini']
56 ini_location = options['ini']
57 if not ini_location:
57 if not ini_location:
58 raise Exception('You need to pass pyramid ini location using '
58 raise Exception('You need to pass pyramid ini location using '
59 '--ini=filename.ini argument to the worker')
59 '--ini=filename.ini argument to the worker')
60 env = bootstrap(ini_location)
60 env = bootstrap(ini_location[0])
61 api_key = env['request'].registry.settings['appenlight.api_key']
61 api_key = env['request'].registry.settings['appenlight.api_key']
62 tr_config = env['request'].registry.settings.get(
62 tr_config = env['request'].registry.settings.get(
63 'appenlight.transport_config')
63 'appenlight.transport_config')
64 CONFIG = e_client.get_config({'appenlight.api_key': api_key})
64 CONFIG = e_client.get_config({'appenlight.api_key': api_key})
65 if tr_config:
65 if tr_config:
66 CONFIG['appenlight.transport_config'] = tr_config
66 CONFIG['appenlight.transport_config'] = tr_config
67 APPENLIGHT_CLIENT = e_client.Client(CONFIG)
67 APPENLIGHT_CLIENT = e_client.Client(CONFIG)
68 # log.addHandler(APPENLIGHT_CLIENT.log_handler)
68 # log.addHandler(APPENLIGHT_CLIENT.log_handler)
69 register_signals(APPENLIGHT_CLIENT)
69 register_signals(APPENLIGHT_CLIENT)
70 celery.pyramid = env
70 celery.pyramid = env
71
71
72
72
73 celery_config = {
73 celery_config = {
74 'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
74 'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
75 'CELERYD_TASK_TIME_LIMIT': 60,
75 'CELERYD_TASK_TIME_LIMIT': 60,
76 'CELERYD_MAX_TASKS_PER_CHILD': 1000,
76 'CELERYD_MAX_TASKS_PER_CHILD': 1000,
77 'CELERY_IGNORE_RESULT': True,
77 'CELERY_IGNORE_RESULT': True,
78 'CELERY_ACCEPT_CONTENT': ['date_json'],
78 'CELERY_ACCEPT_CONTENT': ['date_json'],
79 'CELERY_TASK_SERIALIZER': 'date_json',
79 'CELERY_TASK_SERIALIZER': 'date_json',
80 'CELERY_RESULT_SERIALIZER': 'date_json',
80 'CELERY_RESULT_SERIALIZER': 'date_json',
81 'BROKER_URL': None,
81 'BROKER_URL': None,
82 'CELERYD_CONCURRENCY': None,
82 'CELERYD_CONCURRENCY': None,
83 'CELERY_TIMEZONE': None,
83 'CELERY_TIMEZONE': None,
84 'CELERYBEAT_SCHEDULE': {
84 'CELERYBEAT_SCHEDULE': {
85 'alerting_reports': {
85 'alerting_reports': {
86 'task': 'appenlight.celery.tasks.alerting_reports',
86 'task': 'appenlight.celery.tasks.alerting_reports',
87 'schedule': timedelta(seconds=60)
87 'schedule': timedelta(seconds=60)
88 },
88 },
89 'close_alerts': {
89 'close_alerts': {
90 'task': 'appenlight.celery.tasks.close_alerts',
90 'task': 'appenlight.celery.tasks.close_alerts',
91 'schedule': timedelta(seconds=60)
91 'schedule': timedelta(seconds=60)
92 }
92 }
93 }
93 }
94 }
94 }
95 celery.config_from_object(celery_config)
95 celery.config_from_object(celery_config)
96
96
97
97
98 def configure_celery(pyramid_registry):
98 def configure_celery(pyramid_registry):
99 settings = pyramid_registry.settings
99 settings = pyramid_registry.settings
100 celery_config['BROKER_URL'] = settings['celery.broker_url']
100 celery_config['BROKER_URL'] = settings['celery.broker_url']
101 celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
101 celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
102 celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
102 celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
103
103
104 notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
104 notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
105
105
106 celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
106 celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
107 'task': 'appenlight.celery.tasks.notifications_reports',
107 'task': 'appenlight.celery.tasks.notifications_reports',
108 'schedule': timedelta(seconds=notifications_seconds)
108 'schedule': timedelta(seconds=notifications_seconds)
109 }
109 }
110
110
111 celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
111 celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
112 'task': 'appenlight.celery.tasks.daily_digest',
112 'task': 'appenlight.celery.tasks.daily_digest',
113 'schedule': crontab(minute=1, hour='4,12,20')
113 'schedule': crontab(minute=1, hour='4,12,20')
114 }
114 }
115
115
116 if asbool(settings.get('celery.always_eager')):
116 if asbool(settings.get('celery.always_eager')):
117 celery_config['CELERY_ALWAYS_EAGER'] = True
117 celery_config['CELERY_ALWAYS_EAGER'] = True
118 celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
118 celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
119
119
120 for plugin in pyramid_registry.appenlight_plugins.values():
120 for plugin in pyramid_registry.appenlight_plugins.values():
121 if plugin.get('celery_tasks'):
121 if plugin.get('celery_tasks'):
122 celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
122 celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
123 if plugin.get('celery_beats'):
123 if plugin.get('celery_beats'):
124 for name, config in plugin['celery_beats']:
124 for name, config in plugin['celery_beats']:
125 celery_config['CELERYBEAT_SCHEDULE'][name] = config
125 celery_config['CELERYBEAT_SCHEDULE'][name] = config
126 celery.config_from_object(celery_config)
126 celery.config_from_object(celery_config)
127
127
128
128
129 @task_prerun.connect
129 @task_prerun.connect
130 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
130 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
131 if hasattr(celery, 'pyramid'):
131 if hasattr(celery, 'pyramid'):
132 env = celery.pyramid
132 env = celery.pyramid
133 env = prepare(registry=env['request'].registry)
133 env = prepare(registry=env['request'].registry)
134 proper_base_url = env['request'].registry.settings['mailing.app_url']
134 proper_base_url = env['request'].registry.settings['mailing.app_url']
135 tmp_req = Request.blank('/', base_url=proper_base_url)
135 tmp_req = Request.blank('/', base_url=proper_base_url)
136 # ensure tasks generate url for right domain from config
136 # ensure tasks generate url for right domain from config
137 env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
137 env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
138 env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
138 env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
139 env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
139 env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
140 env['request'].environ['wsgi.url_scheme'] = \
140 env['request'].environ['wsgi.url_scheme'] = \
141 tmp_req.environ['wsgi.url_scheme']
141 tmp_req.environ['wsgi.url_scheme']
142 get_current_request().tm.begin()
142 get_current_request().tm.begin()
143
143
144
144
145 @task_success.connect
145 @task_success.connect
146 def task_success_signal(result, **kwargs):
146 def task_success_signal(result, **kwargs):
147 get_current_request().tm.commit()
147 get_current_request().tm.commit()
148 if hasattr(celery, 'pyramid'):
148 if hasattr(celery, 'pyramid'):
149 celery.pyramid["closer"]()
149 celery.pyramid["closer"]()
150
150
151
151
152 @task_retry.connect
152 @task_retry.connect
153 def task_retry_signal(request, reason, einfo, **kwargs):
153 def task_retry_signal(request, reason, einfo, **kwargs):
154 get_current_request().tm.abort()
154 get_current_request().tm.abort()
155 if hasattr(celery, 'pyramid'):
155 if hasattr(celery, 'pyramid'):
156 celery.pyramid["closer"]()
156 celery.pyramid["closer"]()
157
157
158
158
159 @task_failure.connect
159 @task_failure.connect
160 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
160 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
161 **kwaargs):
161 **kwaargs):
162 get_current_request().tm.abort()
162 get_current_request().tm.abort()
163 if hasattr(celery, 'pyramid'):
163 if hasattr(celery, 'pyramid'):
164 celery.pyramid["closer"]()
164 celery.pyramid["closer"]()
165
165
166
166
167 @task_revoked.connect
167 @task_revoked.connect
168 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
168 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
169 get_current_request().tm.abort()
169 get_current_request().tm.abort()
170 if hasattr(celery, 'pyramid'):
170 if hasattr(celery, 'pyramid'):
171 celery.pyramid["closer"]()
171 celery.pyramid["closer"]()
@@ -1,183 +1,184 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import copy
17 import copy
18 import hashlib
18 import hashlib
19 import inspect
19 import inspect
20
20
21 from dogpile.cache import make_region, compat
21 from dogpile.cache import make_region
22 from dogpile.cache.util import compat
22
23
23 regions = None
24 regions = None
24
25
25
26
26 def key_mangler(key):
27 def key_mangler(key):
27 return "appenlight:dogpile:{}".format(key)
28 return "appenlight:dogpile:{}".format(key)
28
29
29
30
30 def hashgen(namespace, fn, to_str=compat.string_type):
31 def hashgen(namespace, fn, to_str=compat.string_type):
31 """Return a function that generates a string
32 """Return a function that generates a string
32 key, based on a given function as well as
33 key, based on a given function as well as
33 arguments to the returned function itself.
34 arguments to the returned function itself.
34
35
35 This is used by :meth:`.CacheRegion.cache_on_arguments`
36 This is used by :meth:`.CacheRegion.cache_on_arguments`
36 to generate a cache key from a decorated function.
37 to generate a cache key from a decorated function.
37
38
38 It can be replaced using the ``function_key_generator``
39 It can be replaced using the ``function_key_generator``
39 argument passed to :func:`.make_region`.
40 argument passed to :func:`.make_region`.
40
41
41 """
42 """
42
43
43 if namespace is None:
44 if namespace is None:
44 namespace = '%s:%s' % (fn.__module__, fn.__name__)
45 namespace = '%s:%s' % (fn.__module__, fn.__name__)
45 else:
46 else:
46 namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
47 namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
47
48
48 args = inspect.getargspec(fn)
49 args = inspect.getargspec(fn)
49 has_self = args[0] and args[0][0] in ('self', 'cls')
50 has_self = args[0] and args[0][0] in ('self', 'cls')
50
51
51 def generate_key(*args, **kw):
52 def generate_key(*args, **kw):
52 if kw:
53 if kw:
53 raise ValueError(
54 raise ValueError(
54 "dogpile.cache's default key creation "
55 "dogpile.cache's default key creation "
55 "function does not accept keyword arguments.")
56 "function does not accept keyword arguments.")
56 if has_self:
57 if has_self:
57 args = args[1:]
58 args = args[1:]
58
59
59 return namespace + "|" + hashlib.sha1(
60 return namespace + "|" + hashlib.sha1(
60 " ".join(map(to_str, args)).encode('utf8')).hexdigest()
61 " ".join(map(to_str, args)).encode('utf8')).hexdigest()
61
62
62 return generate_key
63 return generate_key
63
64
64
65
65 class CacheRegions(object):
66 class CacheRegions(object):
66 def __init__(self, settings):
67 def __init__(self, settings):
67 config_redis = {"arguments": settings}
68 config_redis = {"arguments": settings}
68
69
69 self.redis_min_1 = make_region(
70 self.redis_min_1 = make_region(
70 function_key_generator=hashgen,
71 function_key_generator=hashgen,
71 key_mangler=key_mangler).configure(
72 key_mangler=key_mangler).configure(
72 "dogpile.cache.redis",
73 "dogpile.cache.redis",
73 expiration_time=60,
74 expiration_time=60,
74 **copy.deepcopy(config_redis))
75 **copy.deepcopy(config_redis))
75 self.redis_min_5 = make_region(
76 self.redis_min_5 = make_region(
76 function_key_generator=hashgen,
77 function_key_generator=hashgen,
77 key_mangler=key_mangler).configure(
78 key_mangler=key_mangler).configure(
78 "dogpile.cache.redis",
79 "dogpile.cache.redis",
79 expiration_time=300,
80 expiration_time=300,
80 **copy.deepcopy(config_redis))
81 **copy.deepcopy(config_redis))
81
82
82 self.redis_min_10 = make_region(
83 self.redis_min_10 = make_region(
83 function_key_generator=hashgen,
84 function_key_generator=hashgen,
84 key_mangler=key_mangler).configure(
85 key_mangler=key_mangler).configure(
85 "dogpile.cache.redis",
86 "dogpile.cache.redis",
86 expiration_time=60,
87 expiration_time=60,
87 **copy.deepcopy(config_redis))
88 **copy.deepcopy(config_redis))
88
89
89 self.redis_min_60 = make_region(
90 self.redis_min_60 = make_region(
90 function_key_generator=hashgen,
91 function_key_generator=hashgen,
91 key_mangler=key_mangler).configure(
92 key_mangler=key_mangler).configure(
92 "dogpile.cache.redis",
93 "dogpile.cache.redis",
93 expiration_time=3600,
94 expiration_time=3600,
94 **copy.deepcopy(config_redis))
95 **copy.deepcopy(config_redis))
95
96
96 self.redis_sec_1 = make_region(
97 self.redis_sec_1 = make_region(
97 function_key_generator=hashgen,
98 function_key_generator=hashgen,
98 key_mangler=key_mangler).configure(
99 key_mangler=key_mangler).configure(
99 "dogpile.cache.redis",
100 "dogpile.cache.redis",
100 expiration_time=1,
101 expiration_time=1,
101 **copy.deepcopy(config_redis))
102 **copy.deepcopy(config_redis))
102
103
103 self.redis_sec_5 = make_region(
104 self.redis_sec_5 = make_region(
104 function_key_generator=hashgen,
105 function_key_generator=hashgen,
105 key_mangler=key_mangler).configure(
106 key_mangler=key_mangler).configure(
106 "dogpile.cache.redis",
107 "dogpile.cache.redis",
107 expiration_time=5,
108 expiration_time=5,
108 **copy.deepcopy(config_redis))
109 **copy.deepcopy(config_redis))
109
110
110 self.redis_sec_30 = make_region(
111 self.redis_sec_30 = make_region(
111 function_key_generator=hashgen,
112 function_key_generator=hashgen,
112 key_mangler=key_mangler).configure(
113 key_mangler=key_mangler).configure(
113 "dogpile.cache.redis",
114 "dogpile.cache.redis",
114 expiration_time=30,
115 expiration_time=30,
115 **copy.deepcopy(config_redis))
116 **copy.deepcopy(config_redis))
116
117
117 self.redis_day_1 = make_region(
118 self.redis_day_1 = make_region(
118 function_key_generator=hashgen,
119 function_key_generator=hashgen,
119 key_mangler=key_mangler).configure(
120 key_mangler=key_mangler).configure(
120 "dogpile.cache.redis",
121 "dogpile.cache.redis",
121 expiration_time=86400,
122 expiration_time=86400,
122 **copy.deepcopy(config_redis))
123 **copy.deepcopy(config_redis))
123
124
124 self.redis_day_7 = make_region(
125 self.redis_day_7 = make_region(
125 function_key_generator=hashgen,
126 function_key_generator=hashgen,
126 key_mangler=key_mangler).configure(
127 key_mangler=key_mangler).configure(
127 "dogpile.cache.redis",
128 "dogpile.cache.redis",
128 expiration_time=86400 * 7,
129 expiration_time=86400 * 7,
129 **copy.deepcopy(config_redis))
130 **copy.deepcopy(config_redis))
130
131
131 self.redis_day_30 = make_region(
132 self.redis_day_30 = make_region(
132 function_key_generator=hashgen,
133 function_key_generator=hashgen,
133 key_mangler=key_mangler).configure(
134 key_mangler=key_mangler).configure(
134 "dogpile.cache.redis",
135 "dogpile.cache.redis",
135 expiration_time=86400 * 30,
136 expiration_time=86400 * 30,
136 **copy.deepcopy(config_redis))
137 **copy.deepcopy(config_redis))
137
138
138 self.memory_day_1 = make_region(
139 self.memory_day_1 = make_region(
139 function_key_generator=hashgen,
140 function_key_generator=hashgen,
140 key_mangler=key_mangler).configure(
141 key_mangler=key_mangler).configure(
141 "dogpile.cache.memory",
142 "dogpile.cache.memory",
142 expiration_time=86400,
143 expiration_time=86400,
143 **copy.deepcopy(config_redis))
144 **copy.deepcopy(config_redis))
144
145
145 self.memory_sec_1 = make_region(
146 self.memory_sec_1 = make_region(
146 function_key_generator=hashgen,
147 function_key_generator=hashgen,
147 key_mangler=key_mangler).configure(
148 key_mangler=key_mangler).configure(
148 "dogpile.cache.memory",
149 "dogpile.cache.memory",
149 expiration_time=1)
150 expiration_time=1)
150
151
151 self.memory_sec_5 = make_region(
152 self.memory_sec_5 = make_region(
152 function_key_generator=hashgen,
153 function_key_generator=hashgen,
153 key_mangler=key_mangler).configure(
154 key_mangler=key_mangler).configure(
154 "dogpile.cache.memory",
155 "dogpile.cache.memory",
155 expiration_time=5)
156 expiration_time=5)
156
157
157 self.memory_min_1 = make_region(
158 self.memory_min_1 = make_region(
158 function_key_generator=hashgen,
159 function_key_generator=hashgen,
159 key_mangler=key_mangler).configure(
160 key_mangler=key_mangler).configure(
160 "dogpile.cache.memory",
161 "dogpile.cache.memory",
161 expiration_time=60)
162 expiration_time=60)
162
163
163 self.memory_min_5 = make_region(
164 self.memory_min_5 = make_region(
164 function_key_generator=hashgen,
165 function_key_generator=hashgen,
165 key_mangler=key_mangler).configure(
166 key_mangler=key_mangler).configure(
166 "dogpile.cache.memory",
167 "dogpile.cache.memory",
167 expiration_time=300)
168 expiration_time=300)
168
169
169 self.memory_min_10 = make_region(
170 self.memory_min_10 = make_region(
170 function_key_generator=hashgen,
171 function_key_generator=hashgen,
171 key_mangler=key_mangler).configure(
172 key_mangler=key_mangler).configure(
172 "dogpile.cache.memory",
173 "dogpile.cache.memory",
173 expiration_time=600)
174 expiration_time=600)
174
175
175 self.memory_min_60 = make_region(
176 self.memory_min_60 = make_region(
176 function_key_generator=hashgen,
177 function_key_generator=hashgen,
177 key_mangler=key_mangler).configure(
178 key_mangler=key_mangler).configure(
178 "dogpile.cache.memory",
179 "dogpile.cache.memory",
179 expiration_time=3600)
180 expiration_time=3600)
180
181
181
182
182 def get_region(region):
183 def get_region(region):
183 return getattr(regions, region)
184 return getattr(regions, region)
@@ -1,32 +1,32 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import sqlalchemy as sa
17 import sqlalchemy as sa
18 from ziggurat_foundations.models.base import BaseModel
18 from ziggurat_foundations.models.base import BaseModel
19 from sqlalchemy.dialects.postgres import JSON
19 from sqlalchemy.dialects.postgresql import JSON
20
20
21 from . import Base
21 from . import Base
22
22
23
23
24 class Config(Base, BaseModel):
24 class Config(Base, BaseModel):
25 __tablename__ = 'config'
25 __tablename__ = 'config'
26
26
27 key = sa.Column(sa.Unicode, primary_key=True)
27 key = sa.Column(sa.Unicode, primary_key=True)
28 section = sa.Column(sa.Unicode, primary_key=True)
28 section = sa.Column(sa.Unicode, primary_key=True)
29 value = sa.Column(JSON, nullable=False)
29 value = sa.Column(JSON, nullable=False)
30
30
31 def __json__(self, request):
31 def __json__(self, request):
32 return self.get_dict()
32 return self.get_dict()
@@ -1,40 +1,40 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import sqlalchemy as sa
17 import sqlalchemy as sa
18 from ziggurat_foundations.models.base import BaseModel
18 from ziggurat_foundations.models.base import BaseModel
19 from sqlalchemy.dialects.postgres import JSON
19 from sqlalchemy.dialects.postgresql import JSON
20
20
21 from . import Base
21 from . import Base
22
22
23
23
24 class PluginConfig(Base, BaseModel):
24 class PluginConfig(Base, BaseModel):
25 __tablename__ = 'plugin_configs'
25 __tablename__ = 'plugin_configs'
26
26
27 id = sa.Column(sa.Integer, primary_key=True)
27 id = sa.Column(sa.Integer, primary_key=True)
28 plugin_name = sa.Column(sa.Unicode)
28 plugin_name = sa.Column(sa.Unicode)
29 section = sa.Column(sa.Unicode)
29 section = sa.Column(sa.Unicode)
30 config = sa.Column(JSON, nullable=False)
30 config = sa.Column(JSON, nullable=False)
31 resource_id = sa.Column(sa.Integer(),
31 resource_id = sa.Column(sa.Integer(),
32 sa.ForeignKey('resources.resource_id',
32 sa.ForeignKey('resources.resource_id',
33 onupdate='cascade',
33 onupdate='cascade',
34 ondelete='cascade'))
34 ondelete='cascade'))
35 owner_id = sa.Column(sa.Integer(),
35 owner_id = sa.Column(sa.Integer(),
36 sa.ForeignKey('users.id', onupdate='cascade',
36 sa.ForeignKey('users.id', onupdate='cascade',
37 ondelete='cascade'))
37 ondelete='cascade'))
38
38
39 def __json__(self, request):
39 def __json__(self, request):
40 return self.get_dict()
40 return self.get_dict()
@@ -1,268 +1,268 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import logging
17 import logging
18 import sqlalchemy as sa
18 import sqlalchemy as sa
19
19
20 from datetime import datetime, timedelta
20 from datetime import datetime, timedelta
21
21
22 from pyramid.threadlocal import get_current_request
22 from pyramid.threadlocal import get_current_request
23 from sqlalchemy.dialects.postgresql import JSON
23 from sqlalchemy.dialects.postgresql import JSON
24 from ziggurat_foundations.models.base import BaseModel
24 from ziggurat_foundations.models.base import BaseModel
25
25
26 from appenlight.models import Base, get_db_session, Datastores
26 from appenlight.models import Base, get_db_session, Datastores
27 from appenlight.lib.enums import ReportType
27 from appenlight.lib.enums import ReportType
28 from appenlight.lib.rule import Rule
28 from appenlight.lib.rule import Rule
29 from appenlight.lib.redis_keys import REDIS_KEYS
29 from appenlight.lib.redis_keys import REDIS_KEYS
30 from appenlight.models.report import REPORT_TYPE_MATRIX
30 from appenlight.models.report import REPORT_TYPE_MATRIX
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
class ReportGroup(Base, BaseModel):
    """A group of similar reports (errors/slowness) for one application.

    Groups aggregate individual ``Report`` rows that share a grouping hash,
    track occurrence counters and timestamps, and mirror themselves into
    elasticsearch and redis notification structures.
    """
    __tablename__ = 'reports_groups'
    __table_args__ = {'implicit_returning': False}

    id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey('applications.resource_id',
                      onupdate='CASCADE',
                      ondelete='CASCADE'),
        nullable=False,
        index=True)
    priority = sa.Column(sa.Integer, nullable=False, index=True, default=5,
                         server_default='5')
    first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
                                server_default=sa.func.now())
    last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
                               server_default=sa.func.now())
    error = sa.Column(sa.UnicodeText(), index=True)
    grouping_hash = sa.Column(sa.String(40), default='')
    # pkeys of postprocessing actions already applied to this group
    triggered_postprocesses_ids = sa.Column(JSON(), nullable=False,
                                            default=list)
    report_type = sa.Column(sa.Integer, default=1)
    total_reports = sa.Column(sa.Integer, default=1)
    last_report = sa.Column(sa.Integer)
    occurences = sa.Column(sa.Integer, default=1)
    average_duration = sa.Column(sa.Float, default=0)
    summed_duration = sa.Column(sa.Float, default=0)
    read = sa.Column(sa.Boolean(), index=True, default=False)
    fixed = sa.Column(sa.Boolean(), index=True, default=False)
    notified = sa.Column(sa.Boolean(), index=True, default=False)
    public = sa.Column(sa.Boolean(), index=True, default=False)

    reports = sa.orm.relationship(
        'Report',
        lazy='dynamic',
        backref='report_group',
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True)

    comments = sa.orm.relationship(
        'ReportComment',
        lazy='dynamic',
        backref='report',
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        order_by="ReportComment.comment_id")

    assigned_users = sa.orm.relationship(
        'User',
        backref=sa.orm.backref(
            'assigned_reports_relation',
            lazy='dynamic',
            order_by=sa.desc(sa.text("reports_groups.id"))),
        passive_deletes=True,
        passive_updates=True,
        secondary='reports_assignments',
        order_by="User.user_name")

    stats = sa.orm.relationship(
        'ReportStat',
        lazy='dynamic',
        backref='report',
        passive_deletes=True,
        passive_updates=True)

    # most recent report of this group, joined by the ``last_report`` column
    last_report_ref = sa.orm.relationship(
        'Report',
        uselist=False,
        primaryjoin="ReportGroup.last_report == Report.id",
        foreign_keys="Report.id",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True)

    def __repr__(self):
        return '<ReportGroup id:{}>'.format(self.id)

    def get_report(self, report_id=None, public=False):
        """
        Gets report with specific id or latest report if id was not specified
        """
        from .report import Report

        if not report_id:
            return self.last_report_ref
        return self.reports.filter(Report.id == report_id).first()

    def get_public_url(self, request, _app_url=None):
        """Return the front-end URL for this group's report view."""
        url = request.route_url('/', _app_url=_app_url)
        return (url + 'ui/report/%s') % self.id

    def run_postprocessing(self, report):
        """
        Alters report group priority based on postprocessing configuration
        """
        request = get_current_request()
        get_db_session(None, self).flush()
        for action in self.application.postprocess_conf:
            get_db_session(None, self).flush()
            rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
            report_dict = report.get_dict(request)
            # only apply actions whose rule matches and that did not run yet
            if (rule_obj.match(report_dict) and
                    action.pkey not in self.triggered_postprocesses_ids):
                action.postprocess(self)
                # rebind instead of append so sqla can track JSON mutation
                self.triggered_postprocesses_ids = \
                    self.triggered_postprocesses_ids + [action.pkey]

        get_db_session(None, self).flush()
        # clamp priority into the valid 1..10 range
        if self.priority < 1:
            self.priority = 1
        if self.priority > 10:
            self.priority = 10

    def get_dict(self, request):
        """Serialize the group plus details pulled from its latest report."""
        instance_dict = super(ReportGroup, self).get_dict()
        instance_dict['server_name'] = self.get_report().tags.get(
            'server_name')
        instance_dict['view_name'] = self.get_report().tags.get('view_name')
        instance_dict['resource_name'] = self.application.resource_name
        instance_dict['report_type'] = self.get_report().report_type
        instance_dict['url_path'] = self.get_report().url_path
        instance_dict['front_url'] = self.get_report().get_public_url(request)
        # internal bookkeeping - not part of the public representation
        del instance_dict['triggered_postprocesses_ids']
        return instance_dict

    def es_doc(self):
        """Document representation indexed in elasticsearch."""
        return {
            '_id': str(self.id),
            'pg_id': str(self.id),
            'resource_id': self.resource_id,
            'error': self.error,
            'fixed': self.fixed,
            'public': self.public,
            'read': self.read,
            'priority': self.priority,
            'occurences': self.occurences,
            'average_duration': self.average_duration,
            'summed_duration': self.summed_duration,
            'first_timestamp': self.first_timestamp,
            'last_timestamp': self.last_timestamp
        }

    def set_notification_info(self, notify_10=False, notify_100=False):
        """
        Update redis notification maps for notification job
        """
        current_time = datetime.utcnow().replace(second=0, microsecond=0)
        # global app counter
        key = REDIS_KEYS['counters']['reports_per_type'].format(
            self.report_type, current_time)
        redis_pipeline = Datastores.redis.pipeline()
        redis_pipeline.incr(key)
        redis_pipeline.expire(key, 3600 * 24)
        # detailed app notification for alerts and notifications
        redis_pipeline.sadd(
            REDIS_KEYS['apps_that_had_reports'], self.resource_id)
        redis_pipeline.sadd(
            REDIS_KEYS['apps_that_had_reports_alerting'], self.resource_id)
        # only notify for exceptions here
        if self.report_type == ReportType.error:
            redis_pipeline.sadd(
                REDIS_KEYS['apps_that_had_reports'], self.resource_id)
            redis_pipeline.sadd(
                REDIS_KEYS['apps_that_had_error_reports_alerting'],
                self.resource_id)
        key = REDIS_KEYS['counters']['report_group_occurences'].format(self.id)
        redis_pipeline.incr(key)
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS['counters']['report_group_occurences_alerting'].format(
            self.id)
        redis_pipeline.incr(key)
        redis_pipeline.expire(key, 3600 * 24)

        # one-shot flags for the 10th / 100th occurrence notifications
        if notify_10:
            key = REDIS_KEYS['counters'][
                'report_group_occurences_10th'].format(self.id)
            redis_pipeline.setex(key, 3600 * 24, 1)
        if notify_100:
            key = REDIS_KEYS['counters'][
                'report_group_occurences_100th'].format(self.id)
            redis_pipeline.setex(key, 3600 * 24, 1)

        key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
            self.report_type, self.resource_id)
        redis_pipeline.sadd(key, self.id)
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS['reports_to_notify_per_type_per_app_alerting'].format(
            self.report_type, self.resource_id)
        redis_pipeline.sadd(key, self.id)
        redis_pipeline.expire(key, 3600 * 24)
        redis_pipeline.execute()

    @property
    def partition_id(self):
        # monthly elasticsearch partition name, e.g. ``rcae_r_2017_01``
        return 'rcae_r_%s' % self.first_timestamp.strftime('%Y_%m')

    def partition_range(self):
        """Return (first day of partition month, first day of next month)."""
        start_date = self.first_timestamp.date().replace(day=1)
        end_date = start_date + timedelta(days=40)
        end_date = end_date.replace(day=1)
        return start_date, end_date
240
240
241
241
def after_insert(mapper, connection, target):
    """SQLAlchemy hook: index a freshly inserted group in elasticsearch."""
    if hasattr(target, '_skip_ft_index'):
        # caller explicitly opted out of full-text indexing
        return
    doc = target.es_doc()
    doc.pop('_id', None)
    Datastores.es.index(target.partition_id, 'report_group', doc, id=target.id)
248
248
249
249
def after_update(mapper, connection, target):
    """SQLAlchemy hook: re-index an updated group in elasticsearch."""
    if hasattr(target, '_skip_ft_index'):
        # caller explicitly opted out of full-text indexing
        return
    doc = target.es_doc()
    doc.pop('_id', None)
    Datastores.es.index(target.partition_id, 'report_group', doc, id=target.id)
256
256
257
257
def after_delete(mapper, connection, target):
    """SQLAlchemy hook: purge the group and its reports from elasticsearch."""
    # delete-by-query: first the member reports, then the group document
    for doc_type, id_field in (('report', 'group_id'),
                               ('report_group', 'pg_id')):
        query = {"query": {'term': {id_field: target.id}}}
        Datastores.es.transport.perform_request(
            "DELETE",
            '/{}/{}/_query'.format(target.partition_id, doc_type),
            body=query)
264
264
265
265
# keep the elasticsearch mirror in sync with ORM lifecycle events
for _event_name, _handler in (('after_insert', after_insert),
                              ('after_update', after_update),
                              ('after_delete', after_delete)):
    sa.event.listen(ReportGroup, _event_name, _handler)
@@ -1,37 +1,37 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import sqlalchemy as sa
17 import sqlalchemy as sa
18 from datetime import datetime
18 from datetime import datetime
19 from ziggurat_foundations.models.base import BaseModel
19 from ziggurat_foundations.models.base import BaseModel
20 from sqlalchemy.dialects.postgres import JSON
20 from sqlalchemy.dialects.postgresql import JSON
21
21
22 from . import Base
22 from . import Base
23
23
24
24
class Tag(Base, BaseModel):
    """Aggregated tag value observed for a resource.

    Tracks when a (name, value) pair was first and last seen and how many
    times it occurred.
    """
    __tablename__ = 'tags'

    id = sa.Column(sa.Integer, primary_key=True)
    resource_id = sa.Column(sa.Integer,
                            sa.ForeignKey('resources.resource_id'))
    name = sa.Column(sa.Unicode(512), nullable=False)
    # tag values are arbitrary JSON-serializable data
    value = sa.Column(JSON, nullable=False)
    first_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
                                server_default=sa.func.now())
    last_timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
                               server_default=sa.func.now())
    times_seen = sa.Column(sa.Integer, nullable=False, default=0)
@@ -1,193 +1,192 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import logging
17 import logging
18 import os
18 import os
19 import pkg_resources
19 import pkg_resources
20
20
21 from datetime import datetime, timedelta
21 from datetime import datetime, timedelta
22
22
23 import psutil
23 import psutil
24 import redis
24 import redis
25
25
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from appenlight.models import DBSession
27 from appenlight.models import DBSession
28 from appenlight.models import Datastores
28 from appenlight.models import Datastores
29 from appenlight.lib.redis_keys import REDIS_KEYS
29 from appenlight.lib.redis_keys import REDIS_KEYS
30
30
31
31
def bytes2human(total):
    """Format a byte count as a short human-readable string.

    Uses binary units and one decimal place: ``G`` for >= 1 GiB,
    ``M`` for >= 1 MiB, otherwise ``K`` (even for values below 1 KiB).
    """
    for suffix, factor in (('G', 1024.0 ** 3), ('M', 1024.0 ** 2)):
        if total >= factor:
            return '{:0.1f}{}'.format(total / factor, suffix)
    # anything below a mebibyte is expressed in kibibytes
    return '{:0.1f}K'.format(total / 1024.0)
42
42
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
def _celery_queue_lengths(request):
    """Return (reports, logs, metrics, default) queue lengths for a redis
    celery broker; all zeros for any other broker type."""
    if 'redis' not in request.registry.settings['celery.broker_type']:
        return 0, 0, 0, 0
    redis_client = redis.StrictRedis.from_url(
        request.registry.settings['celery.broker_url'])
    return (redis_client.llen('reports'),
            redis_client.llen('logs'),
            redis_client.llen('metrics'),
            redis_client.llen('default'))


def _self_limits():
    """Resource limits (open files, locked memory) of the current process."""
    def replace_inf(val):
        return val if val != psutil.RLIM_INFINITY else 'unlimited'

    p = psutil.Process()
    fd = p.rlimit(psutil.RLIMIT_NOFILE)
    memlock = p.rlimit(psutil.RLIMIT_MEMLOCK)
    return {
        'fds': {'soft': replace_inf(fd[0]),
                'hard': replace_inf(fd[1])},
        'memlock': {'soft': replace_inf(memlock[0]),
                    'hard': replace_inf(memlock[1])},
    }


def _disk_info():
    """Usage stats for all mounted physical partitions."""
    disks = []
    for part in psutil.disk_partitions(all=False):
        if os.name == 'nt':
            # skip optical drives / unmounted volumes on windows
            if 'cdrom' in part.opts or part.fstype == '':
                continue
        usage = psutil.disk_usage(part.mountpoint)
        disks.append({
            'device': part.device,
            'total': bytes2human(usage.total),
            'used': bytes2human(usage.used),
            'free': bytes2human(usage.free),
            'percentage': int(usage.percent),
            'mountpoint': part.mountpoint,
            'fstype': part.fstype
        })
    return disks


def _memory_info():
    """Virtual and swap memory stats, human formatted."""
    memory_v = psutil.virtual_memory()
    memory_s = psutil.swap_memory()
    return {
        'total': bytes2human(memory_v.total),
        'available': bytes2human(memory_v.available),
        'percentage': memory_v.percent,
        'used': bytes2human(memory_v.used),
        'free': bytes2human(memory_v.free),
        'active': bytes2human(memory_v.active),
        'inactive': bytes2human(memory_v.inactive),
        'buffers': bytes2human(memory_v.buffers),
        'cached': bytes2human(memory_v.cached),
        'swap_total': bytes2human(memory_s.total),
        'swap_used': bytes2human(memory_s.used)
    }


def _process_info(min_mem=1024 * 1024 * 40):  # 40MB
    """Processes using at least *min_mem* RSS, sorted by memory share."""
    process_info = []
    for p in psutil.process_iter():
        try:
            mem_used = p.memory_info().rss
            if mem_used < min_mem:
                continue
            process_info.append({'owner': p.username(),
                                 'pid': p.pid,
                                 'cpu': round(p.cpu_percent(interval=0), 1),
                                 'mem_percentage': round(p.memory_percent(), 1),
                                 'mem_usage': bytes2human(mem_used),
                                 'name': p.name(),
                                 'command': ' '.join(p.cmdline())
                                 })
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # processes may exit or be access-restricted while we iterate
            continue
    return sorted(process_info, key=lambda x: x['mem_percentage'],
                  reverse=True)


def _db_table_sizes():
    """Postgres table sizes, largest first."""
    db_size_query = '''
    SELECT tablename, pg_total_relation_size(tablename::text) size
    FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
    tablename NOT LIKE 'sql_%' ORDER BY size DESC;'''

    db_tables = []
    for row in DBSession.execute(db_size_query):
        db_tables.append({"size_human": bytes2human(row.size),
                          "table_name": row.tablename})
    return db_tables


def _es_index_sizes():
    """Primary-shard store size per elasticsearch index."""
    es_indices = []
    # FIX: the metric list previously contained the single string
    # 'store, docs' which is not a valid pair of metrics - request the
    # 'store' and 'docs' stats as two separate entries
    result = Datastores.es.indices.stats(metric=['store', 'docs'])
    for ix, stats in result['indices'].items():
        size = stats['primaries']['store']['size_in_bytes']
        es_indices.append({'name': ix,
                           'size': size,
                           'size_human': bytes2human(size)})
    return es_indices


@view_config(route_name='section_view',
             match_param=['section=admin_section', 'view=system'],
             renderer='json', permission='root_administration')
def system(request):
    """Root-admin JSON endpoint returning a system health snapshot.

    Aggregates per-minute processing counters from redis, celery queue
    backlogs, process resource limits, disk/memory/load stats, the largest
    resident processes, postgres table sizes, elasticsearch index sizes and
    the installed package list.
    """
    # counters are bucketed per minute; look at the last *complete* minute
    current_time = datetime.utcnow(). \
        replace(second=0, microsecond=0) - timedelta(minutes=1)
    # global app counters (0 when the key is absent/expired)
    processed_reports = request.registry.redis_conn.get(
        REDIS_KEYS['counters']['reports_per_minute'].format(current_time))
    processed_reports = int(processed_reports) if processed_reports else 0
    processed_logs = request.registry.redis_conn.get(
        REDIS_KEYS['counters']['logs_per_minute'].format(current_time))
    processed_logs = int(processed_logs) if processed_logs else 0
    processed_metrics = request.registry.redis_conn.get(
        REDIS_KEYS['counters']['metrics_per_minute'].format(current_time))
    processed_metrics = int(processed_metrics) if processed_metrics else 0

    waiting_reports, waiting_logs, waiting_metrics, waiting_other = \
        _celery_queue_lengths(request)

    self_info = _self_limits()
    disks = _disk_info()
    memory = _memory_info()
    system_load = os.getloadavg()
    process_info = _process_info()
    db_tables = _db_table_sizes()
    es_indices = _es_index_sizes()

    # installed python packages
    packages = ({'name': p.project_name, 'version': p.version}
                for p in pkg_resources.working_set)

    return {'db_tables': db_tables,
            'es_indices': sorted(es_indices,
                                 key=lambda x: x['size'], reverse=True),
            'process_info': process_info,
            'system_load': system_load,
            'disks': disks,
            'memory': memory,
            'packages': sorted(packages, key=lambda x: x['name'].lower()),
            'current_time': current_time,
            'queue_stats': {
                'processed_reports': processed_reports,
                'processed_logs': processed_logs,
                'processed_metrics': processed_metrics,
                'waiting_reports': waiting_reports,
                'waiting_logs': waiting_logs,
                'waiting_metrics': waiting_metrics,
                'waiting_other': waiting_other
            },
            'self_info': self_info
            }
@@ -1,760 +1,759 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import copy
17 import copy
18 import json
18 import json
19 import logging
19 import logging
20 import six
21
20
22 from datetime import datetime, timedelta
21 from datetime import datetime, timedelta
23
22
24 import colander
23 import colander
25 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
24 from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity
26 from pyramid.view import view_config
25 from pyramid.view import view_config
27 from webob.multidict import MultiDict
26 from webob.multidict import MultiDict
28 from zope.sqlalchemy import mark_changed
27 from zope.sqlalchemy import mark_changed
29 from ziggurat_foundations.permissions import ANY_PERMISSION
28 from ziggurat_foundations.permissions import ANY_PERMISSION
30
29
31 import appenlight.forms as forms
30 import appenlight.forms as forms
32 from appenlight.models import DBSession
31 from appenlight.models import DBSession
33 from appenlight.models.resource import Resource
32 from appenlight.models.resource import Resource
34 from appenlight.models.application import Application
33 from appenlight.models.application import Application
35 from appenlight.models.application_postprocess_conf import \
34 from appenlight.models.application_postprocess_conf import \
36 ApplicationPostprocessConf
35 ApplicationPostprocessConf
37 from ziggurat_foundations.models.services.user import UserService
36 from ziggurat_foundations.models.services.user import UserService
38 from ziggurat_foundations.models.services.resource import ResourceService
37 from ziggurat_foundations.models.services.resource import ResourceService
39 from ziggurat_foundations.models.services.user_resource_permission import UserResourcePermissionService
38 from ziggurat_foundations.models.services.user_resource_permission import UserResourcePermissionService
40 from appenlight.models.user_resource_permission import UserResourcePermission
39 from appenlight.models.user_resource_permission import UserResourcePermission
41 from appenlight.models.group_resource_permission import GroupResourcePermission
40 from appenlight.models.group_resource_permission import GroupResourcePermission
42 from appenlight.models.services.application import ApplicationService
41 from appenlight.models.services.application import ApplicationService
43 from appenlight.models.services.application_postprocess_conf import \
42 from appenlight.models.services.application_postprocess_conf import \
44 ApplicationPostprocessConfService
43 ApplicationPostprocessConfService
45 from appenlight.models.services.group import GroupService
44 from appenlight.models.services.group import GroupService
46 from appenlight.models.services.group_resource_permission import \
45 from appenlight.models.services.group_resource_permission import \
47 GroupResourcePermissionService
46 GroupResourcePermissionService
48 from appenlight.models.services.request_metric import RequestMetricService
47 from appenlight.models.services.request_metric import RequestMetricService
49 from appenlight.models.services.report_group import ReportGroupService
48 from appenlight.models.services.report_group import ReportGroupService
50 from appenlight.models.services.slow_call import SlowCallService
49 from appenlight.models.services.slow_call import SlowCallService
51 from appenlight.lib import helpers as h
50 from appenlight.lib import helpers as h
52 from appenlight.lib.utils import build_filter_settings_from_query_dict
51 from appenlight.lib.utils import build_filter_settings_from_query_dict
53 from appenlight.security import RootFactory
52 from appenlight.security import RootFactory
54 from appenlight.models.report import REPORT_TYPE_MATRIX
53 from appenlight.models.report import REPORT_TYPE_MATRIX
55 from appenlight.validators import build_rule_schema
54 from appenlight.validators import build_rule_schema
56
55
57 _ = str
56 _ = str
58
57
59 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
60
59
61
60
62 def app_not_found(request, id):
61 def app_not_found(request, id):
63 """
62 """
64 Redirects on non found and sets a flash message
63 Redirects on non found and sets a flash message
65 """
64 """
66 request.session.flash(_('Application not found'), 'warning')
65 request.session.flash(_('Application not found'), 'warning')
67 return HTTPFound(
66 return HTTPFound(
68 location=request.route_url('applications', action='index'))
67 location=request.route_url('applications', action='index'))
69
68
70
69
71 @view_config(route_name='applications_no_id',
70 @view_config(route_name='applications_no_id',
72 renderer='json', request_method="GET", permission='authenticated')
71 renderer='json', request_method="GET", permission='authenticated')
73 def applications_list(request):
72 def applications_list(request):
74 """
73 """
75 Applications list
74 Applications list
76
75
77 if query params contain ?type=foo, it will list applications
76 if query params contain ?type=foo, it will list applications
78 with one of those permissions for user,
77 with one of those permissions for user,
79 otherwise only list of owned applications will
78 otherwise only list of owned applications will
80 be returned
79 be returned
81
80
82 appending ?root_list while being administration will allow to list all
81 appending ?root_list while being administration will allow to list all
83 applications in the system
82 applications in the system
84
83
85 """
84 """
86 is_root = request.has_permission('root_administration',
85 is_root = request.has_permission('root_administration',
87 RootFactory(request))
86 RootFactory(request))
88 if is_root and request.GET.get('root_list'):
87 if is_root and request.GET.get('root_list'):
89 resources = Resource.all().order_by(Resource.resource_name)
88 resources = Resource.all().order_by(Resource.resource_name)
90 resource_type = request.GET.get('resource_type', 'application')
89 resource_type = request.GET.get('resource_type', 'application')
91 if resource_type:
90 if resource_type:
92 resources = resources.filter(
91 resources = resources.filter(
93 Resource.resource_type == resource_type)
92 Resource.resource_type == resource_type)
94 else:
93 else:
95 permissions = request.params.getall('permission')
94 permissions = request.params.getall('permission')
96 if permissions:
95 if permissions:
97 resources = UserService.resources_with_perms(
96 resources = UserService.resources_with_perms(
98 request.user,
97 request.user,
99 permissions,
98 permissions,
100 resource_types=[request.GET.get('resource_type',
99 resource_types=[request.GET.get('resource_type',
101 'application')])
100 'application')])
102 else:
101 else:
103 resources = request.user.resources.filter(
102 resources = request.user.resources.filter(
104 Application.resource_type == request.GET.get(
103 Application.resource_type == request.GET.get(
105 'resource_type',
104 'resource_type',
106 'application'))
105 'application'))
107 return [r.get_dict(include_keys=['resource_id', 'resource_name', 'domains',
106 return [r.get_dict(include_keys=['resource_id', 'resource_name', 'domains',
108 'owner_user_name', 'owner_group_name'])
107 'owner_user_name', 'owner_group_name'])
109 for
108 for
110 r in resources]
109 r in resources]
111
110
112
111
113 @view_config(route_name='applications', renderer='json',
112 @view_config(route_name='applications', renderer='json',
114 request_method="GET", permission='view')
113 request_method="GET", permission='view')
115 def application_GET(request):
114 def application_GET(request):
116 resource = request.context.resource
115 resource = request.context.resource
117 include_sensitive_info = False
116 include_sensitive_info = False
118 if request.has_permission('edit'):
117 if request.has_permission('edit'):
119 include_sensitive_info = True
118 include_sensitive_info = True
120 resource_dict = resource.get_dict(
119 resource_dict = resource.get_dict(
121 include_perms=include_sensitive_info,
120 include_perms=include_sensitive_info,
122 include_processing_rules=include_sensitive_info)
121 include_processing_rules=include_sensitive_info)
123 return resource_dict
122 return resource_dict
124
123
125
124
126 @view_config(route_name='applications_no_id', request_method="POST",
125 @view_config(route_name='applications_no_id', request_method="POST",
127 renderer='json', permission='create_resources')
126 renderer='json', permission='create_resources')
128 def application_create(request):
127 def application_create(request):
129 """
128 """
130 Creates new application instances
129 Creates new application instances
131 """
130 """
132 user = request.user
131 user = request.user
133 form = forms.ApplicationCreateForm(MultiDict(request.unsafe_json_body),
132 form = forms.ApplicationCreateForm(MultiDict(request.unsafe_json_body),
134 csrf_context=request)
133 csrf_context=request)
135 if form.validate():
134 if form.validate():
136 session = DBSession()
135 session = DBSession()
137 resource = Application()
136 resource = Application()
138 DBSession.add(resource)
137 DBSession.add(resource)
139 form.populate_obj(resource)
138 form.populate_obj(resource)
140 resource.api_key = resource.generate_api_key()
139 resource.api_key = resource.generate_api_key()
141 user.resources.append(resource)
140 user.resources.append(resource)
142 request.session.flash(_('Application created'))
141 request.session.flash(_('Application created'))
143 DBSession.flush()
142 DBSession.flush()
144 mark_changed(session)
143 mark_changed(session)
145 else:
144 else:
146 return HTTPUnprocessableEntity(body=form.errors_json)
145 return HTTPUnprocessableEntity(body=form.errors_json)
147
146
148 return resource.get_dict()
147 return resource.get_dict()
149
148
150
149
151 @view_config(route_name='applications', request_method="PATCH",
150 @view_config(route_name='applications', request_method="PATCH",
152 renderer='json', permission='edit')
151 renderer='json', permission='edit')
153 def application_update(request):
152 def application_update(request):
154 """
153 """
155 Updates main application configuration
154 Updates main application configuration
156 """
155 """
157 resource = request.context.resource
156 resource = request.context.resource
158 if not resource:
157 if not resource:
159 return app_not_found()
158 return app_not_found()
160
159
161 # disallow setting permanent storage by non-admins
160 # disallow setting permanent storage by non-admins
162 # use default/non-resource based context for this check
161 # use default/non-resource based context for this check
163 req_dict = copy.copy(request.unsafe_json_body)
162 req_dict = copy.copy(request.unsafe_json_body)
164 if not request.has_permission('root_administration', RootFactory(request)):
163 if not request.has_permission('root_administration', RootFactory(request)):
165 req_dict['allow_permanent_storage'] = ''
164 req_dict['allow_permanent_storage'] = ''
166 if not req_dict.get('uptime_url'):
165 if not req_dict.get('uptime_url'):
167 # needed cause validator is still triggered by default
166 # needed cause validator is still triggered by default
168 req_dict.pop('uptime_url', '')
167 req_dict.pop('uptime_url', '')
169 application_form = forms.ApplicationUpdateForm(MultiDict(req_dict),
168 application_form = forms.ApplicationUpdateForm(MultiDict(req_dict),
170 csrf_context=request)
169 csrf_context=request)
171 if application_form.validate():
170 if application_form.validate():
172 application_form.populate_obj(resource)
171 application_form.populate_obj(resource)
173 request.session.flash(_('Application updated'))
172 request.session.flash(_('Application updated'))
174 else:
173 else:
175 return HTTPUnprocessableEntity(body=application_form.errors_json)
174 return HTTPUnprocessableEntity(body=application_form.errors_json)
176
175
177 include_sensitive_info = False
176 include_sensitive_info = False
178 if request.has_permission('edit'):
177 if request.has_permission('edit'):
179 include_sensitive_info = True
178 include_sensitive_info = True
180 resource_dict = resource.get_dict(
179 resource_dict = resource.get_dict(
181 include_perms=include_sensitive_info,
180 include_perms=include_sensitive_info,
182 include_processing_rules=include_sensitive_info)
181 include_processing_rules=include_sensitive_info)
183 return resource_dict
182 return resource_dict
184
183
185
184
186 @view_config(route_name='applications_property', match_param='key=api_key',
185 @view_config(route_name='applications_property', match_param='key=api_key',
187 request_method="POST", renderer='json',
186 request_method="POST", renderer='json',
188 permission='delete')
187 permission='delete')
189 def application_regenerate_key(request):
188 def application_regenerate_key(request):
190 """
189 """
191 Regenerates API keys for application
190 Regenerates API keys for application
192 """
191 """
193 resource = request.context.resource
192 resource = request.context.resource
194
193
195 form = forms.CheckPasswordForm(MultiDict(request.unsafe_json_body),
194 form = forms.CheckPasswordForm(MultiDict(request.unsafe_json_body),
196 csrf_context=request)
195 csrf_context=request)
197 form.password.user = request.user
196 form.password.user = request.user
198
197
199 if form.validate():
198 if form.validate():
200 resource.api_key = resource.generate_api_key()
199 resource.api_key = resource.generate_api_key()
201 resource.public_key = resource.generate_api_key()
200 resource.public_key = resource.generate_api_key()
202 msg = 'API keys regenerated - please update your application config.'
201 msg = 'API keys regenerated - please update your application config.'
203 request.session.flash(_(msg))
202 request.session.flash(_(msg))
204 else:
203 else:
205 return HTTPUnprocessableEntity(body=form.errors_json)
204 return HTTPUnprocessableEntity(body=form.errors_json)
206
205
207 if request.has_permission('edit'):
206 if request.has_permission('edit'):
208 include_sensitive_info = True
207 include_sensitive_info = True
209 resource_dict = resource.get_dict(
208 resource_dict = resource.get_dict(
210 include_perms=include_sensitive_info,
209 include_perms=include_sensitive_info,
211 include_processing_rules=include_sensitive_info)
210 include_processing_rules=include_sensitive_info)
212 return resource_dict
211 return resource_dict
213
212
214
213
215 @view_config(route_name='applications_property',
214 @view_config(route_name='applications_property',
216 match_param='key=delete_resource',
215 match_param='key=delete_resource',
217 request_method="PATCH", renderer='json', permission='delete')
216 request_method="PATCH", renderer='json', permission='delete')
218 def application_remove(request):
217 def application_remove(request):
219 """
218 """
220 Removes application resources
219 Removes application resources
221 """
220 """
222 resource = request.context.resource
221 resource = request.context.resource
223 # we need polymorphic object here, to properly launch sqlalchemy events
222 # we need polymorphic object here, to properly launch sqlalchemy events
224 resource = ApplicationService.by_id(resource.resource_id)
223 resource = ApplicationService.by_id(resource.resource_id)
225 form = forms.CheckPasswordForm(MultiDict(request.safe_json_body or {}),
224 form = forms.CheckPasswordForm(MultiDict(request.safe_json_body or {}),
226 csrf_context=request)
225 csrf_context=request)
227 form.password.user = request.user
226 form.password.user = request.user
228 if form.validate():
227 if form.validate():
229 DBSession.delete(resource)
228 DBSession.delete(resource)
230 request.session.flash(_('Application removed'))
229 request.session.flash(_('Application removed'))
231 else:
230 else:
232 return HTTPUnprocessableEntity(body=form.errors_json)
231 return HTTPUnprocessableEntity(body=form.errors_json)
233
232
234 return True
233 return True
235
234
236
235
237 @view_config(route_name='applications_property', match_param='key=owner',
236 @view_config(route_name='applications_property', match_param='key=owner',
238 request_method="PATCH", renderer='json', permission='delete')
237 request_method="PATCH", renderer='json', permission='delete')
239 def application_ownership_transfer(request):
238 def application_ownership_transfer(request):
240 """
239 """
241 Allows application owner to transfer application ownership to other user
240 Allows application owner to transfer application ownership to other user
242 """
241 """
243 resource = request.context.resource
242 resource = request.context.resource
244 form = forms.ChangeApplicationOwnerForm(
243 form = forms.ChangeApplicationOwnerForm(
245 MultiDict(request.safe_json_body or {}), csrf_context=request)
244 MultiDict(request.safe_json_body or {}), csrf_context=request)
246 form.password.user = request.user
245 form.password.user = request.user
247 if form.validate():
246 if form.validate():
248 user = UserService.by_user_name(form.user_name.data)
247 user = UserService.by_user_name(form.user_name.data)
249 user.resources.append(resource)
248 user.resources.append(resource)
250 # remove integrations to not leak security data of external applications
249 # remove integrations to not leak security data of external applications
251 for integration in resource.integrations[:]:
250 for integration in resource.integrations[:]:
252 resource.integrations.remove(integration)
251 resource.integrations.remove(integration)
253 request.session.flash(_('Application transfered'))
252 request.session.flash(_('Application transfered'))
254 else:
253 else:
255 return HTTPUnprocessableEntity(body=form.errors_json)
254 return HTTPUnprocessableEntity(body=form.errors_json)
256 return True
255 return True
257
256
258
257
259 @view_config(route_name='applications_property',
258 @view_config(route_name='applications_property',
260 match_param='key=postprocessing_rules', renderer='json',
259 match_param='key=postprocessing_rules', renderer='json',
261 request_method='POST', permission='edit')
260 request_method='POST', permission='edit')
262 def applications_postprocess_POST(request):
261 def applications_postprocess_POST(request):
263 """
262 """
264 Creates new postprocessing rules for applications
263 Creates new postprocessing rules for applications
265 """
264 """
266 resource = request.context.resource
265 resource = request.context.resource
267 conf = ApplicationPostprocessConf()
266 conf = ApplicationPostprocessConf()
268 conf.do = 'postprocess'
267 conf.do = 'postprocess'
269 conf.new_value = '1'
268 conf.new_value = '1'
270 resource.postprocess_conf.append(conf)
269 resource.postprocess_conf.append(conf)
271 DBSession.flush()
270 DBSession.flush()
272 return conf.get_dict()
271 return conf.get_dict()
273
272
274
273
275 @view_config(route_name='applications_property',
274 @view_config(route_name='applications_property',
276 match_param='key=postprocessing_rules', renderer='json',
275 match_param='key=postprocessing_rules', renderer='json',
277 request_method='PATCH', permission='edit')
276 request_method='PATCH', permission='edit')
278 def applications_postprocess_PATCH(request):
277 def applications_postprocess_PATCH(request):
279 """
278 """
280 Creates new postprocessing rules for applications
279 Creates new postprocessing rules for applications
281 """
280 """
282 json_body = request.unsafe_json_body
281 json_body = request.unsafe_json_body
283
282
284 schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX)
283 schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX)
285 try:
284 try:
286 schema.deserialize(json_body['rule'])
285 schema.deserialize(json_body['rule'])
287 except colander.Invalid as exc:
286 except colander.Invalid as exc:
288 return HTTPUnprocessableEntity(body=json.dumps(exc.asdict()))
287 return HTTPUnprocessableEntity(body=json.dumps(exc.asdict()))
289
288
290 resource = request.context.resource
289 resource = request.context.resource
291 conf = ApplicationPostprocessConfService.by_pkey_and_resource_id(
290 conf = ApplicationPostprocessConfService.by_pkey_and_resource_id(
292 json_body['pkey'], resource.resource_id)
291 json_body['pkey'], resource.resource_id)
293 conf.rule = request.unsafe_json_body['rule']
292 conf.rule = request.unsafe_json_body['rule']
294 # for now hardcode int since we dont support anything else so far
293 # for now hardcode int since we dont support anything else so far
295 conf.new_value = int(request.unsafe_json_body['new_value'])
294 conf.new_value = int(request.unsafe_json_body['new_value'])
296 return conf.get_dict()
295 return conf.get_dict()
297
296
298
297
299 @view_config(route_name='applications_property',
298 @view_config(route_name='applications_property',
300 match_param='key=postprocessing_rules', renderer='json',
299 match_param='key=postprocessing_rules', renderer='json',
301 request_method='DELETE', permission='edit')
300 request_method='DELETE', permission='edit')
302 def applications_postprocess_DELETE(request):
301 def applications_postprocess_DELETE(request):
303 """
302 """
304 Removes application postprocessing rules
303 Removes application postprocessing rules
305 """
304 """
306 form = forms.ReactorForm(request.POST, csrf_context=request)
305 form = forms.ReactorForm(request.POST, csrf_context=request)
307 resource = request.context.resource
306 resource = request.context.resource
308 if form.validate():
307 if form.validate():
309 for postprocess_conf in resource.postprocess_conf:
308 for postprocess_conf in resource.postprocess_conf:
310 if postprocess_conf.pkey == int(request.GET['pkey']):
309 if postprocess_conf.pkey == int(request.GET['pkey']):
311 # remove rule
310 # remove rule
312 DBSession.delete(postprocess_conf)
311 DBSession.delete(postprocess_conf)
313 return True
312 return True
314
313
315
314
316 @view_config(route_name='applications_property',
315 @view_config(route_name='applications_property',
317 match_param='key=report_graphs', renderer='json',
316 match_param='key=report_graphs', renderer='json',
318 permission='view')
317 permission='view')
319 @view_config(route_name='applications_property',
318 @view_config(route_name='applications_property',
320 match_param='key=slow_report_graphs', renderer='json',
319 match_param='key=slow_report_graphs', renderer='json',
321 permission='view')
320 permission='view')
322 def get_application_report_stats(request):
321 def get_application_report_stats(request):
323 query_params = request.GET.mixed()
322 query_params = request.GET.mixed()
324 query_params['resource'] = (request.context.resource.resource_id,)
323 query_params['resource'] = (request.context.resource.resource_id,)
325
324
326 filter_settings = build_filter_settings_from_query_dict(request,
325 filter_settings = build_filter_settings_from_query_dict(request,
327 query_params)
326 query_params)
328 if not filter_settings.get('end_date'):
327 if not filter_settings.get('end_date'):
329 end_date = datetime.utcnow().replace(microsecond=0, second=0)
328 end_date = datetime.utcnow().replace(microsecond=0, second=0)
330 filter_settings['end_date'] = end_date
329 filter_settings['end_date'] = end_date
331
330
332 if not filter_settings.get('start_date'):
331 if not filter_settings.get('start_date'):
333 delta = timedelta(hours=1)
332 delta = timedelta(hours=1)
334 filter_settings['start_date'] = filter_settings['end_date'] - delta
333 filter_settings['start_date'] = filter_settings['end_date'] - delta
335
334
336 result = ReportGroupService.get_report_stats(request, filter_settings)
335 result = ReportGroupService.get_report_stats(request, filter_settings)
337 return result
336 return result
338
337
339
338
340 @view_config(route_name='applications_property',
339 @view_config(route_name='applications_property',
341 match_param='key=metrics_graphs', renderer='json',
340 match_param='key=metrics_graphs', renderer='json',
342 permission='view')
341 permission='view')
343 def metrics_graphs(request):
342 def metrics_graphs(request):
344 """
343 """
345 Handles metric dashboard graphs
344 Handles metric dashboard graphs
346 Returns information for time/tier breakdown
345 Returns information for time/tier breakdown
347 """
346 """
348 query_params = request.GET.mixed()
347 query_params = request.GET.mixed()
349 query_params['resource'] = (request.context.resource.resource_id,)
348 query_params['resource'] = (request.context.resource.resource_id,)
350
349
351 filter_settings = build_filter_settings_from_query_dict(request,
350 filter_settings = build_filter_settings_from_query_dict(request,
352 query_params)
351 query_params)
353
352
354 if not filter_settings.get('end_date'):
353 if not filter_settings.get('end_date'):
355 end_date = datetime.utcnow().replace(microsecond=0, second=0)
354 end_date = datetime.utcnow().replace(microsecond=0, second=0)
356 filter_settings['end_date'] = end_date
355 filter_settings['end_date'] = end_date
357
356
358 delta = timedelta(hours=1)
357 delta = timedelta(hours=1)
359 if not filter_settings.get('start_date'):
358 if not filter_settings.get('start_date'):
360 filter_settings['start_date'] = filter_settings['end_date'] - delta
359 filter_settings['start_date'] = filter_settings['end_date'] - delta
361 if filter_settings['end_date'] <= filter_settings['start_date']:
360 if filter_settings['end_date'] <= filter_settings['start_date']:
362 filter_settings['end_date'] = filter_settings['start_date']
361 filter_settings['end_date'] = filter_settings['start_date']
363
362
364 delta = filter_settings['end_date'] - filter_settings['start_date']
363 delta = filter_settings['end_date'] - filter_settings['start_date']
365 if delta < h.time_deltas.get('12h')['delta']:
364 if delta < h.time_deltas.get('12h')['delta']:
366 divide_by_min = 1
365 divide_by_min = 1
367 elif delta <= h.time_deltas.get('3d')['delta']:
366 elif delta <= h.time_deltas.get('3d')['delta']:
368 divide_by_min = 5.0
367 divide_by_min = 5.0
369 elif delta >= h.time_deltas.get('2w')['delta']:
368 elif delta >= h.time_deltas.get('2w')['delta']:
370 divide_by_min = 60.0 * 24
369 divide_by_min = 60.0 * 24
371 else:
370 else:
372 divide_by_min = 60.0
371 divide_by_min = 60.0
373
372
374 results = RequestMetricService.get_metrics_stats(
373 results = RequestMetricService.get_metrics_stats(
375 request, filter_settings)
374 request, filter_settings)
376 # because requests are PER SECOND / we divide 1 min stats by 60
375 # because requests are PER SECOND / we divide 1 min stats by 60
377 # requests are normalized to 1 min average
376 # requests are normalized to 1 min average
378 # results are average seconds time spent per request in specific area
377 # results are average seconds time spent per request in specific area
379 for point in results:
378 for point in results:
380 if point['requests']:
379 if point['requests']:
381 point['main'] = (point['main'] - point['sql'] -
380 point['main'] = (point['main'] - point['sql'] -
382 point['nosql'] - point['remote'] -
381 point['nosql'] - point['remote'] -
383 point['tmpl'] -
382 point['tmpl'] -
384 point['custom']) / point['requests']
383 point['custom']) / point['requests']
385 point['sql'] = point['sql'] / point['requests']
384 point['sql'] = point['sql'] / point['requests']
386 point['nosql'] = point['nosql'] / point['requests']
385 point['nosql'] = point['nosql'] / point['requests']
387 point['remote'] = point['remote'] / point['requests']
386 point['remote'] = point['remote'] / point['requests']
388 point['tmpl'] = point['tmpl'] / point['requests']
387 point['tmpl'] = point['tmpl'] / point['requests']
389 point['custom'] = point['custom'] / point['requests']
388 point['custom'] = point['custom'] / point['requests']
390 point['requests_2'] = point['requests'] / 60.0 / divide_by_min
389 point['requests_2'] = point['requests'] / 60.0 / divide_by_min
391
390
392 selected_types = ['main', 'sql', 'nosql', 'remote', 'tmpl', 'custom']
391 selected_types = ['main', 'sql', 'nosql', 'remote', 'tmpl', 'custom']
393
392
394 for point in results:
393 for point in results:
395 for stat_type in selected_types:
394 for stat_type in selected_types:
396 point[stat_type] = round(point.get(stat_type, 0), 3)
395 point[stat_type] = round(point.get(stat_type, 0), 3)
397
396
398 return results
397 return results
399
398
400
399
401 @view_config(route_name='applications_property',
400 @view_config(route_name='applications_property',
402 match_param='key=response_graphs', renderer='json',
401 match_param='key=response_graphs', renderer='json',
403 permission='view')
402 permission='view')
404 def response_graphs(request):
403 def response_graphs(request):
405 """
404 """
406 Handles dashboard infomation for avg. response time split by today,
405 Handles dashboard infomation for avg. response time split by today,
407 2 days ago and week ago
406 2 days ago and week ago
408 """
407 """
409 query_params = request.GET.mixed()
408 query_params = request.GET.mixed()
410 query_params['resource'] = (request.context.resource.resource_id,)
409 query_params['resource'] = (request.context.resource.resource_id,)
411
410
412 filter_settings = build_filter_settings_from_query_dict(request,
411 filter_settings = build_filter_settings_from_query_dict(request,
413 query_params)
412 query_params)
414
413
415 if not filter_settings.get('end_date'):
414 if not filter_settings.get('end_date'):
416 end_date = datetime.utcnow().replace(microsecond=0, second=0)
415 end_date = datetime.utcnow().replace(microsecond=0, second=0)
417 filter_settings['end_date'] = end_date
416 filter_settings['end_date'] = end_date
418
417
419 delta = timedelta(hours=1)
418 delta = timedelta(hours=1)
420 if not filter_settings.get('start_date'):
419 if not filter_settings.get('start_date'):
421 filter_settings['start_date'] = filter_settings['end_date'] - delta
420 filter_settings['start_date'] = filter_settings['end_date'] - delta
422
421
423 result_now = RequestMetricService.get_metrics_stats(
422 result_now = RequestMetricService.get_metrics_stats(
424 request, filter_settings)
423 request, filter_settings)
425
424
426 filter_settings_2d = filter_settings.copy()
425 filter_settings_2d = filter_settings.copy()
427 filter_settings_2d['start_date'] = filter_settings['start_date'] - \
426 filter_settings_2d['start_date'] = filter_settings['start_date'] - \
428 timedelta(days=2)
427 timedelta(days=2)
429 filter_settings_2d['end_date'] = filter_settings['end_date'] - \
428 filter_settings_2d['end_date'] = filter_settings['end_date'] - \
430 timedelta(days=2)
429 timedelta(days=2)
431 result_2d = RequestMetricService.get_metrics_stats(
430 result_2d = RequestMetricService.get_metrics_stats(
432 request, filter_settings_2d)
431 request, filter_settings_2d)
433
432
434 filter_settings_7d = filter_settings.copy()
433 filter_settings_7d = filter_settings.copy()
435 filter_settings_7d['start_date'] = filter_settings['start_date'] - \
434 filter_settings_7d['start_date'] = filter_settings['start_date'] - \
436 timedelta(days=7)
435 timedelta(days=7)
437 filter_settings_7d['end_date'] = filter_settings['end_date'] - \
436 filter_settings_7d['end_date'] = filter_settings['end_date'] - \
438 timedelta(days=7)
437 timedelta(days=7)
439 result_7d = RequestMetricService.get_metrics_stats(
438 result_7d = RequestMetricService.get_metrics_stats(
440 request, filter_settings_7d)
439 request, filter_settings_7d)
441
440
442 plot_data = []
441 plot_data = []
443
442
444 for item in result_now:
443 for item in result_now:
445 point = {'x': item['x'], 'today': 0, 'days_ago_2': 0,
444 point = {'x': item['x'], 'today': 0, 'days_ago_2': 0,
446 'days_ago_7': 0}
445 'days_ago_7': 0}
447 if item['requests']:
446 if item['requests']:
448 point['today'] = round(item['main'] / item['requests'], 3)
447 point['today'] = round(item['main'] / item['requests'], 3)
449 plot_data.append(point)
448 plot_data.append(point)
450
449
451 for i, item in enumerate(result_2d[:len(plot_data)]):
450 for i, item in enumerate(result_2d[:len(plot_data)]):
452 plot_data[i]['days_ago_2'] = 0
451 plot_data[i]['days_ago_2'] = 0
453 point = result_2d[i]
452 point = result_2d[i]
454 if point['requests']:
453 if point['requests']:
455 plot_data[i]['days_ago_2'] = round(point['main'] /
454 plot_data[i]['days_ago_2'] = round(point['main'] /
456 point['requests'], 3)
455 point['requests'], 3)
457
456
458 for i, item in enumerate(result_7d[:len(plot_data)]):
457 for i, item in enumerate(result_7d[:len(plot_data)]):
459 plot_data[i]['days_ago_7'] = 0
458 plot_data[i]['days_ago_7'] = 0
460 point = result_7d[i]
459 point = result_7d[i]
461 if point['requests']:
460 if point['requests']:
462 plot_data[i]['days_ago_7'] = round(point['main'] /
461 plot_data[i]['days_ago_7'] = round(point['main'] /
463 point['requests'], 3)
462 point['requests'], 3)
464
463
465 return plot_data
464 return plot_data
466
465
467
466
@view_config(route_name='applications_property',
             match_param='key=requests_graphs', renderer='json',
             permission='view')
def requests_graphs(request):
    """
    Handles dashboard information for the requests-per-second graph of
    the application over the selected time interval.

    Raw per-bucket request counts returned by the metrics service are
    normalized to requests/second before being returned.
    """
    query_params = request.GET.mixed()
    query_params['resource'] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request,
                                                            query_params)

    # default window: last hour, ending at the current full minute
    if not filter_settings.get('end_date'):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings['end_date'] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get('start_date'):
        filter_settings['start_date'] = filter_settings['end_date'] - delta

    result_now = RequestMetricService.get_metrics_stats(
        request, filter_settings)

    # pick the divisor (in seconds) matching the aggregation granularity
    # presumably used by the metrics service for this interval length -
    # NOTE(review): intervals between 3d and 2w fall through to the 1h
    # bucket; confirm that is intended
    delta = filter_settings['end_date'] - filter_settings['start_date']
    if delta < h.time_deltas.get('12h')['delta']:
        seconds = h.time_deltas['1m']['minutes'] * 60.0
    elif delta <= h.time_deltas.get('3d')['delta']:
        seconds = h.time_deltas['5m']['minutes'] * 60.0
    elif delta >= h.time_deltas.get('2w')['delta']:
        seconds = h.time_deltas['24h']['minutes'] * 60.0
    else:
        seconds = h.time_deltas['1h']['minutes'] * 60.0

    # normalize bucket counts to requests per second
    for item in result_now:
        if item['requests']:
            item['requests'] = round(item['requests'] / seconds, 3)
    return result_now
507
506
508
507
@view_config(route_name='applications_property',
             match_param='key=apdex_stats', renderer='json',
             permission='view')
def get_apdex_stats(request):
    """
    Return APDEX score information per server for the dashboard
    stats boxes (upper right corner).
    """
    params = request.GET.mixed()
    params['resource'] = (request.context.resource.resource_id,)

    settings = build_filter_settings_from_query_dict(request, params)
    # restrict the query to a single resource so a stale application
    # selector cannot mix several apps into one score
    settings['resource'] = [settings['resource'][0]]

    # default window: last hour, ending at the current full minute
    if not settings.get('end_date'):
        settings['end_date'] = datetime.utcnow().replace(
            microsecond=0, second=0)
    if not settings.get('start_date'):
        settings['start_date'] = settings['end_date'] - timedelta(hours=1)

    return RequestMetricService.get_apdex_stats(request, settings)
535
534
536
535
@view_config(route_name='applications_property', match_param='key=slow_calls',
             renderer='json', permission='view')
def get_slow_calls(request):
    """
    Return the most time consuming calls within the selected time interval.
    """
    params = request.GET.mixed()
    params['resource'] = (request.context.resource.resource_id,)

    settings = build_filter_settings_from_query_dict(request, params)

    # default window: last hour, ending at the current full minute
    if not settings.get('end_date'):
        settings['end_date'] = datetime.utcnow().replace(
            microsecond=0, second=0)
    if not settings.get('start_date'):
        settings['start_date'] = settings['end_date'] - timedelta(hours=1)

    return SlowCallService.get_time_consuming_calls(request, settings)
558
557
559
558
@view_config(route_name='applications_property',
             match_param='key=requests_breakdown',
             renderer='json', permission='view')
def get_requests_breakdown(request):
    """
    Used on dashboard to get information which views are most used in
    a time interval.

    Returns a list of dicts per view: average response time, request
    count, total time, view name, latest request details and the
    percentage share of total traffic.
    """
    query_params = request.GET.mixed()
    query_params['resource'] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request,
                                                            query_params)
    # default window: last hour, ending at the current full minute
    if not filter_settings.get('end_date'):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings['end_date'] = end_date

    if not filter_settings.get('start_date'):
        delta = timedelta(hours=1)
        filter_settings['start_date'] = filter_settings['end_date'] - delta

    series = RequestMetricService.get_requests_breakdown(
        request, filter_settings)

    results = []
    for row in series:
        # guard against rows with zero requests - the sibling dashboard
        # views apply the same check before dividing
        request_count = row['requests']
        avg_response = (round(row['main'] / request_count, 3)
                        if request_count else 0)
        d_row = {'avg_response': avg_response,
                 'requests': request_count,
                 'main': row['main'],
                 'view_name': row['key'],
                 'latest_details': row['latest_details'],
                 'percentage': round(row['percentage'] * 100, 1)}

        results.append(d_row)

    return results
596
595
597
596
@view_config(route_name='applications_property',
             match_param='key=trending_reports', renderer='json',
             permission='view')
def trending_reports(request):
    """
    Returns exception/slow reports trending for a specific time interval.
    """
    query_params = request.GET.mixed().copy()
    # pop report type to rewrite it to tag later
    report_type = query_params.pop('report_type', None)
    if report_type:
        query_params['type'] = report_type

    query_params['resource'] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request,
                                                            query_params)

    # default window: last hour, ending at the current full minute
    if not filter_settings.get('end_date'):
        filter_settings['end_date'] = datetime.utcnow().replace(
            microsecond=0, second=0)
    if not filter_settings.get('start_date'):
        filter_settings['start_date'] = (
            filter_settings['end_date'] - timedelta(hours=1))

    trending = []
    for occurences, group in ReportGroupService.get_trending(
            request, filter_settings):
        report_group = group.get_dict(request)
        # show the occurrence count within the selected time range instead
        # of the global one ('occurences' spelling kept for API compat)
        report_group['occurences'] = occurences
        trending.append(report_group)

    return trending
634
633
635
634
@view_config(route_name='applications_property',
             match_param='key=integrations',
             renderer='json', permission='view')
def integrations(request):
    """
    Integration list for the given application.
    """
    return {'resource': request.context.resource}
645
644
646
645
@view_config(route_name='applications_property',
             match_param='key=user_permissions', renderer='json',
             permission='owner', request_method='POST')
def user_resource_permission_create(request):
    """
    Grant a set of permissions on the resource to a user.

    The target user may be identified by user name or by email address;
    returns ``False`` when no matching user exists, otherwise a dict with
    the user name and the full set of direct user permissions.
    """
    resource = request.context.resource
    user_name = request.unsafe_json_body.get('user_name')
    # fall back to email lookup when the name does not match a user
    user = (UserService.by_user_name(user_name) or
            UserService.by_email(user_name))
    if not user:
        return False

    for perm_name in request.unsafe_json_body.get('permissions', []):
        existing = UserResourcePermissionService.by_resource_user_and_perm(
            user.id, perm_name, resource.resource_id)
        if not existing:
            # only create rows for permissions the user does not have yet
            resource.user_permissions.append(
                UserResourcePermission(perm_name=perm_name, user_id=user.id))
    DBSession.flush()
    perms = set(p.perm_name
                for p in ResourceService.perms_for_user(resource, user)
                if p.type == 'user')
    return {'user_name': user.user_name,
            'permissions': list(perms)}
675
674
676
675
@view_config(route_name='applications_property',
             match_param='key=user_permissions', renderer='json',
             permission='owner', request_method='DELETE')
def user_resource_permission_delete(request):
    """
    Removes user permission from specific resource.

    Returns ``False`` when the user cannot be found, otherwise a dict
    with the user name and the permissions that remain after deletion.
    """
    resource = request.context.resource

    user = UserService.by_user_name(request.GET.get('user_name'))
    if not user:
        return False

    for perm_name in request.GET.getall('permissions'):
        permission = UserResourcePermissionService.by_resource_user_and_perm(
            user.id, perm_name, resource.resource_id)
        # the permission may not exist (stale UI state, repeated request);
        # list.remove(None) would raise ValueError, so skip missing rows
        if permission:
            resource.user_permissions.remove(permission)
    DBSession.flush()
    perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user)
             if p.type == 'user']
    result = {'user_name': user.user_name,
              'permissions': list(set(perms))}
    return result
700
699
701
700
@view_config(route_name='applications_property',
             match_param='key=group_permissions', renderer='json',
             permission='owner', request_method='POST')
def group_resource_permission_create(request):
    """
    Grant a set of permissions on the resource to a group.

    Returns ``False`` when the group does not exist, otherwise a dict
    with the group and its full set of group permissions.
    """
    resource = request.context.resource
    group = GroupService.by_id(request.unsafe_json_body.get('group_id'))
    if not group:
        return False

    for perm_name in request.unsafe_json_body.get('permissions', []):
        existing = GroupResourcePermissionService.by_resource_group_and_perm(
            group.id, perm_name, resource.resource_id)
        if not existing:
            # only create rows for permissions the group is missing
            resource.group_permissions.append(
                GroupResourcePermission(perm_name=perm_name,
                                        group_id=group.id))
    DBSession.flush()
    perm_tuples = ResourceService.groups_for_perm(
        resource,
        ANY_PERMISSION,
        limit_group_permissions=True,
        group_ids=[group.id])
    perms = set(p.perm_name for p in perm_tuples if p.type == 'group')
    return {'group': group.get_dict(),
            'permissions': list(perms)}
731
730
732
731
@view_config(route_name='applications_property',
             match_param='key=group_permissions', renderer='json',
             permission='owner', request_method='DELETE')
def group_resource_permission_delete(request):
    """
    Removes group permission from specific resource.

    Returns ``False`` when the group cannot be found, otherwise a dict
    with the group and the permissions that remain after deletion.
    """
    # NOTE(review): validate() result is ignored; presumably only the CSRF
    # check side effect is wanted here - confirm
    form = forms.ReactorForm(request.POST, csrf_context=request)
    form.validate()
    resource = request.context.resource
    group = GroupService.by_id(request.GET.get('group_id'))
    if not group:
        return False

    for perm_name in request.GET.getall('permissions'):
        permission = GroupResourcePermissionService.by_resource_group_and_perm(
            group.id, perm_name, resource.resource_id)
        # the permission may not exist (stale UI state, repeated request);
        # list.remove(None) would raise ValueError, so skip missing rows
        if permission:
            resource.group_permissions.remove(permission)
    DBSession.flush()
    perm_tuples = ResourceService.groups_for_perm(
        resource,
        ANY_PERMISSION,
        limit_group_permissions=True,
        group_ids=[group.id])
    perms = [p.perm_name for p in perm_tuples if p.type == 'group']
    result = {'group': group.get_dict(),
              'permissions': list(set(perms))}
    return result
@@ -1,446 +1,445 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 #
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
7 # You may obtain a copy of the License at
8 #
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
10 #
11 # Unless required by applicable law or agreed to in writing, software
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import copy
17 import copy
18 import logging
18 import logging
19 import datetime
19 import datetime
20 import time
20 import time
21 import random
21 import random
22 import redis
22 import redis
23 import six
24 import pyramid.renderers
23 import pyramid.renderers
25 import requests
24 import requests
26
25
27 from ziggurat_foundations.models.services.user import UserService
26 from ziggurat_foundations.models.services.user import UserService
28
27
29 import appenlight.celery.tasks
28 import appenlight.celery.tasks
30 from pyramid.view import view_config
29 from pyramid.view import view_config
31 from pyramid_mailer.message import Message
30 from pyramid_mailer.message import Message
32 from appenlight_client.timing import time_trace
31 from appenlight_client.timing import time_trace
33 from appenlight.models import DBSession, Datastores
32 from appenlight.models import DBSession, Datastores
34 from appenlight.models.user import User
33 from appenlight.models.user import User
35 from appenlight.models.report_group import ReportGroup
34 from appenlight.models.report_group import ReportGroup
36 from appenlight.models.event import Event
35 from appenlight.models.event import Event
37 from appenlight.models.services.report_group import ReportGroupService
36 from appenlight.models.services.report_group import ReportGroupService
38 from appenlight.models.services.event import EventService
37 from appenlight.models.services.event import EventService
39 from appenlight.lib.enums import ReportType
38 from appenlight.lib.enums import ReportType
40
39
# Module-level logger for this views module.
log = logging.getLogger(__name__)

# NOTE(review): not referenced anywhere in this chunk; presumably a
# debugging hook for stashing a request object - confirm before removing.
GLOBAL_REQ = None
44
43
45
44
@view_config(route_name='test', match_param='action=mail',
             renderer='string', permission='root_administration')
def mail(request):
    """
    Test email communication: renders the registration template, sends
    it through the configured mailer and returns the rendered HTML.
    """
    # force a canonical host/scheme so generated links look like production
    request.environ['HTTP_HOST'] = 'appenlight.com'
    request.environ['wsgi.url_scheme'] = 'https'
    renderer_vars = {"title": "You have just registered on AppEnlight",
                     "username": "test",
                     "email": "grzegżółka",
                     'firstname': 'dupa'}
    html = pyramid.renderers.render('/email_templates/registered.jinja2',
                                    renderer_vars,
                                    request=request)
    message = Message(subject="hello world %s" % random.randint(1, 9999),
                      sender="info@appenlight.com",
                      recipients=["ergo14@gmail.com"],
                      html=html)
    request.registry.mailer.send(message)
    return html
    # removed unreachable 'return vars' dead code that followed the return
69
68
70
69
71 @view_config(route_name='test', match_param='action=alerting',
70 @view_config(route_name='test', match_param='action=alerting',
72 renderer='appenlight:templates/tests/alerting.jinja2',
71 renderer='appenlight:templates/tests/alerting.jinja2',
73 permission='root_administration')
72 permission='root_administration')
74 def alerting_test(request):
73 def alerting_test(request):
75 """
74 """
76 Allows to test send data on various registered alerting channels
75 Allows to test send data on various registered alerting channels
77 """
76 """
78 applications = UserService.resources_with_perms(request.user, ['view'], resource_types=['application'])
77 applications = UserService.resources_with_perms(request.user, ['view'], resource_types=['application'])
79 # what we can select in total
78 # what we can select in total
80 all_possible_app_ids = [app.resource_id for app in applications]
79 all_possible_app_ids = [app.resource_id for app in applications]
81 resource = applications[0]
80 resource = applications[0]
82
81
83 alert_channels = []
82 alert_channels = []
84 for channel in request.user.alert_channels:
83 for channel in request.user.alert_channels:
85 alert_channels.append(channel.get_dict())
84 alert_channels.append(channel.get_dict())
86
85
87 cname = request.params.get('channel_name')
86 cname = request.params.get('channel_name')
88 cvalue = request.params.get('channel_value')
87 cvalue = request.params.get('channel_value')
89 event_name = request.params.get('event_name')
88 event_name = request.params.get('event_name')
90 if cname and cvalue:
89 if cname and cvalue:
91 for channel in request.user.alert_channels:
90 for channel in request.user.alert_channels:
92 if (channel.channel_value == cvalue and
91 if (channel.channel_value == cvalue and
93 channel.channel_name == cname):
92 channel.channel_name == cname):
94 break
93 break
95 if event_name in ['error_report_alert', 'slow_report_alert']:
94 if event_name in ['error_report_alert', 'slow_report_alert']:
96 # opened
95 # opened
97 new_event = Event(resource_id=resource.resource_id,
96 new_event = Event(resource_id=resource.resource_id,
98 event_type=Event.types[event_name],
97 event_type=Event.types[event_name],
99 start_date=datetime.datetime.utcnow(),
98 start_date=datetime.datetime.utcnow(),
100 status=Event.statuses['active'],
99 status=Event.statuses['active'],
101 values={'reports': 5,
100 values={'reports': 5,
102 'threshold': 10}
101 'threshold': 10}
103 )
102 )
104 channel.notify_alert(resource=resource,
103 channel.notify_alert(resource=resource,
105 event=new_event,
104 event=new_event,
106 user=request.user,
105 user=request.user,
107 request=request)
106 request=request)
108
107
109 # closed
108 # closed
110 ev_type = Event.types[event_name.replace('open', 'close')]
109 ev_type = Event.types[event_name.replace('open', 'close')]
111 new_event = Event(resource_id=resource.resource_id,
110 new_event = Event(resource_id=resource.resource_id,
112 event_type=ev_type,
111 event_type=ev_type,
113 start_date=datetime.datetime.utcnow(),
112 start_date=datetime.datetime.utcnow(),
114 status=Event.statuses['closed'],
113 status=Event.statuses['closed'],
115 values={'reports': 5,
114 values={'reports': 5,
116 'threshold': 10})
115 'threshold': 10})
117 channel.notify_alert(resource=resource,
116 channel.notify_alert(resource=resource,
118 event=new_event,
117 event=new_event,
119 user=request.user,
118 user=request.user,
120 request=request)
119 request=request)
121 elif event_name == 'notify_reports':
120 elif event_name == 'notify_reports':
122 report = ReportGroupService.by_app_ids(all_possible_app_ids) \
121 report = ReportGroupService.by_app_ids(all_possible_app_ids) \
123 .filter(ReportGroup.report_type == ReportType.error).first()
122 .filter(ReportGroup.report_type == ReportType.error).first()
124 confirmed_reports = [(5, report), (1, report)]
123 confirmed_reports = [(5, report), (1, report)]
125 channel.notify_reports(resource=resource,
124 channel.notify_reports(resource=resource,
126 user=request.user,
125 user=request.user,
127 request=request,
126 request=request,
128 since_when=datetime.datetime.utcnow(),
127 since_when=datetime.datetime.utcnow(),
129 reports=confirmed_reports)
128 reports=confirmed_reports)
130 confirmed_reports = [(5, report)]
129 confirmed_reports = [(5, report)]
131 channel.notify_reports(resource=resource,
130 channel.notify_reports(resource=resource,
132 user=request.user,
131 user=request.user,
133 request=request,
132 request=request,
134 since_when=datetime.datetime.utcnow(),
133 since_when=datetime.datetime.utcnow(),
135 reports=confirmed_reports)
134 reports=confirmed_reports)
136 elif event_name == 'notify_uptime':
135 elif event_name == 'notify_uptime':
137 new_event = Event(resource_id=resource.resource_id,
136 new_event = Event(resource_id=resource.resource_id,
138 event_type=Event.types['uptime_alert'],
137 event_type=Event.types['uptime_alert'],
139 start_date=datetime.datetime.utcnow(),
138 start_date=datetime.datetime.utcnow(),
140 status=Event.statuses['active'],
139 status=Event.statuses['active'],
141 values={"status_code": 500,
140 values={"status_code": 500,
142 "tries": 2,
141 "tries": 2,
143 "response_time": 0})
142 "response_time": 0})
144 channel.notify_uptime_alert(resource=resource,
143 channel.notify_uptime_alert(resource=resource,
145 event=new_event,
144 event=new_event,
146 user=request.user,
145 user=request.user,
147 request=request)
146 request=request)
148 elif event_name == 'chart_alert':
147 elif event_name == 'chart_alert':
149 event = EventService.by_type_and_status(
148 event = EventService.by_type_and_status(
150 event_types=(Event.types['chart_alert'],),
149 event_types=(Event.types['chart_alert'],),
151 status_types=(Event.statuses['active'],)).first()
150 status_types=(Event.statuses['active'],)).first()
152 channel.notify_chart_alert(resource=event.resource,
151 channel.notify_chart_alert(resource=event.resource,
153 event=event,
152 event=event,
154 user=request.user,
153 user=request.user,
155 request=request)
154 request=request)
156 elif event_name == 'daily_digest':
155 elif event_name == 'daily_digest':
157 since_when = datetime.datetime.utcnow() - datetime.timedelta(
156 since_when = datetime.datetime.utcnow() - datetime.timedelta(
158 hours=8)
157 hours=8)
159 filter_settings = {'resource': [resource.resource_id],
158 filter_settings = {'resource': [resource.resource_id],
160 'tags': [{'name': 'type',
159 'tags': [{'name': 'type',
161 'value': ['error'], 'op': None}],
160 'value': ['error'], 'op': None}],
162 'type': 'error', 'start_date': since_when}
161 'type': 'error', 'start_date': since_when}
163
162
164 reports = ReportGroupService.get_trending(
163 reports = ReportGroupService.get_trending(
165 request, filter_settings=filter_settings, limit=50)
164 request, filter_settings=filter_settings, limit=50)
166 channel.send_digest(resource=resource,
165 channel.send_digest(resource=resource,
167 user=request.user,
166 user=request.user,
168 request=request,
167 request=request,
169 since_when=datetime.datetime.utcnow(),
168 since_when=datetime.datetime.utcnow(),
170 reports=reports)
169 reports=reports)
171
170
172 return {'alert_channels': alert_channels,
171 return {'alert_channels': alert_channels,
173 'applications': dict([(app.resource_id, app.resource_name)
172 'applications': dict([(app.resource_id, app.resource_name)
174 for app in applications.all()])}
173 for app in applications.all()])}
175
174
176
175
@view_config(route_name='test', match_param='action=error',
             renderer='string', permission='root_administration')
def error(request):
    """
    Deliberately raise an exception for testing purposes.

    Seeds the request environ with sample appenlight metadata, emits log
    records at several levels, runs a timed call and some redis traffic,
    then raises — so the whole error-reporting pipeline can be exercised.
    """
    pi_char = chr(960)
    env = request.environ
    env['appenlight.message'] = 'test message'
    env['appenlight.extra']['dupa'] = 'dupa'
    env['appenlight.extra']['message'] = 'message'
    env['appenlight.tags']['action'] = 'test_error'
    env['appenlight.tags']['count'] = 5
    # log records at several levels, including a non-ascii payload
    log.debug(pi_char)
    log.debug('debug')
    log.info(pi_char)
    log.info('INFO')
    log.warning('warning')

    @time_trace(name='error.foobar', min_duration=0.1)
    def timed_call():
        # sleeps just past min_duration so the call gets traced
        time.sleep(0.12)
        return 1

    timed_call()

    def foobar(somearg):
        raise Exception('test')

    redis_client = redis.StrictRedis()
    redis_client.setex('testval', 10, 'foo')
    env['appenlight.force_send'] = 1

    # stats, result = get_local_storage(local_timing).get_thread_stats()
    # import pprint
    # pprint.pprint(stats)
    # pprint.pprint(result)
    # print 'entries', len(result)
    env['appenlight.username'] = 'ErgO'
    raise Exception(pi_char + '%s' % random.randint(1, 5))
    return {}  # unreachable: the raise above always fires
216
215
217
216
@view_config(route_name='test', match_param='action=task',
             renderer='string', permission='root_administration')
def test_task(request):
    """Queue the always-failing celery task to test error capture."""
    from appenlight.celery import tasks

    tasks.test_exception_task.delay()
    return 'task sent'
228
227
229
228
@view_config(route_name='test', match_param='action=task_retry',
             renderer='string', permission='root_administration')
def test_task_retry(request):
    """Queue the celery task that fails and retries, for testing."""
    from appenlight.celery import tasks

    tasks.test_retry_exception_task.delay()
    return 'task sent'
240
239
241
240
@view_config(route_name='test', match_param='action=celery_emails',
             renderer='string', permission='root_administration')
def test_celery_emails(request):
    """Queue the alerting celery task (asynchronously) for testing."""
    from appenlight.celery import tasks

    tasks.alerting.delay()
    return 'task sent'
248
247
249
248
@view_config(route_name='test', match_param='action=daily_digest',
             renderer='string', permission='root_administration')
def test_celery_daily_digest(request):
    """Queue the daily-digest celery task for testing."""
    from appenlight.celery import tasks

    tasks.daily_digest.delay()
    return 'task sent'
256
255
257
256
@view_config(route_name='test', match_param='action=celery_alerting',
             renderer='string', permission='root_administration')
def test_celery_alerting(request):
    """Run the alerting task synchronously (in-process, not via a worker)."""
    from appenlight.celery import tasks

    # note: direct call, not ``.delay()`` — executes in this request
    tasks.alerting()
    return 'task sent'
264
263
265
264
@view_config(route_name='test', match_param='action=logging',
             renderer='string', permission='root_administration')
def logs(request):
    """
    Test some in-app logging.

    Emits records at every level — with non-ascii payloads and structured
    ``extra`` dicts (including ae_primary_key / ae_permanent markers) —
    then forces the client to ship them.
    """
    pi_char = chr(960)
    log.debug(pi_char)
    log.debug('debug')
    log.info(pi_char)
    log.info('INFO')
    # messages mixing escaped utf-8 bytes and literal unicode
    log.warning('Matched GET /\xc4\x85\xc5\xbc\xc4\x87'
                '\xc4\x99\xc4\x99\xc4\x85/summary')
    log.warning('XXXXMatched GET /\xc4\x85\xc5\xbc\xc4'
                '\x87\xc4\x99\xc4\x99\xc4\x85/summary')
    log.warning('DUPA /Δ…ΕΌΔ‡Δ™Δ™Δ…')
    log.warning("g\u017ceg\u017c\u00f3\u0142ka")
    log.error('TEST Lorem ipsum2',
              extra={'user': 'ergo', 'commit': 'sog8ds0g7sdih12hh1j512h5k'})
    log.fatal('TEST Lorem ipsum3')
    log.warning('TEST Lorem ipsum',
                extra={"action": 'purchase',
                       "price": random.random() * 100,
                       "quantity": random.randint(1, 99)})
    # records carrying primary-key / permanence markers for the client
    log.warning('test_pkey',
                extra={"action": 'test_pkey', "price": random.random() * 100,
                       'ae_primary_key': 1,
                       "quantity": random.randint(1, 99)})
    log.warning('test_pkey2',
                extra={"action": 'test_pkey', "price": random.random() * 100,
                       'ae_primary_key': 'b',
                       'ae_permanent': 't',
                       "quantity": random.randint(1, 99)})
    log.warning('test_pkey3',
                extra={"action": 'test_pkey', "price": random.random() * 100,
                       'ae_primary_key': 1,
                       "quantity": random.randint(1, 99)})
    log.warning('test_pkey4',
                extra={"action": 'test_pkey', "price": random.random() * 100,
                       'ae_primary_key': 'b',
                       'ae_permanent': True,
                       "quantity": random.randint(1, 99)})
    request.environ['appenlight.force_send'] = 1
    return {}
309
308
310
309
@view_config(route_name='test', match_param='action=transaction',
             renderer='string', permission='root_administration')
def transaction_test(request):
    """
    Test transactions.

    Executes a statement guaranteed to fail (division by zero) to doom
    the current transaction, aborts it via the request's transaction
    manager, then verifies the session is usable again with a trivial
    query.
    """
    try:
        DBSession.execute("SELECT 1/0")
    except Exception:
        # was a bare ``except:``, which would also trap SystemExit and
        # KeyboardInterrupt; any DB error here means the tx is doomed
        request.tm.abort()
    result = DBSession.execute("SELECT 1")
    return 'OK'
323
322
324
323
@view_config(route_name='test', match_param='action=slow_request',
             renderer='string', permission='root_administration')
def slow_request(request):
    """
    Test a request that has some slow entries - including nested calls.

    Builds three traced helpers (foo -> bar -> baz) with sleeps past
    their min_duration thresholds, plus some DB and HTTP traffic.
    """
    import random
    import threading

    users = DBSession.query(User).all()
    marker = random.random()
    thread_id = id(threading.currentThread())
    log.warning('slow_log %s %s ' % (marker, thread_id))
    log.critical('tid %s' % thread_id)

    @time_trace(name='baz_func %s' % marker, min_duration=0.1)
    def baz(payload):
        time.sleep(0.32)
        return payload

    requests.get('http://ubuntu.com')

    @time_trace(name='foo_func %s %s' % (marker, thread_id), min_duration=0.1)
    def foo(payload):
        time.sleep(0.52)
        log.warning('foo_func %s %s' % (marker, thread_id))
        requests.get('http://ubuntu.com?test=%s' % marker)
        return bar(payload)

    @time_trace(name='bar_func %s %s' % (marker, thread_id), min_duration=0.1)
    def bar(payload):
        log.warning('bar_func %s %s' % (marker, thread_id))
        time.sleep(1.52)
        # three calls on purpose: repeated slow entries in one request
        baz(payload)
        baz(payload)
        return baz(payload)

    foo('a')
    return {}
363
362
364
363
@view_config(route_name='test', match_param='action=styling',
             renderer='appenlight:templates/tests/styling.jinja2',
             permission='__no_permission_required__')
def styling(request):
    """
    Some styling test page.

    Flashes one message per alert queue (default, warning, error) so the
    template can render every variant.
    """
    _ = str  # stand-in for a translation function
    info_msg = _('Your password got updated. '
                 'Next time log in with your new credentials.')
    warn_msg = _('Something went wrong when we '
                 'tried to authorize you via external provider')
    error_msg = _('Unfortunately there was a problem '
                  'processing your payment, please try again later.')
    request.session.flash(info_msg)
    request.session.flash(warn_msg, 'warning')
    request.session.flash(error_msg, 'error')
    return {}
385
384
386
385
@view_config(route_name='test', match_param='action=js_error',
             renderer='appenlight:templates/tests/js_error.jinja2',
             permission='__no_permission_required__')
def js(request):
    """Render the page used to test the javascript client's error catching."""
    return {}
395
394
396
395
@view_config(route_name='test', match_param='action=js_log',
             renderer='appenlight:templates/tests/js_log.jinja2',
             permission='__no_permission_required__')
def js_log(request):
    """Render the page used to test the javascript client's logging."""
    return {}
405
404
406
405
@view_config(route_name='test', match_param='action=log_requests',
             renderer='string',
             permission='__no_permission_required__')
def log_requests(request):
    """Utility view for printing json requests; returns an empty payload."""
    return {}
415
414
416
415
@view_config(route_name='test', match_param='action=url', renderer='string',
             permission='__no_permission_required__')
def test_url_generation(request):
    """
    Return a route URL generated against an explicit application URL.

    Renamed from ``log_requests``: the original redefined the view
    function of the same name declared directly above, shadowing it at
    module level. Registration happens through ``@view_config`` (route
    name + match_param), so the rename does not affect routing.
    """
    return request.route_url('reports', _app_url='https://appenlight.com')
424
423
425
424
class TestClass(object):
    """
    Used to test if class-based view name resolution works correctly.
    """

    def __init__(self, request):
        # pyramid instantiates class-based views with the request
        self.request = request

    # one method registered under three different match_param actions
    @view_config(route_name='test', match_param='action=test_a',
                 renderer='string', permission='root_administration')
    @view_config(route_name='test', match_param='action=test_c',
                 renderer='string', permission='root_administration')
    @view_config(route_name='test', match_param='action=test_d',
                 renderer='string', permission='root_administration')
    def test_a(self):
        return 'ok'

    @view_config(route_name='test', match_param='action=test_b',
                 renderer='string', permission='root_administration')
    def test_b(self):
        return 'ok'
General Comments 0
You need to be logged in to leave comments. Login now