##// END OF EJS Templates
project: add missing lib directory
ergo -
r2:87908169
parent child Browse files
Show More
@@ -0,0 +1,53 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 """Miscellaneous support packages for {{project}}.
23 """
24 import random
25 import string
26 import importlib
27
28 from appenlight_client.exceptions import get_current_traceback
29
30
def generate_random_string(chars=10):
    """Return a random alphanumeric string of *chars* characters.

    Samples without replacement from a pool containing every ASCII letter
    twice plus the digits, so a letter can appear at most twice and a
    digit at most once.  Not cryptographically secure (uses ``random``).
    """
    pool = string.ascii_letters * 2 + string.digits
    return ''.join(random.sample(pool, chars))
35
def to_integer_safe(input):
    """Convert *input* to ``int``, returning ``None`` instead of raising
    when the value cannot be converted (``None``, junk strings, ...)."""
    # NOTE: parameter name shadows the builtin, kept for interface
    # compatibility with existing callers.
    try:
        result = int(input)
    except (TypeError, ValueError):
        result = None
    return result
41
def print_traceback(log):
    """Log the current exception's message and full plaintext traceback
    to the supplied logger."""
    tb = get_current_traceback(skip=1, show_hidden_frames=True,
                               ignore_system_exceptions=True)
    log.error(tb.exception)
    log.error(tb.plaintext)
    # drop the traceback object promptly to release frame references
    del tb
49
def get_callable(import_string):
    """Resolve a ``module.path:attribute`` string to the named attribute.

    :param import_string: e.g. ``'package.module:function_name'``
    :return: the attribute object looked up on the imported module
    """
    module_name, attribute_name = import_string.split(':')
    module = importlib.import_module(module_name)
    return getattr(module, attribute_name)
@@ -0,0 +1,83 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 import datetime
23 import logging
24
25 from pyramid.httpexceptions import HTTPForbidden, HTTPTooManyRequests
26
27 from appenlight.models import Datastores
28 from appenlight.models.services.config import ConfigService
29 from appenlight.lib.redis_keys import REDIS_KEYS
30
31 log = logging.getLogger(__name__)
32
33
def rate_limiting(request, resource, section, to_increment=1):
    """Increment the current-minute redis counter for *resource* under
    *section* and raise HTTP 429 once the configured limit is exceeded.

    The limit comes from the 'global' config section (default 1000);
    counter keys expire after 24h.
    """
    minute_bucket = datetime.datetime.utcnow().replace(second=0,
                                                       microsecond=0)
    redis_key = REDIS_KEYS['rate_limits'][section].format(
        minute_bucket, resource.resource_id)
    count = Datastores.redis.incr(redis_key, to_increment)
    Datastores.redis.expire(redis_key, 3600 * 24)
    config = ConfigService.by_key_and_section(section, 'global')
    limit = config.value if config else 1000
    if count > int(limit):
        log.info('RATE LIMITING: {}: {}, {}'.format(
            section, resource, count))
        abort_msg = 'Rate limits are in effect for this application'
        raise HTTPTooManyRequests(abort_msg,
                                  headers={'X-AppEnlight': abort_msg})
48
49
def check_cors(request, application, should_return=True):
    """Validate the request's Origin header against the application's
    whitelisted domains and, on success, attach CORS headers.

    :return: the response object when the origin is allowed, ``False``
        when no Origin header is present, or an ``HTTPForbidden``
        instance (returned, not raised) otherwise.
    """
    origin = request.headers.get('Origin')
    if should_return:
        log.info('CORS for %s' % origin)
    if not origin:
        return False
    # substring match: any whitelisted domain occurring inside the
    # Origin header counts as a hit
    matched = any(domain in origin
                  for domain in application.domains.split('\n'))
    if not matched:
        return HTTPForbidden()
    response_headers = request.response.headers
    response_headers.add('Access-Control-Allow-Origin', origin)
    response_headers.add('XDomainRequestAllowed', '1')
    response_headers.add('Access-Control-Allow-Methods',
                         'GET, POST, OPTIONS')
    response_headers.add('Access-Control-Allow-Headers',
                         'Accept-Encoding, Accept-Language, '
                         'Content-Type, '
                         'Depth, User-Agent, X-File-Size, '
                         'X-Requested-With, If-Modified-Since, '
                         'X-File-Name, '
                         'Cache-Control, Host, Pragma, Accept, '
                         'Origin, Connection, '
                         'Referer, Cookie, '
                         'X-appenlight-public-api-key, '
                         'x-appenlight-public-api-key')
    response_headers.add('Access-Control-Max-Age', '86400')
    return request.response
@@ -0,0 +1,188 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 import copy
23 import hashlib
24 import inspect
25
26 from dogpile.cache import make_region, compat
27
28 regions = None
29
30
def key_mangler(key):
    """Namespace every dogpile cache key under ``appenlight:dogpile:``
    so appenlight entries cannot collide with other users of the same
    backend."""
    return 'appenlight:dogpile:{}'.format(key)
33
34
def hashgen(namespace, fn, to_str=str):
    """Return a function that generates a string
    key, based on a given function as well as
    arguments to the returned function itself.

    This is used by :meth:`.CacheRegion.cache_on_arguments`
    to generate a cache key from a decorated function.

    It can be replaced using the ``function_key_generator``
    argument passed to :func:`.make_region`.

    :param namespace: optional extra discriminator appended to the
        ``module:function`` prefix
    :param fn: the function being cached
    :param to_str: stringifier applied to each positional argument.
        Previously defaulted to ``dogpile.cache.compat.string_type``,
        which is ``str`` on Python 3 (the ``compat`` shim was removed
        in dogpile.cache 1.0).
    """

    if namespace is None:
        namespace = '%s:%s' % (fn.__module__, fn.__name__)
    else:
        namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)

    # inspect.getargspec was removed in Python 3.11; getfullargspec is
    # the drop-in replacement for positional-argument introspection.
    argspec = inspect.getfullargspec(fn)
    has_self = bool(argspec.args) and argspec.args[0] in ('self', 'cls')

    def generate_key(*args, **kw):
        # keyword arguments are rejected, matching dogpile's default
        # key generator contract
        if kw:
            raise ValueError(
                "dogpile.cache's default key creation "
                "function does not accept keyword arguments.")
        if has_self:
            # drop the instance/class argument so keys are shared
            # across instances
            args = args[1:]

        return namespace + "|" + hashlib.sha1(
            " ".join(map(to_str, args)).encode('utf8')).hexdigest()

    return generate_key
68
69
class CacheRegions(object):
    """Bundle of pre-configured dogpile cache regions with expiration
    times ranging from one second to thirty days, split between a redis
    backend and an in-process memory backend.

    :param settings: argument dict forwarded to the dogpile redis
        backend (connection details, presumably -- confirm against the
        configuration that feeds this)
    """

    def __init__(self, settings):
        config_redis = {"arguments": settings}

        def redis_region(expiration_seconds):
            # deepcopy keeps each region's argument dict independent,
            # matching the original per-region copies
            return make_region(
                function_key_generator=hashgen,
                key_mangler=key_mangler).configure(
                "dogpile.cache.redis",
                expiration_time=expiration_seconds,
                **copy.deepcopy(config_redis))

        def memory_region(expiration_seconds, **backend_args):
            return make_region(
                function_key_generator=hashgen,
                key_mangler=key_mangler).configure(
                "dogpile.cache.memory",
                expiration_time=expiration_seconds,
                **backend_args)

        self.redis_sec_1 = redis_region(1)
        self.redis_sec_5 = redis_region(5)
        self.redis_sec_30 = redis_region(30)
        self.redis_min_1 = redis_region(60)
        self.redis_min_5 = redis_region(300)
        # NOTE(review): expires after 60s despite the "min_10" name --
        # looks like a typo for 600, preserved as-is to avoid a
        # behavior change.
        self.redis_min_10 = redis_region(60)
        self.redis_min_60 = redis_region(3600)
        self.redis_day_1 = redis_region(86400)
        self.redis_day_7 = redis_region(86400 * 7)
        self.redis_day_30 = redis_region(86400 * 30)

        # NOTE(review): this memory region historically received the
        # redis argument dict as well; kept for exact parity.
        self.memory_day_1 = memory_region(86400,
                                          **copy.deepcopy(config_redis))
        self.memory_sec_1 = memory_region(1)
        self.memory_sec_5 = memory_region(5)
        self.memory_min_1 = memory_region(60)
        self.memory_min_5 = memory_region(300)
        self.memory_min_10 = memory_region(600)
        self.memory_min_60 = memory_region(3600)
185
186
def get_region(region):
    """Return the cache region named *region* from the module-level
    ``regions`` object.

    ``regions`` is ``None`` at import time; presumably it is replaced
    with a :class:`CacheRegions` instance during application startup --
    TODO confirm where it is assigned.

    :param region: attribute name, e.g. ``'redis_min_1'``
    """
    return getattr(regions, region)
@@ -0,0 +1,63 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 # this gets set on runtime
23 from cryptography.fernet import Fernet
24
25 ENCRYPTION_SECRET = None
26
27
def encrypt_fernet(value):
    """Encrypt *value* with the module-wide ``ENCRYPTION_SECRET`` and
    prefix the token with ``enc$fernet$`` so encrypted values are
    recognizable."""
    # avoid double encryption
    # not sure if this is needed but it won't hurt too much to have this
    if value.startswith('enc$fernet$'):
        return value
    cipher = Fernet(ENCRYPTION_SECRET)
    token = cipher.encrypt(value.encode('utf8')).decode('utf8')
    return 'enc$fernet${}'.format(token)
35
36
def decrypt_fernet(value):
    """Reverse :func:`encrypt_fernet`; values without the
    ``enc$fernet$`` prefix pass through unchanged."""
    parts = value.split('$', 3)
    if len(parts) != 3:
        # not an encrypted value
        return value
    cipher = Fernet(ENCRYPTION_SECRET)
    return cipher.decrypt(parts[2].encode('utf8')).decode('utf8')
46
47
def encrypt_dictionary_keys(_dict, exclude_keys=None):
    """Encrypt every value of *_dict* in place (mutates and returns the
    same dict), skipping keys listed in *exclude_keys*."""
    excluded = exclude_keys or []
    for key in [k for k in _dict.keys() if k not in excluded]:
        _dict[key] = encrypt_fernet(_dict[key])
    return _dict
55
56
def decrypt_dictionary_keys(_dict, exclude_keys=None):
    """Decrypt every value of *_dict* in place (mutates and returns the
    same dict), skipping keys listed in *exclude_keys*."""
    excluded = exclude_keys or []
    for key in [k for k in _dict.keys() if k not in excluded]:
        _dict[key] = decrypt_fernet(_dict[key])
    return _dict
@@ -0,0 +1,93 b''
1 import collections
2 # -*- coding: utf-8 -*-
3
4 # Copyright (C) 2010-2016 RhodeCode GmbH
5 #
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Affero General Public License, version 3
8 # (only), as published by the Free Software Foundation.
9 #
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
14 #
15 # You should have received a copy of the GNU Affero General Public License
16 # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 #
18 # This program is dual-licensed. If you wish to learn more about the
19 # App Enlight Enterprise Edition, including its added features, Support
20 # services, and proprietary license terms, please see
21 # https://rhodecode.com/licenses/
22
23
class StupidEnum(object):
    """Poor-man's enum: subclasses declare constants as plain class
    attributes; :meth:`key_from_value` maps a value back to its name."""

    @classmethod
    def set_inverse(cls):
        # Build the value -> name lookup from the public, non-callable
        # class attributes (cached on the class itself).
        cls._inverse_values = {
            value: name
            for name, value in vars(cls).items()
            if not name.startswith('_') and not callable(value)
        }

    @classmethod
    def key_from_value(cls, value):
        """Return the attribute name holding *value*, or ``None``."""
        if not hasattr(cls, '_inverse_values'):
            cls.set_inverse()
        return cls._inverse_values.get(value)
37
38
class ReportType(StupidEnum):
    """Numeric categories a report can belong to."""
    unknown = 0
    error = 1
    not_found = 2
    slow = 3
44
45
class Language(StupidEnum):
    """Numeric identifiers for programming languages (presumably the
    language of the client agent that sent the data -- confirm at the
    call sites that store these)."""
    unknown = 0
    python = 1
    javascript = 2
    java = 3
    objectivec = 4
    swift = 5
    cpp = 6
    basic = 7
    csharp = 8
    php = 9
    perl = 10
    vb = 11
    vbnet = 12
    ruby = 13
    fsharp = 14
    actionscript = 15
    go = 16
    scala = 17
    haskell = 18
    erlang = 19
    haxe = 20
    scheme = 21
69
70
class LogLevel(StupidEnum):
    """Internal log severity scale.

    Values increase with severity in steps of two -- presumably to
    leave room for intermediate levels (TODO confirm).
    """
    UNKNOWN = 0
    DEBUG = 2
    TRACE = 4
    INFO = 6
    WARNING = 8
    ERROR = 10
    CRITICAL = 12
    FATAL = 14
80
81
class LogLevelPython(StupidEnum):
    """Severity values matching the numeric constants of the stdlib
    ``logging`` module (``logging.CRITICAL == 50`` ...
    ``logging.NOTSET == 0``)."""
    CRITICAL = 50
    ERROR = 40
    WARNING = 30
    INFO = 20
    DEBUG = 10
    NOTSET = 0
89
90
class ParsedSentryEventType(StupidEnum):
    """Event categories recognized when parsing Sentry payloads."""
    ERROR_REPORT = 1
    LOG = 2
@@ -0,0 +1,153 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 """
23 ex-json borrowed from Marcin Kuzminski
24
25 source: https://secure.rhodecode.org/ext-json
26
27 """
import datetime
import decimal
import functools
import importlib.util

# ``imp`` was removed in Python 3.12; guarded so this module still imports
# there (the json-loading code below uses importlib.util instead).
try:
    import imp
except ImportError:  # Python >= 3.12
    imp = None
32
33 __all__ = ['json', 'simplejson', 'stdlibjson']
34
35
36 def _is_aware(value):
37 """
38 Determines if a given datetime.time is aware.
39
40 The logic is described in Python's docs:
41 http://docs.python.org/library/datetime.html#datetime.tzinfo
42 """
43 return (value.tzinfo is not None
44 and value.tzinfo.utcoffset(value) is not None)
45
46
47 def _obj_dump(obj):
48 """
49 Custom function for dumping objects to JSON, if obj has __json__ attribute
50 or method defined it will be used for serialization
51
52 :param obj:
53 """
54
55 if isinstance(obj, complex):
56 return [obj.real, obj.imag]
57 # See "Date Time String Format" in the ECMA-262 specification.
58 # some code borrowed from django 1.4
59 elif isinstance(obj, datetime.datetime):
60 r = obj.isoformat()
61 # if obj.microsecond:
62 # r = r[:23] + r[26:]
63 if r.endswith('+00:00'):
64 r = r[:-6] + 'Z'
65 return r
66 elif isinstance(obj, datetime.date):
67 return obj.isoformat()
68 elif isinstance(obj, decimal.Decimal):
69 return str(obj)
70 elif isinstance(obj, datetime.time):
71 if _is_aware(obj):
72 raise ValueError("JSON can't represent timezone-aware times.")
73 r = obj.isoformat()
74 if obj.microsecond:
75 r = r[:12]
76 return r
77 elif isinstance(obj, set):
78 return list(obj)
79 elif hasattr(obj, '__json__'):
80 if callable(obj.__json__):
81 return obj.__json__()
82 else:
83 return obj.__json__
84 else:
85 raise NotImplementedError
86
87
def _load_isolated(module_name):
    """Load a fresh, private copy of *module_name* that is NOT shared
    with ``sys.modules``, so the monkey-patching below cannot leak into
    other consumers of the canonical module.

    Replaces the previous ``imp.load_module(alias, *imp.find_module(...))``
    idiom -- the ``imp`` module was removed in Python 3.12.
    """
    spec = importlib.util.find_spec(module_name)
    if spec is None:
        raise ImportError('No module named %s' % module_name)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module


# Import simplejson
try:
    # import simplejson initially
    _sj = _load_isolated('simplejson')

    def extended_encode(obj):
        """``default=`` hook delegating to :func:`_obj_dump`."""
        try:
            return _obj_dump(obj)
        except NotImplementedError:
            pass
        raise TypeError("%r is not JSON serializable" % (obj,))

    # we handle decimals our own it makes unified behavior of json vs
    # simplejson
    sj_version = [int(x) for x in _sj.__version__.split('.')]
    major, minor = sj_version[0], sj_version[1]
    if major < 2 or (major == 2 and minor < 1):
        # simplejson < 2.1 doesnt support use_decimal
        _sj.dumps = functools.partial(
            _sj.dumps, default=extended_encode)
        _sj.dump = functools.partial(
            _sj.dump, default=extended_encode)
    else:
        _sj.dumps = functools.partial(
            _sj.dumps, default=extended_encode, use_decimal=False)
        _sj.dump = functools.partial(
            _sj.dump, default=extended_encode, use_decimal=False)
    simplejson = _sj

except ImportError:
    # no simplejson set it to None
    simplejson = None

# pre-initialize so the references below cannot raise NameError if the
# stdlib json import ever fails (previously ``stdlibjson = _json`` and
# ``elif _json`` would blow up in that case)
_json = None
try:
    # simplejson not found try out regular json module
    _json = _load_isolated('json')

    # extended JSON encoder for json
    class ExtendedEncoder(_json.JSONEncoder):
        def default(self, obj):
            try:
                return _obj_dump(obj)
            except NotImplementedError:
                pass
            raise TypeError("%r is not JSON serializable" % (obj,))

    # monkey-patch JSON encoder to use extended version
    _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
    _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)

except ImportError:
    json = None

stdlibjson = _json

# set all available json modules
if simplejson:
    json = _sj
elif _json:
    json = _json
else:
    raise ImportError('Could not find any json modules')
@@ -0,0 +1,124 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 """
23 Helper functions
24 """
25 import copy
26 import datetime
27
28 from collections import namedtuple, OrderedDict
29
30 _ = lambda x: x
31
32 time_deltas = OrderedDict()
33
34 time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1),
35 'label': '1 minute', 'minutes': 1}
36
37 time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5),
38 'label': '5 minutes', 'minutes': 5}
39 time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30),
40 'label': '30 minutes', 'minutes': 30}
41 time_deltas['1h'] = {'delta': datetime.timedelta(hours=1),
42 'label': '60 minutes', 'minutes': 60}
43 time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours',
44 'minutes': 60 * 4}
45 time_deltas['12h'] = {'delta': datetime.timedelta(hours=12),
46 'label': '12 hours', 'minutes': 60 * 12}
47 time_deltas['24h'] = {'delta': datetime.timedelta(hours=24),
48 'label': '24 hours', 'minutes': 60 * 24}
49 time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days',
50 'minutes': 60 * 24 * 3}
51 time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days',
52 'minutes': 60 * 24 * 7}
53 time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days',
54 'minutes': 60 * 24 * 14}
55 time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days',
56 'minutes': 60 * 24 * 31}
57 time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3),
58 'label': '3 months',
59 'minutes': 60 * 24 * 31 * 3}
60 time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6),
61 'label': '6 months',
62 'minutes': 60 * 24 * 31 * 6}
63 time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12),
64 'label': '12 months',
65 'minutes': 60 * 24 * 31 * 12}
66
67 # used in json representation
68 time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']})
69 for k, v in time_deltas.items()])
70 FlashMsg = namedtuple('FlashMsg', ['msg', 'level'])
71
72
def get_flash(request):
    """Collect pending flash messages from the session WITHOUT consuming
    them (peek), returning FlashMsg tuples in error/warning/notice
    order."""
    session = request.session
    collected = []
    for level in ('error', 'warning'):
        collected.extend(FlashMsg(msg, level)
                         for msg in session.peek_flash(level))
    # the default queue carries 'notice'-level messages
    collected.extend(FlashMsg(msg, 'notice')
                     for msg in session.peek_flash())
    return collected
83
84
def clear_flash(request):
    """Drop all pending flash messages (error, warning and default
    queues) from the session."""
    session = request.session
    session.pop_flash('error')
    session.pop_flash('warning')
    session.pop_flash()
89
90
def get_type_formatted_flash(request):
    """Pending flash messages as JSON-friendly dicts:
    ``[{'msg': ..., 'type': ...}, ...]``."""
    return [{'msg': flash.msg, 'type': flash.level}
            for flash in get_flash(request)]
94
95
def gen_pagination_headers(request, paginator):
    """Build pagination response headers (``x-total-count`` etc.) plus a
    ``link`` header carrying first/last/prev/next URLs.

    :param request: pyramid request (GET params + current route used)
    :param paginator: page object exposing ``item_count``, ``page``,
        ``items_per_page``, ``last_page``, ``next_page``,
        ``previous_page``
    """
    headers = {
        'x-total-count': str(paginator.item_count),
        'x-current-page': str(paginator.page),
        'x-items-per-page': str(paginator.items_per_page)
    }
    base_params = request.GET.dict_of_lists()

    def params_with_page(page):
        # fresh copy of the query params with ``page`` overridden
        new_params = copy.deepcopy(base_params)
        new_params['page'] = page
        return new_params

    last_params = params_with_page(paginator.last_page or 1)
    first_params = copy.deepcopy(base_params)
    first_params.pop('page', None)
    next_params = params_with_page(
        paginator.next_page or paginator.last_page or 1)
    prev_params = params_with_page(paginator.previous_page or 1)

    links = [
        'rel="last", <{}>'.format(
            request.current_route_url(_query=last_params)),
        'rel="first", <{}>'.format(
            request.current_route_url(_query=first_params)),
    ]
    # only advertise prev/next when they differ from the boundary pages
    if first_params != prev_params:
        links.append('rel="prev", <{}>'.format(
            request.current_route_url(_query=prev_params)))
    if last_params != next_params:
        links.append('rel="next", <{}>'.format(
            request.current_route_url(_query=next_params)))
    headers['link'] = '; '.join(links)
    return headers
@@ -0,0 +1,51 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 import re
23 from appenlight.lib.ext_json import json
24 from jinja2 import Markup, escape, evalcontextfilter
25
26 _paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
27
28
@evalcontextfilter
def nl2br(eval_ctx, value):
    """Jinja2 filter: escape *value*, wrap double-newline-separated
    paragraphs in ``<p>`` tags and turn single newlines into ``<br>``,
    honoring the environment's autoescape setting."""
    paragraphs = _paragraph_re.split(escape(value))
    if eval_ctx.autoescape:
        rendered = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br>\n'))
                               for p in paragraphs)
        return Markup(rendered)
    return '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n')
                       for p in paragraphs)
40
41
@evalcontextfilter
def toJSONUnsafe(eval_ctx, value):
    """Jinja2 filter: serialize *value* to JSON and escape characters
    that could break out of an HTML <script>/attribute context
    (``& < > " '``) to ``\\uXXXX`` sequences, then wrap in single
    quotes as a Markup string.

    Note the trailing replace rewrites literal ``\\n`` escape sequences
    inside the encoded JSON.
    """
    encoded = json.dumps(value).replace('&', '\\u0026') \
        .replace('<', '\\u003c') \
        .replace('>', '\\u003e') \
        .replace('"', '\\u0022') \
        .replace("'", '\\u0027') \
        .replace(r'\n', '/\\\n')
    # the original chained a second identical .replace('>', ...) --
    # removed as a no-op duplicate
    return Markup("'%s'" % encoded)
@@ -0,0 +1,59 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
# Common template for every appenlight redis key; the inner ``{}`` is
# substituted with a key-specific suffix below.
BASE = 'appenlight:data:{}'

# Central registry of redis key templates.  Most values keep ``{}``
# placeholders that are filled at the call site (timestamps, resource
# ids, ... -- exact arguments vary per key; confirm against callers).
REDIS_KEYS = {
    # locks guarding batched background tasks
    'tasks': {
        'add_reports_lock': BASE.format('add_reports_lock:{}'),
        'add_logs_lock': BASE.format('add_logs_lock:{}'),
    },
    # incrementing counters, global and per application
    'counters': {
        'reports_per_minute': BASE.format('reports_per_minute:{}'),
        'reports_per_minute_per_app': BASE.format(
            'reports_per_minute_per_app:{}:{}'),
        'reports_per_type': BASE.format('reports_per_type:{}'),
        'logs_per_minute': BASE.format('logs_per_minute:{}'),
        'logs_per_minute_per_app': BASE.format(
            'logs_per_minute_per_app:{}:{}'),
        'metrics_per_minute': BASE.format('metrics_per_minute:{}'),
        'metrics_per_minute_per_app': BASE.format(
            'metrics_per_minute_per_app:{}:{}'),
        'report_group_occurences': BASE.format('report_group_occurences:{}'),
        'report_group_occurences_10th': BASE.format(
            'report_group_occurences_10th:{}'),
        'report_group_occurences_100th': BASE.format(
            'report_group_occurences_100th:{}'),
    },
    # per-application rate-limit counters (consumed by lib.rate_limiting)
    'rate_limits': {
        # NOTE(review): key suffix says 'reports_limit' while the dict key
        # says 'reports_rate_limit' -- presumably harmless since only the
        # dict key is referenced by callers, but worth confirming.
        'per_application_reports_rate_limit': BASE.format(
            'per_application_reports_limit:{}:{}'),
        'per_application_logs_rate_limit': BASE.format(
            'per_application_logs_rate_limit:{}:{}'),
        'per_application_metrics_rate_limit': BASE.format(
            'per_application_metrics_rate_limit:{}:{}'),
    },
    # keys tracking recently-active applications and seen tags
    'apps_that_had_reports': BASE.format('apps_that_had_reports'),
    'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'),
    'reports_to_notify_per_type_per_app': BASE.format(
        'reports_to_notify_per_type_per_app:{}:{}'),
    'seen_tag_list': BASE.format('seen_tag_list')
}
@@ -0,0 +1,89 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 import appenlight.lib.helpers as helpers
23 import json
24 from pyramid.security import unauthenticated_userid
25 from appenlight.models.user import User
26
27
class CSRFException(Exception):
    """Signals a CSRF token validation failure.  Nothing in this module
    raises it -- presumably raised by request-validation code elsewhere
    (confirm at call sites)."""
    pass
30
31
class JSONException(Exception):
    """Raised by :func:`unsafe_json_body` when the request body is not
    valid JSON."""
    pass
34
35
def get_csrf_token(request):
    """Return the CSRF token stored in the request's session."""
    session = request.session
    return session.get_csrf_token()
38
39
def safe_json_body(request):
    """
    Best-effort JSON body accessor: returns ``None`` when the body is
    missing or malformed instead of raising.
    """
    try:
        body = request.json_body
    except ValueError:
        return None
    return body
48
49
def unsafe_json_body(request):
    """
    Strict JSON body accessor: raises :class:`JSONException` when the
    body cannot be deserialized.
    """
    try:
        body = request.json_body
    except ValueError:
        raise JSONException('Incorrect JSON')
    return body
58
59
def get_user(request):
    """Resolve the authenticated :class:`User` for *request*, or
    ``None`` (static-asset paths, missing/invalid user id, or unknown
    user).  On success also records ``appenlight.username`` in the WSGI
    environ for logging."""
    if request.path_info.startswith('/static'):
        return None
    user_id = unauthenticated_userid(request)
    try:
        user_id = int(user_id)
    except Exception:
        return None
    if not user_id:
        return None
    user = User.by_id(user_id)
    if not user:
        return None
    request.environ['appenlight.username'] = '%d:%s' % (
        user_id, user.user_name)
    return user
76
77
def es_conn(request):
    """Return the application-wide ``es_conn`` object (presumably an
    Elasticsearch connection -- confirm where the registry attribute is
    set) stored on the Pyramid registry."""
    registry = request.registry
    return registry.es_conn
80
81
def add_flash_to_headers(request, clear=True):
    """
    Adds pending flash messages to response, if clear is true clears out the
    flash queue
    """
    flash_msgs = helpers.get_type_formatted_flash(request)
    request.response.headers['x-flash-messages'] = json.dumps(flash_msgs)
    # honor the documented ``clear`` flag -- previously the queue was
    # emptied unconditionally, contradicting the docstring
    if clear:
        helpers.clear_flash(request)
@@ -0,0 +1,288 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 import logging
23 import operator
24
25 log = logging.getLogger(__name__)
26
27
class RuleException(Exception):
    """Base class for every rule-processing error in this module."""
30
31
class KeyNotFoundException(RuleException):
    """Raised when a nested field cannot be located in a structure."""
34
35
class UnknownTypeException(RuleException):
    """Raised when a field has no entry in the rule's type matrix."""
38
39
class BadConfigException(RuleException):
    """Raised when a rule configuration is malformed (e.g. unknown op)."""
42
43
class InvalidValueException(RuleException):
    """Raised when a value cannot be cast to its configured type."""
46
47
class RuleBase(object):
    """Shared helpers for rule objects: value extraction from dicts or
    object graphs, and text-to-type normalization driven by the instance's
    ``type_matrix`` mapping."""

    @classmethod
    def default_dict_struct_getter(cls, struct, field_name):
        """
        returns a key from dictionary based on field_name, if the name contains
        `:` then it means additional nesting levels should be checked for the
        key so `a:b:c` means return struct['a']['b']['c']

        :param struct: (dict) possibly nested dictionary to read from
        :param field_name: (str) colon-separated key path; falsy means no-op
        :return: the value found at the path (may be None for a missing leaf)
        """
        parts = field_name.split(':') if field_name else []
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = found.get(current_key)
            # NOTE(review): a falsy intermediate value (0, '', {}) also
            # triggers this branch even when the key actually exists -
            # confirm that is acceptable for the structures passed in.
            if not found and parts:
                raise KeyNotFoundException('Key not found in structure')
        return found

    @classmethod
    def default_obj_struct_getter(cls, struct, field_name):
        """
        returns a key from instance based on field_name, if the name contains
        `:` then it means additional nesting levels should be checked for the
        key so `a:b:c` means return struct.a.b.c

        :param struct: object to traverse via attribute access
        :param field_name: (str) colon-separated attribute path
        :return: the attribute found at the path (None for a missing leaf)
        """
        parts = field_name.split(':')
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = getattr(found, current_key, None)
            # same falsy-intermediate caveat as the dict getter above
            if not found and parts:
                raise KeyNotFoundException('Key not found in structure')
        return found

    def normalized_type(self, field, value):
        """
        Converts text values from self.conf_value based on type_matrix below
        check_matrix defines what kind of checks we can perform on a field
        value based on field name

        :raises UnknownTypeException: when ``field`` has no type entry
        :raises InvalidValueException: when the cast fails
        """
        f_type = self.type_matrix.get(field)
        if f_type:
            cast_to = f_type['type']
        else:
            raise UnknownTypeException('Unknown type')

        if value is None:
            return None

        try:
            if cast_to == 'int':
                return int(value)
            elif cast_to == 'float':
                return float(value)
            elif cast_to == 'unicode':
                return str(value)
            # NOTE(review): any other cast_to falls through and returns
            # None implicitly - confirm whether that should raise instead
        except ValueError as exc:
            raise InvalidValueException(exc)
113
114
class Rule(RuleBase):
    """A single (possibly nested) matching rule evaluated against a
    dict/object structure.  ``__AND__``/``__OR__`` field names recurse into
    composite sub-rules; any other field is compared with an operator."""

    def __init__(self, config, type_matrix,
                 struct_getter=RuleBase.default_dict_struct_getter,
                 config_manipulator=None):
        """

        :param config: dict - contains rule configuration
            example::
                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "field": "__AND__",
                            "rules": [
                                {
                                    "op": "ge",
                                    "field": "occurences",
                                    "value": "10"
                                },
                                {
                                    "op": "ge",
                                    "field": "priority",
                                    "value": "4"
                                }
                            ]
                        },
                        {
                            "op": "eq",
                            "field": "http_status",
                            "value": "500"
                        }
                    ]
                }
        :param type_matrix: dict - contains map of type casts
            example::
                {
                    'http_status': 'int',
                    'priority': 'unicode',
                }
        :param struct_getter: callable - used to grab the value of field from
            the structure passed to match() based
            on key, default
        :param config_manipulator: callable - optional hook invoked with this
            rule instance right after construction; may mutate ``config`` and
            ``type_matrix`` in place (see RuleService.rule_from_config)
        """
        self.type_matrix = type_matrix
        self.config = config
        self.struct_getter = struct_getter
        self.config_manipulator = config_manipulator
        if config_manipulator:
            config_manipulator(self)

    def subrule_check(self, rule_config, struct):
        # build a child Rule sharing our type matrix and manipulator,
        # then evaluate it against the same structure
        rule = Rule(rule_config, self.type_matrix,
                    config_manipulator=self.config_manipulator)
        return rule.match(struct)

    def match(self, struct):
        """
        Check if rule matched for this specific report
        First tries report value, then tests tags in not found, then finally
        report group
        """
        field_name = self.config.get('field')
        test_value = self.config.get('value')

        if not field_name:
            return False

        # composite nodes delegate to AND/OR over their sub-rules
        if field_name == '__AND__':
            rule = AND(self.config['rules'], self.type_matrix,
                       config_manipulator=self.config_manipulator)
            return rule.match(struct)
        elif field_name == '__OR__':
            rule = OR(self.config['rules'], self.type_matrix,
                      config_manipulator=self.config_manipulator)
            return rule.match(struct)

        if test_value is None:
            return False

        # both sides are normalized through the type matrix so string
        # config values compare correctly against typed struct values
        try:
            struct_value = self.normalized_type(field_name,
                                                self.struct_getter(struct,
                                                                   field_name))
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False

        try:
            test_value = self.normalized_type(field_name, test_value)
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False

        # non-string ops are resolved as functions from the operator module
        # (eq, ge, lt, ...); TypeError from incomparable values -> no match
        if self.config['op'] not in ('startswith', 'endswith', 'contains'):
            try:
                return getattr(operator,
                               self.config['op'])(struct_value, test_value)
            except TypeError:
                return False
        elif self.config['op'] == 'startswith':
            return struct_value.startswith(test_value)
        elif self.config['op'] == 'endswith':
            return struct_value.endswith(test_value)
        elif self.config['op'] == 'contains':
            return test_value in struct_value
        raise BadConfigException('Invalid configuration, '
                                 'unknown operator: {}'.format(self.config))

    def __repr__(self):
        return '<Rule {} {}>'.format(self.config.get('field'),
                                     self.config.get('value'))
227
228
class AND(Rule):
    """Composite rule that matches only when every subrule matches."""

    def __init__(self, rules, *args, **kwargs):
        super(AND, self).__init__({}, *args, **kwargs)
        self.rules = rules

    def match(self, struct):
        results = [self.subrule_check(conf, struct) for conf in self.rules]
        return all(results)
237
238
class OR(Rule):
    """Composite rule that matches when at least one subrule matches."""

    def __init__(self, rules, *args, **kwargs):
        super(OR, self).__init__({}, *args, **kwargs)
        self.rules = rules

    def match(self, struct):
        results = [self.subrule_check(conf, struct) for conf in self.rules]
        return any(results)
247
248
class RuleService(object):
    """Factory helpers that build Rule instances from stored alert
    configuration."""

    @staticmethod
    def rule_from_config(config, field_mappings, labels_dict,
                         manipulator_func=None):
        """
        Returns modified rule with manipulator function
        By default manipulator function replaces field id from labels_dict
        with current field id proper for the rule from fields_mappings

        because label X_X id might be pointing different value on next request
        when new term is returned from elasticsearch - this ensures things
        are kept 1:1 all the time

        :param config: (dict) stored rule configuration
        :param field_mappings: (dict) current field id -> {'agg', 'key'} map
        :param labels_dict: (dict) label id -> {'agg', 'key'} map
        :param manipulator_func: optional override for the default rewriter
        """
        # reverse lookup: (agg, key) pair -> current label id
        rev_map = {}
        for k, v in labels_dict.items():
            rev_map[(v['agg'], v['key'],)] = k

        if manipulator_func is None:
            # default rewriter: runs once per (sub)rule at Rule construction
            # time and mutates rule.config / rule.type_matrix in place
            def label_rewriter_func(rule):
                field = rule.config.get('field')
                if not field or rule.config['field'] in ['__OR__', '__AND__']:
                    return

                to_map = field_mappings.get(rule.config['field'])

                # we need to replace series field with _AE_NOT_FOUND_ to not match
                # accidentally some other field which happens to have the series
                # that was used when the alert was created
                if to_map:
                    to_replace = rev_map.get((to_map['agg'], to_map['key'],),
                                             '_AE_NOT_FOUND_')
                else:
                    to_replace = '_AE_NOT_FOUND_'

                rule.config['field'] = to_replace
                rule.type_matrix[to_replace] = {"type": 'float'}

            manipulator_func = label_rewriter_func

        return Rule(config, {}, config_manipulator=manipulator_func)
@@ -0,0 +1,65 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 from ziggurat_foundations.models.services.external_identity import \
23 ExternalIdentityService
24 from appenlight.models.external_identity import ExternalIdentity
25
26
def handle_social_data(request, user, social_data):
    """Attach or refresh an external (social-auth) identity on *user*.

    Looks up an existing external identity by provider id (with a legacy
    fallback for old google accounts keyed by e-mail), replaces it when
    found, and appends a fresh ``ExternalIdentity`` row populated from the
    auth provider's credentials.  Flashes an error and returns ``False``
    when the provider supplied no external user id.

    :param request: current request (session used for flash messages)
    :param user: the local account to link the identity to
    :param social_data: dict with ``user`` info and ``credentials`` from
        the social-auth callback
    """
    update_identity = False

    extng_id = ExternalIdentityService.by_external_id_and_provider(
        social_data['user']['id'],
        social_data['credentials'].provider_name
    )

    # fix legacy accounts with wrong google ID
    if not extng_id and social_data['credentials'].provider_name == 'google':
        extng_id = ExternalIdentityService.by_external_id_and_provider(
            social_data['user']['email'],
            social_data['credentials'].provider_name
        )

    if extng_id:
        # stale row: drop it and re-create below with fresh tokens
        extng_id.delete()
        update_identity = True

    if not social_data['user']['id']:
        request.session.flash(
            'No external user id found? Perhaps permissions for '
            'authentication are set incorrectly', 'error')
        return False

    if not extng_id or update_identity:
        if not update_identity:
            request.session.flash('Your external identity is now '
                                  'connected with your account')
        ex_identity = ExternalIdentity()
        ex_identity.external_id = social_data['user']['id']
        ex_identity.external_user_name = social_data['user']['user_name']
        ex_identity.provider_name = social_data['credentials'].provider_name
        ex_identity.access_token = social_data['credentials'].token
        ex_identity.token_secret = social_data['credentials'].token_secret
        ex_identity.alt_token = social_data['credentials'].refresh_token
        user.external_identities.append(ex_identity)
        # clear the pending social-auth payload from the session
        request.session.pop('zigg.social_auth', None)
@@ -0,0 +1,54 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 import binascii
23 import sqlalchemy.types as types
24
25 import appenlight.lib.encryption as encryption
26
27
class BinaryHex(types.TypeDecorator):
    """Column type that stores hex strings as raw binary, converting
    hex -> bytes on the way into the database and back on the way out."""

    impl = types.LargeBinary

    def process_bind_param(self, value, dialect):
        if value is None:
            return None
        return binascii.unhexlify(value)

    def process_result_value(self, value, dialect):
        if value is None:
            return None
        return binascii.hexlify(value)
41
42
class EncryptedUnicode(types.TypeDecorator):
    """Column type that transparently fernet-encrypts unicode values at
    rest; falsy values ('' / None) pass through untouched."""

    impl = types.Unicode

    def process_bind_param(self, value, dialect):
        return encryption.encrypt_fernet(value) if value else value

    def process_result_value(self, value, dialect):
        return encryption.decrypt_fernet(value) if value else value
@@ -0,0 +1,495 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 """
23 Utility functions.
24 """
25 import logging
26 import requests
27 import hashlib
28 import json
29 import copy
30 import uuid
31 import appenlight.lib.helpers as h
32 from collections import namedtuple
33 from datetime import timedelta, datetime, date
34 from dogpile.cache.api import NO_VALUE
35 from appenlight.models import Datastores
36 from appenlight.validators import (LogSearchSchema,
37 TagListSchema,
38 accepted_search_params)
39 from itsdangerous import TimestampSigner
40 from ziggurat_foundations.permissions import ALL_PERMISSIONS
41 from dateutil.relativedelta import relativedelta
42 from dateutil.rrule import rrule, MONTHLY, DAILY
43
44 log = logging.getLogger(__name__)
45
46
47 Stat = namedtuple('Stat', 'start_interval value')
48
49
def default_extractor(item):
    """Return the ``start_interval`` of *item*.

    Works for both objects exposing a ``start_interval`` attribute and
    mappings keyed by ``'start_interval'``.

    :param item: item to extract date from
    """
    try:
        return item.start_interval
    except AttributeError:
        return item['start_interval']
57
58
59 # fast gap generator
def gap_gen_default(start, step, itemiterator, end_time=None,
                    iv_extractor=None):
    """ generates a list of time/value items based on step and itemiterator
    if there are entries missing from iterator time/None will be returned
    instead
    :param start - datetime - what time should we start generating our values
    :param step - timedelta - stepsize
    :param itemiterator - iterable - we will check this iterable for values
    corresponding to generated steps
    :param end_time - datetime - when last step is >= end_time stop iterating
    :param iv_extractor - extracts current step from iterable items

    NOTE(review): assumes itemiterator is sorted ascending by interval
    start and that each interval is aligned to ``step``; an item whose
    interval falls before the current step is silently skipped - confirm
    callers guarantee this.
    """

    if not iv_extractor:
        iv_extractor = default_extractor

    next_step = start
    minutes = step.total_seconds() / 60.0
    # align the first step down to a minute boundary divisible by the
    # step size (terminates at minute == 0 at the latest)
    while next_step.minute % minutes != 0:
        next_step = next_step.replace(minute=next_step.minute - 1)
    for item in itemiterator:
        item_start_interval = iv_extractor(item)
        # do we have a match for current time step in our data?
        # no gen a new tuple with 0 values
        while next_step < item_start_interval:
            yield Stat(next_step, None)
            next_step = next_step + step
        if next_step == item_start_interval:
            yield Stat(item_start_interval, item)
            next_step = next_step + step
    # pad the tail with empty stats up to end_time, if requested
    if end_time:
        while next_step < end_time:
            yield Stat(next_step, None)
            next_step = next_step + step
94
95
class DateTimeEncoder(json.JSONEncoder):
    """ Simple datetime to ISO encoder for json serialization"""

    def default(self, obj):
        # datetime is a subclass of date, so a single isinstance check
        # covers both (the previous second `datetime` branch was
        # unreachable); isoformat() gives the type-appropriate string
        if isinstance(obj, date):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)
105
106
def cometd_request(secret, endpoint, payload, throw_exceptions=False,
                   servers=None):
    """POST *payload* to *endpoint* on every configured channelstream
    server and return the list of JSON responses (an empty dict per server
    whose request failed, unless ``throw_exceptions`` is set).

    :param secret: signing secret for the timestamp signature header
    :param endpoint: path appended to each server URL and signed
    :param payload: JSON-serializable body (datetimes handled by
        DateTimeEncoder)
    :param throw_exceptions: re-raise request errors instead of swallowing
    :param servers: list of {'secret': ..., 'server': ...} dicts
    """
    responses = []
    if not servers:
        servers = []

    signer = TimestampSigner(secret)
    sig_for_server = signer.sign(endpoint)
    # NOTE(review): the per-server ``secret`` unpacked below shadows the
    # argument but is never used - every server gets a signature made
    # from the outer ``secret``. Confirm whether per-server secrets
    # should be signing instead.
    for secret, server in [(s['secret'], s['server']) for s in servers]:
        response = {}
        secret_headers = {'x-channelstream-secret': sig_for_server,
                          'x-channelstream-endpoint': endpoint,
                          'Content-Type': 'application/json'}
        url = '%s%s' % (server, endpoint)
        try:
            # verify=False disables TLS certificate checking for
            # internal channelstream servers
            response = requests.post(url,
                                     data=json.dumps(payload,
                                                     cls=DateTimeEncoder),
                                     headers=secret_headers,
                                     verify=False,
                                     timeout=2).json()
        except requests.exceptions.RequestException as e:
            if throw_exceptions:
                raise
        # on failure the empty ``response`` dict is appended as a placeholder
        responses.append(response)
    return responses
133
134
def add_cors_headers(response):
    """Attach permissive CORS headers (wildcard origin) to *response*."""
    cors_headers = [
        ('Access-Control-Allow-Origin', '*'),
        ('XDomainRequestAllowed', '1'),
        ('Access-Control-Allow-Methods', 'GET, POST, OPTIONS'),
        # ('Access-Control-Allow-Credentials', 'true'),
        ('Access-Control-Allow-Headers',
         'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie'),
        ('Access-Control-Max-Age', '86400'),
    ]
    for header_name, header_value in cors_headers:
        response.headers.add(header_name, header_value)
144
145
146 from sqlalchemy.sql import compiler
147 from psycopg2.extensions import adapt as sqlescape
148
149
# or use the appropriate escape function from your db driver
151
def compile_query(query):
    """Render a SQLAlchemy ORM query as a fully-interpolated SQL string
    (parameters escaped with psycopg2's ``adapt``), mainly for logging or
    debugging.

    NOTE(review): relies on legacy internals - ``dialect.encoding`` and
    direct ``SQLCompiler`` construction; verify against the SQLAlchemy
    version in use.
    """
    dialect = query.session.bind.dialect
    statement = query.statement
    comp = compiler.SQLCompiler(dialect, statement)
    comp.compile()
    enc = dialect.encoding
    params = {}
    for k, v in comp.params.items():
        if isinstance(v, str):
            v = v.encode(enc)
        # escape each bound value with the driver-provided adapter
        params[k] = sqlescape(v)
    return (comp.string.encode(enc) % params).decode(enc)
164
165
def convert_es_type(input_data):
    """
    This might need to convert some text or other types to corresponding ES types
    """
    return '%s' % (input_data,)
171
172
ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch'])


def parse_proto(input_data):
    """Parse a dotted version string ('1.2' or '1.2.3') into a
    ProtoVersion triple, zero-padding missing components; any parse
    failure yields the sentinel (99, 99, 99)."""
    try:
        parts = [int(chunk) for chunk in input_data.split('.')]
        parts.extend([0] * (3 - len(parts)))
        return ProtoVersion(*parts)
    except Exception as e:
        log.info('Unknown protocol version: %s' % e)
        return ProtoVersion(99, 99, 99)
185
186
def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
                          ixtypes=None):
    """
    This function limits the search to 6 months by default so we don't have to
    query 300 elasticsearch indices for 20 years of historical data for example

    :param start_date: (datetime) range start; defaults to months_in_past
        before end_date
    :param end_date: (datetime) range end; defaults to utcnow when start is
        also missing
    :param months_in_past: (int) default lookback window
    :param ixtypes: list of index categories ('reports', 'logs', 'metrics',
        'uptime', 'slow_calls'); defaults to reports/metrics/logs
    :return: list of existing index names (monthly and daily) in range
    """

    # should be cached later
    def get_possible_names():
        return list(Datastores.es.aliases().keys())

    possible_names = get_possible_names()
    # build name patterns like 'rcae_r_%s' for each requested category
    es_index_types = []
    if not ixtypes:
        ixtypes = ['reports', 'metrics', 'logs']
    for t in ixtypes:
        if t == 'reports':
            es_index_types.append('rcae_r_%s')
        elif t == 'logs':
            es_index_types.append('rcae_l_%s')
        elif t == 'metrics':
            es_index_types.append('rcae_m_%s')
        elif t == 'uptime':
            es_index_types.append('rcae_u_%s')
        elif t == 'slow_calls':
            es_index_types.append('rcae_sc_%s')

    # derive whichever end of the range is missing
    if start_date:
        start_date = copy.copy(start_date)
    else:
        if not end_date:
            end_date = datetime.utcnow()
        start_date = end_date + relativedelta(months=months_in_past * -1)

    if not end_date:
        end_date = start_date + relativedelta(months=months_in_past)

    # one candidate per month in range (capped at 36 months)
    index_dates = list(rrule(MONTHLY,
                             dtstart=start_date.date().replace(day=1),
                             until=end_date.date(),
                             count=36))
    index_names = []
    for ix_type in es_index_types:
        # keep only index names that actually exist in elasticsearch
        to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates
                     if ix_type % d.strftime('%Y_%m') in possible_names]
        index_names.extend(to_extend)
        # also include daily indices in range (capped at 366 days)
        for day in list(rrule(DAILY, dtstart=start_date.date(),
                              until=end_date.date(), count=366)):
            ix_name = ix_type % day.strftime('%Y_%m_%d')
            if ix_name in possible_names:
                index_names.append(ix_name)
    return index_names
239
240
def build_filter_settings_from_query_dict(
        request, params=None, override_app_ids=None,
        resource_permissions=None):
    """
    Builds list of normalized search terms for ES from query params
    ensuring application list is restricted to only applications user
    has access to

    :param params (dictionary)
    :param override_app_ids - list of application id's to use instead of
    applications user normally has access to
    :param resource_permissions - permission names required on applications;
    defaults to ['view']
    :return: deserialized filter settings dict with a 'tags' key holding
    every non-standard query param as an 'eq' tag filter
    """
    params = copy.deepcopy(params)
    applications = []
    if not resource_permissions:
        resource_permissions = ['view']

    if request.user:
        applications = request.user.resources_with_perms(
            resource_permissions, resource_types=['application'])

    # CRITICAL - this ensures our resultset is limited to only the ones
    # user has view permissions
    all_possible_app_ids = set([app.resource_id for app in applications])

    # if override is present we force permission for app to be present
    # this allows users to see dashboards and applications they would
    # normally not be able to

    if override_app_ids:
        all_possible_app_ids = set(override_app_ids)

    schema = LogSearchSchema().bind(resources=all_possible_app_ids)
    tag_schema = TagListSchema()
    filter_settings = schema.deserialize(params)
    # every param that is not a recognized search param becomes a tag filter
    tag_list = []
    for k, v in list(filter_settings.items()):
        if k in accepted_search_params:
            continue
        tag_list.append({"name": k, "value": v, "op": 'eq'})
        # remove the key from filter_settings
        filter_settings.pop(k, None)
    tags = tag_schema.deserialize(tag_list)
    filter_settings['tags'] = tags
    return filter_settings
286
287
def gen_uuid():
    """Return a random UUID4 in its canonical 36-character string form."""
    return '{}'.format(uuid.uuid4())
290
291
def gen_uuid4_sha_hex():
    """Return the SHA-1 hex digest of a random UUID4's raw bytes."""
    random_bytes = uuid.uuid4().bytes
    return hashlib.sha1(random_bytes).hexdigest()
294
295
def permission_tuple_to_dict(data):
    """Serialize a ziggurat permission tuple into a plain dictionary,
    expanding optional user/resource/group references when present."""
    is_all_perms = data.perm_name == ALL_PERMISSIONS
    out = {
        "user_name": data.user.user_name if data.user else None,
        "perm_name": '__all_permissions__' if is_all_perms else data.perm_name,
        "owner": data.owner,
        "type": data.type,
        "resource_name": None,
        "resource_type": None,
        "resource_id": None,
        "group_name": None,
        "group_id": None
    }
    if data.resource:
        out['resource_name'] = data.resource.resource_name
        out['resource_type'] = data.resource.resource_type
        out['resource_id'] = data.resource.resource_id
    if data.group:
        out['group_name'] = data.group.group_name
        out['group_id'] = data.group.id
    return out
320
321
def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
                       gap_gen=None, db_session=None, step_interval=None,
                       iv_extractor=None,
                       rerange=False, *args, **kwargs):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into daily buckets - if the stats_since and end time give a
    delta bigger than 24hours, then only "todays" data is computed on the fly

    :param request: (request) request object
    :param stats_since: (datetime) start date of buckets range
    :param end_time: (datetime) end date of buckets range - utcnow() if None
    :param fn: (callable) callable to use to populate buckets should have
    following signature:
        def get_data(request, since_when, until, *args, **kwargs):

    :param cache_key: (string) cache key that will be used to build bucket
    caches
    :param gap_gen: (callable) gap generator - should return step intervals
    to use with out `fn` callable
    :param db_session: (Session) sqlalchemy session
    :param step_interval: (timedelta) optional step interval if we want to
    override the default determined from total start/end time delta
    :param iv_extractor: (callable) used to get step intervals from data
    returned by `fn` callable
    :param rerange: (bool) handy if we want to change ranges from hours to
    days when cached data is missing - will shorten execution time if `fn`
    callable supports that and we are working with multiple rows - like metrics
    :param args:
    :param kwargs:

    :return: dict mapping interval start -> bucket data
    """
    if not end_time:
        end_time = datetime.utcnow().replace(second=0, microsecond=0)
    delta = end_time - stats_since
    # if smaller than 3 days we want to group by 5min else by 1h,
    # for 60 min group by min
    if not gap_gen:
        gap_gen = gap_gen_default
    if not iv_extractor:
        iv_extractor = default_extractor

    # do not use custom interval if total time range with new iv would exceed
    # end time
    if not step_interval or stats_since + step_interval >= end_time:
        if delta < h.time_deltas.get('12h')['delta']:
            step_interval = timedelta(seconds=60)
        elif delta < h.time_deltas.get('3d')['delta']:
            step_interval = timedelta(seconds=60 * 5)
        elif delta > h.time_deltas.get('2w')['delta']:
            step_interval = timedelta(days=1)
        else:
            step_interval = timedelta(minutes=60)

    # hourly/daily buckets start at midnight so cache keys stay stable
    if step_interval >= timedelta(minutes=60):
        log.info('cached_buckets:{}: adjusting start time '
                 'for hourly or daily intervals'.format(cache_key))
        stats_since = stats_since.replace(hour=0, minute=0)

    # list of every interval start in the requested range
    ranges = [i.start_interval for i in list(gap_gen(stats_since,
                                                     step_interval, [],
                                                     end_time=end_time))]
    buckets = {}
    storage_key = 'buckets:' + cache_key + '{}|{}'
    # this means we basically cache per hour in 3-14 day intervals but i think
    # its fine at this point - will be faster than db access anyways

    if len(ranges) >= 1:
        last_ranges = [ranges[-1]]
    else:
        last_ranges = []
    # only hourly-or-coarser intervals are worth caching per bucket
    if step_interval >= timedelta(minutes=60):
        for r in ranges:
            k = storage_key.format(step_interval.total_seconds(), r)
            value = request.registry.cache_regions.redis_day_30.get(k)
            # last buckets are never loaded from cache
            is_last_result = (
                r >= end_time - timedelta(hours=6) or r in last_ranges)
            if value is not NO_VALUE and not is_last_result:
                log.info("cached_buckets:{}: "
                         "loading range {} from cache".format(cache_key, r))
                buckets[r] = value
            else:
                log.info("cached_buckets:{}: "
                         "loading range {} from storage".format(cache_key, r))
                range_size = step_interval
                # optionally widen a missed hourly bucket to a whole day
                # so one `fn` call repopulates 24 buckets at once
                if (step_interval == timedelta(minutes=60) and
                        not is_last_result and rerange):
                    range_size = timedelta(days=1)
                    r = r.replace(hour=0, minute=0)
                    log.info("cached_buckets:{}: "
                             "loading collapsed "
                             "range {} {}".format(cache_key, r,
                                                  r + range_size))
                bucket_data = fn(
                    request, r, r + range_size, step_interval,
                    gap_gen, bucket_count=len(ranges), *args, **kwargs)
                # store every freshly computed bucket back into the cache
                for b in bucket_data:
                    b_iv = iv_extractor(b)
                    buckets[b_iv] = b
                    k2 = storage_key.format(
                        step_interval.total_seconds(), b_iv)
                    request.registry.cache_regions.redis_day_30.set(k2, b)
                log.info("cached_buckets:{}: saving cache".format(cache_key))
    else:
        # bucket count is 1 for short time ranges <= 24h from now
        bucket_data = fn(request, stats_since, end_time, step_interval,
                        gap_gen, bucket_count=1, *args, **kwargs)
        for b in bucket_data:
            buckets[iv_extractor(b)] = b
    return buckets
433
434
def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
                          db_session=None, *args, **kwargs):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into 2 buckets - cached "since_from" bucket and "today"
    bucket - then the data can be reduced into single value

    Data is cached if the stats_since and end time give a delta bigger
    than 24hours - then only 24h is computed on the fly

    :return: (older_data, todays_data) tuple; older_data is None for
    short ranges that fit entirely in the fresh bucket
    """
    if not end_time:
        end_time = datetime.utcnow().replace(second=0, microsecond=0)
    delta = end_time - stats_since

    # align the start to midnight so cache keys stay stable across requests
    if delta >= timedelta(minutes=60):
        log.info('cached_split_data:{}: adjusting start time '
                 'for hourly or daily intervals'.format(cache_key))
        stats_since = stats_since.replace(hour=0, minute=0)

    storage_key = 'buckets_split_data:' + cache_key + ':{}|{}'
    # boundary between the cached "old" bucket and the fresh "today" bucket
    old_end_time = end_time.replace(hour=0, minute=0)

    final_storage_key = storage_key.format(delta.total_seconds(),
                                           old_end_time)
    older_data = None

    cdata = request.registry.cache_regions.redis_day_7.get(
        final_storage_key)

    if cdata:
        log.info("cached_split_data:{}: found old "
                 "bucket data".format(cache_key))
        older_data = cdata

    # range spans more than 24h and the old bucket was not cached:
    # compute it now and cache it
    if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and
            not cdata):
        log.info("cached_split_data:{}: didn't find the "
                 "start bucket in cache so load older data".format(cache_key))
        recent_stats_since = old_end_time
        older_data = fn(request, stats_since, recent_stats_since,
                        db_session=db_session, *args, **kwargs)
        request.registry.cache_regions.redis_day_7.set(final_storage_key,
                                                       older_data)
    elif stats_since < end_time - h.time_deltas.get('24h')['delta']:
        # old bucket came from cache - only the fresh tail is recomputed
        recent_stats_since = old_end_time
    else:
        # short range: everything is computed fresh, no old bucket
        recent_stats_since = stats_since

    log.info("cached_split_data:{}: loading fresh "
             "data bucksts from last 24h ".format(cache_key))
    todays_data = fn(request, recent_stats_since, end_time,
                     db_session=db_session, *args, **kwargs)
    return older_data, todays_data
487
488
def in_batches(seq, size):
    """
    Split a sequence into consecutive batches of at most *size* items.

    :param seq: sliceable sequence
    :param size: (int) batch length; the final batch may be shorter
    :return: generator of slices
    """
    starts = range(0, len(seq), size)
    return (seq[start:start + size] for start in starts)
@@ -0,0 +1,147 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 import logging
23 import uuid
24
25 from datetime import datetime
26
27 log = logging.getLogger(__name__)
28
29
def parse_airbrake_xml(request):
    """Parse an Airbrake v2 XML error notice into an appenlight report dict.

    Reads the already-parsed etree from ``request.context.airbrake_xml_etree``
    and extracts error class/message, notifier, server environment, HTTP
    request data (CGI vars, GET/POST/COOKIE/SESSION) and the backtrace.

    :param request: pyramid-style request carrying the XML etree in context
    :return: dict describing the error report
    """
    root = request.context.airbrake_xml_etree
    error = root.find('error')
    notifier = root.find('notifier')
    server_env = root.find('server-environment')
    request_data = root.find('request')
    user = root.find('current-user')
    if request_data is not None:
        cgi_data = request_data.find('cgi-data')
        if cgi_data is None:
            # no <cgi-data> element: iterate nothing below
            cgi_data = []

    error_dict = {
        'class_name': error.findtext('class') or '',
        'error': error.findtext('message') or '',
        "occurences": 1,
        "http_status": 500,
        "priority": 5,
        "server": 'unknown',
        'url': 'unknown', 'request': {}
    }
    if user is not None:
        error_dict['username'] = user.findtext('username') or \
                                 user.findtext('id')
    if notifier is not None:
        error_dict['client'] = notifier.findtext('name')

    if server_env is not None:
        error_dict["server"] = server_env.findtext('hostname', 'unknown')

    # non-HTTP_* CGI vars that are still worth keeping on the report
    whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME',
                         'CONTENT_TYPE', 'HTTP_REFERER']

    if request_data is not None:
        error_dict['url'] = request_data.findtext('url', 'unknown')
        component = request_data.findtext('component')
        action = request_data.findtext('action')
        if component and action:
            error_dict['view_name'] = '%s:%s' % (component, action)
        for node in cgi_data:
            key = node.get('key')
            if key.startswith('HTTP') or key in whitelist_environ:
                error_dict['request'][key] = node.text
            elif 'query_parameters' in key:
                error_dict['request']['GET'] = {}
                for x in node:
                    error_dict['request']['GET'][x.get('key')] = x.text
            elif 'request_parameters' in key:
                error_dict['request']['POST'] = {}
                for x in node:
                    error_dict['request']['POST'][x.get('key')] = x.text
            elif key.endswith('cookie'):
                error_dict['request']['COOKIE'] = {}
                for x in node:
                    error_dict['request']['COOKIE'][x.get('key')] = x.text
            elif key.endswith('request_id'):
                error_dict['request_id'] = node.text
            elif key.endswith('session'):
                error_dict['request']['SESSION'] = {}
                for x in node:
                    error_dict['request']['SESSION'][x.get('key')] = x.text
            else:
                if key in ['rack.session.options']:
                    # skip secret configs
                    continue
                try:
                    if len(node):
                        # nested element: store children as a key/value dict
                        error_dict['request'][key] = dict(
                            [(x.get('key'), x.text,) for x in node])
                    else:
                        error_dict['request'][key] = node.text
                except Exception as e:
                    log.warning('Airbrake integration exception: %s' % e)

    # raw cookie header is redundant with the parsed COOKIE dict
    error_dict['request'].pop('HTTP_COOKIE', '')

    # NOTE(review): REMOTE_ADDR / HTTP_USER_AGENT were stored under
    # error_dict['request'] above, not on error_dict itself, so these two
    # pops always yield '' - was error_dict['request'].pop(...) intended?
    error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '')
    error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '')
    if 'request_id' not in error_dict:
        error_dict['request_id'] = str(uuid.uuid4())
    if request.context.possibly_public:
        # set ip for reports that come from airbrake js client
        error_dict["timestamp"] = datetime.utcnow()
        if request.environ.get("HTTP_X_FORWARDED_FOR"):
            # first entry of X-Forwarded-For is the originating client
            ip = request.environ.get("HTTP_X_FORWARDED_FOR", '')
            first_ip = ip.split(',')[0]
            remote_addr = first_ip.strip()
        else:
            remote_addr = (request.environ.get("HTTP_X_REAL_IP") or
                           request.environ.get('REMOTE_ADDR'))
        error_dict["ip"] = remote_addr

    # key substrings whose values must never be stored in clear text
    blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
                 'session', 'test']

    lines = []
    for l in error.find('backtrace'):
        lines.append({'file': l.get("file", ""),
                      'line': l.get("number", ""),
                      'fn': l.get("method", ""),
                      'module': l.get("module", ""),
                      # airbrake has no context line; reuse the method name
                      'cline': l.get("method", ""),
                      'vars': {}})
    # airbrake sends innermost frame first; appenlight wants it last
    error_dict['traceback'] = list(reversed(lines))
    # filtering is not provided by airbrake
    keys_to_check = (
        error_dict['request'].get('COOKIE'),
        error_dict['request'].get('COOKIES'),
        error_dict['request'].get('POST'),
        error_dict['request'].get('SESSION'),
    )
    # mask any sensitive-looking values in the collected request data
    for source in [_f for _f in keys_to_check if _f]:
        for k in source.keys():
            for bad_key in blacklist:
                if bad_key in k.lower():
                    source[k] = '***'

    return error_dict
@@ -0,0 +1,61 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 from datetime import tzinfo, timedelta, datetime
23 from dateutil.relativedelta import relativedelta
24 import logging
25
26 log = logging.getLogger(__name__)
27
28
def to_relativedelta(time_delta):
    """Convert a ``datetime.timedelta`` into an equivalent
    ``dateutil.relativedelta.relativedelta``.

    :param time_delta: a ``datetime.timedelta`` instance
    :return: relativedelta built from the whole seconds plus the
        microsecond remainder of *time_delta*
    """
    whole_seconds = int(time_delta.total_seconds())
    return relativedelta(seconds=whole_seconds,
                         microseconds=time_delta.microseconds)
32
33
def convert_date(date_str, return_utcnow_if_wrong=True,
                 normalize_future=False):
    """Parse a client-supplied timestamp into a naive datetime.

    Accepted inputs: a ``datetime`` (tzinfo is stripped), ISO-like strings
    ``YYYY-MM-DDTHH:MM:SS[.ffffff][Z...]`` and the legacy client format
    ``YYYY-MM-DD HH:MM:SS,ffffff``.

    :param date_str: datetime instance or string to parse
    :param return_utcnow_if_wrong: when True, empty or unparsable input
        yields the current UTC time instead of None
    :param normalize_future: when True, dates more than 3 minutes in the
        future are clamped to the current UTC time
    :return: naive ``datetime`` or None
    """
    utcnow = datetime.utcnow()
    if isinstance(date_str, datetime):
        # already parsed - just drop tzinfo
        return date_str.replace(tzinfo=None)
    if not date_str and return_utcnow_if_wrong:
        return utcnow
    try:
        trimmed = date_str
        if 'Z' in trimmed:
            trimmed = trimmed[:trimmed.index('Z')]
        iso_fmt = ('%Y-%m-%dT%H:%M:%S.%f' if '.' in trimmed
                   else '%Y-%m-%dT%H:%M:%S')
        try:
            date = datetime.strptime(trimmed, iso_fmt)
        except Exception:
            # bw compat with old client
            date = datetime.strptime(trimmed, '%Y-%m-%d %H:%M:%S,%f')
    except Exception:
        date = utcnow if return_utcnow_if_wrong else None
    if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
        log.warning('time %s in future + 3 min, normalizing' % date)
        return utcnow
    return date
@@ -0,0 +1,301 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # App Enlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
21
22 from datetime import timedelta
23
24 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
25
# Standard attribute names of ``logging.LogRecord``; entries of a sentry
# event's "extra" data matching these are skipped when promoting extras
# to appenlight tags (see parse_sentry_event), leaving only genuinely
# user-supplied variables.
EXCLUDED_LOG_VARS = [
    'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
    'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs',
    'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated',
    'thread', 'threadName']

# Top-level sentry event keys that this module parses explicitly
# (including the legacy "sentry.interfaces.*" spellings); any key NOT
# listed here is treated as an unidentified extra by parse_sentry_event
# and forwarded as a tag.
EXCLUDE_SENTRY_KEYS = [
    'csp',
    'culprit',
    'event_id',
    'exception',
    'extra',
    'level',
    'logentry',
    'logger',
    'message',
    'modules',
    'platform',
    'query',
    'release',
    'request',
    'sentry.interfaces.Csp', 'sentry.interfaces.Exception',
    'sentry.interfaces.Http', 'sentry.interfaces.Message',
    'sentry.interfaces.Query',
    'sentry.interfaces.Stacktrace',
    'sentry.interfaces.Template', 'sentry.interfaces.User',
    'sentry.interfaces.csp.Csp',
    'sentry.interfaces.exception.Exception',
    'sentry.interfaces.http.Http',
    'sentry.interfaces.message.Message',
    'sentry.interfaces.query.Query',
    'sentry.interfaces.stacktrace.Stacktrace',
    'sentry.interfaces.template.Template',
    'sentry.interfaces.user.User', 'server_name',
    'stacktrace',
    'tags',
    'template',
    'time_spent',
    'timestamp',
    'user']
66
67
def get_keys(list_of_keys, json_body):
    """Return the value of the first key from *list_of_keys* that is
    present in *json_body*, or None when none of them is present.

    :param list_of_keys: candidate key names, checked in order
    :param json_body: dict to look the keys up in
    """
    return next((json_body[key] for key in list_of_keys if key in json_body),
                None)
72
73
def get_logentry(json_body):
    """Return the sentry log-entry payload of the event, checking all
    historical key spellings, or None when the event has none.

    :param json_body: decoded sentry event dict
    """
    for key in ('logentry',
                'sentry.interfaces.message.Message',
                'sentry.interfaces.Message'):
        if key in json_body:
            return json_body[key]
    return None
81
82
def get_exception(json_body):
    """Return ``(exception, stacktrace)`` extracted from a sentry event.

    The exception interface may be a dict wrapping a ``values`` list or a
    bare list; in both cases only the first entry is used.

    :param json_body: decoded sentry event dict
    :return: tuple of (exception dict or {}, stacktrace or {})
    """
    parsed_exception = {}
    key_names = ['exception',
                 'sentry.interfaces.exception.Exception',
                 'sentry.interfaces.Exception'
                 ]
    exception = get_keys(key_names, json_body) or {}
    if exception:
        if isinstance(exception, dict):
            # dict form wraps the actual exception list under 'values'
            exception = exception['values'][0]
        else:
            exception = exception[0]

        parsed_exception['type'] = exception.get('type')
        parsed_exception['value'] = exception.get('value')
        parsed_exception['module'] = exception.get('module')
    # NOTE(review): the reassignment below discards the type/value/module
    # dict built above and returns the raw exception instead - those three
    # stores are dead code. Confirm which return shape was intended.
    parsed_stacktrace = get_stacktrace(exception) or {}
    parsed_exception = exception or {}
    return parsed_exception, parsed_stacktrace
102
103
def get_stacktrace(json_body):
    """Normalize sentry stacktrace frames into appenlight frame dicts.

    :param json_body: decoded sentry event dict (or a single exception
        entry carrying its own 'stacktrace' key)
    :return: list of frame dicts; empty list when no stacktrace found
    """
    frames_source = None
    for key in ('stacktrace',
                'sentry.interfaces.stacktrace.Stacktrace',
                'sentry.interfaces.Stacktrace'):
        if key in json_body:
            frames_source = json_body[key]
            break
    normalized = []
    if frames_source:
        for frame in frames_source['frames']:
            normalized.append(
                {"cline": frame.get('context_line', ''),
                 "file": frame.get('filename', ''),
                 "module": frame.get('module', ''),
                 "fn": frame.get('function', ''),
                 "line": frame.get('lineno', ''),
                 # vars arrive as a dict; store as (name, value) pairs
                 "vars": list(frame.get('vars', {}).items())
                 })
    return normalized
123
124
def get_template(json_body):
    """Normalize sentry template-error frames into appenlight frame dicts.

    :param json_body: decoded sentry event dict
    :return: list of frame dicts; empty list when no template data found
        (previously an empty dict - both are falsy to callers)
    """
    key_names = ['template',
                 'sentry.interfaces.template.Template',
                 'sentry.interfaces.Template'
                 ]
    template = next((json_body[k] for k in key_names if k in json_body), None)
    # bug fix: the accumulator must be a list - the previous ``{}``
    # initializer made the ``append`` calls below raise AttributeError
    # whenever a template payload was actually present
    parsed_template = []
    if template:
        for frame in template['frames']:
            parsed_template.append(
                {"cline": frame.get('context_line', ''),
                 "file": frame.get('filename', ''),
                 "fn": '',
                 "line": frame.get('lineno', ''),
                 "vars": []
                 })

    return parsed_template
144
145
def get_request(json_body):
    """Return the HTTP request info of a sentry event with header names
    title-cased and all other keys lowercased.

    :param json_body: decoded sentry event dict
    :return: dict; empty when the event carries no request interface
    """
    key_names = ['request',
                 'sentry.interfaces.http.Http',
                 'sentry.interfaces.Http'
                 ]
    http = next((json_body[k] for k in key_names if k in json_body),
                None) or {}
    parsed_http = {}
    for name, value in http.items():
        if name == 'headers':
            # normalize header spelling, e.g. 'user-agent' -> 'User-Agent'
            parsed_http['headers'] = dict(
                (header.title(), header_value)
                for header, header_value in http['headers'].items())
        else:
            parsed_http[name.lower()] = value
    return parsed_http
161
162
def get_user(json_body):
    """Return the user info attached to a sentry event.

    :param json_body: decoded sentry event dict
    :return: dict with id/username/email/ip_address (values may be None);
        empty dict when the event has no user interface
    """
    key_names = ['user',
                 'sentry.interfaces.user.User',
                 'sentry.interfaces.User'
                 ]
    user = next((json_body[k] for k in key_names if k in json_body), None)
    if not user:
        return {}
    return {'id': user.get('id'),
            'username': user.get('username'),
            'email': user.get('email'),
            'ip_address': user.get('ip_address')}
177
178
def get_query(json_body):
    """Return the sentry query-interface payload, or None when absent.

    :param json_body: decoded sentry event dict
    """
    for key in ('query',
                'sentry.interfaces.query.Query',
                'sentry.interfaces.Query'):
        if key in json_body:
            return json_body[key]
    return None
187
188
def parse_sentry_event(json_body):
    """Convert a raw sentry event payload into an appenlight event dict.

    :param json_body: decoded sentry event JSON (dict)
    :return: tuple ``(event_dict, event_type)`` where *event_type* is
        ``ParsedSentryEventType.ERROR_REPORT`` when any exception,
        stacktrace or template data is present, and
        ``ParsedSentryEventType.LOG`` otherwise
    """
    request_id = json_body.get('event_id')

    # required
    message = json_body.get('message')
    log_timestamp = json_body.get('timestamp')
    level = json_body.get('level')
    if isinstance(level, int):
        # sentry may send the numeric python logging level; map to its key
        level = LogLevelPython.key_from_value(level)

    namespace = json_body.get('logger')
    language = json_body.get('platform')

    # optional
    server_name = json_body.get('server_name')
    culprit = json_body.get('culprit')
    release = json_body.get('release')

    # tags/extra can arrive as dicts or lists of pairs -
    # normalize both to lists of (key, value) tuples
    tags = json_body.get('tags', {})
    if hasattr(tags, 'items'):
        tags = list(tags.items())
    extra = json_body.get('extra', {})
    if hasattr(extra, 'items'):
        extra = list(extra.items())

    parsed_req = get_request(json_body)
    user = get_user(json_body)
    template = get_template(json_body)
    # NOTE(review): ``query`` is computed but never used below - confirm
    # whether it was meant to be attached to the event
    query = get_query(json_body)

    # other unidentified keys found
    other_keys = [(k, json_body[k]) for k in json_body.keys()
                  if k not in EXCLUDE_SENTRY_KEYS]

    # a dedicated logentry interface overrides the bare message
    logentry = get_logentry(json_body)
    if logentry:
        message = logentry['message']

    exception, stacktrace = get_exception(json_body)

    # a top-level stacktrace may exist without an exception interface
    alt_stacktrace = get_stacktrace(json_body)
    event_type = None
    if not exception and not stacktrace and not alt_stacktrace and not template:
        event_type = ParsedSentryEventType.LOG

    event_dict = {
        'log_level': level,
        'message': message,
        'namespace': namespace,
        'request_id': request_id,
        'server': server_name,
        'date': log_timestamp,
        'tags': tags
    }
    # promote user-supplied extras (minus stdlib LogRecord attrs) to tags
    event_dict['tags'].extend(
        [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS])

    # other keys can be various object types
    event_dict['tags'].extend([(k, v) for k, v in other_keys
                               if isinstance(v, str)])
    if culprit:
        event_dict['tags'].append(('sentry_culprit', culprit))
    if language:
        event_dict['tags'].append(('sentry_language', language))
    if release:
        event_dict['tags'].append(('sentry_release', release))

    if exception or stacktrace or alt_stacktrace or template:
        event_type = ParsedSentryEventType.ERROR_REPORT
        # rebuild the payload as an error report; note ``tags`` is the
        # same list object mutated above, so the appended tags carry over
        event_dict = {
            'client': 'sentry',
            'error': message,
            'namespace': namespace,
            'request_id': request_id,
            'server': server_name,
            'start_time': log_timestamp,
            'end_time': None,
            'tags': tags,
            'extra': extra,
            'language': language,
            'view_name': json_body.get('culprit'),
            'http_status': None,
            'username': None,
            'url': parsed_req.get('url'),
            'ip': None,
            'user_agent': None,
            'request': None,
            'slow_calls': None,
            'request_stats': None,
            'traceback': None
        }

        event_dict['extra'].extend(other_keys)
        # NOTE(review): 'sentry_release' was already appended to this same
        # tags list above, so error reports get it twice - confirm intent
        if release:
            event_dict['tags'].append(('sentry_release', release))
        event_dict['request'] = parsed_req
        if 'headers' in parsed_req:
            event_dict['user_agent'] = parsed_req['headers'].get('User-Agent')
        if 'env' in parsed_req:
            event_dict['ip'] = parsed_req['env'].get('REMOTE_ADDR')
        # time_spent is in milliseconds; derive end_time from start_time
        # (assumes start_time is already a datetime - TODO confirm caller
        # converts the timestamp before this addition)
        ts_ms = int(json_body.get('time_spent') or 0)
        if ts_ms > 0:
            event_dict['end_time'] = event_dict['start_time'] + \
                timedelta(milliseconds=ts_ms)
        if stacktrace or alt_stacktrace or template:
            event_dict['traceback'] = stacktrace or alt_stacktrace or template
        # drop keys whose value is still None
        for k in list(event_dict.keys()):
            if event_dict[k] is None:
                del event_dict[k]
        if user:
            event_dict['username'] = user['username'] or user['id'] \
                or user['email']
    return event_dict, event_type
@@ -26,7 +26,7 b' dist/'
26 downloads/
26 downloads/
27 eggs/
27 eggs/
28 .eggs/
28 .eggs/
29 lib/
29 $lib
30 lib64/
30 lib64/
31 parts/
31 parts/
32 sdist/
32 sdist/
General Comments 0
You need to be logged in to leave comments. Login now