diff --git a/.hgignore b/.hgignore
index d8b23d4..8a7e7e6 100644
--- a/.hgignore
+++ b/.hgignore
@@ -26,7 +26,7 @@ dist/
downloads/
eggs/
.eggs/
-lib/
+$lib
lib64/
parts/
sdist/
diff --git a/backend/src/appenlight/lib/__init__.py b/backend/src/appenlight/lib/__init__.py
new file mode 100644
index 0000000..27c5848
--- /dev/null
+++ b/backend/src/appenlight/lib/__init__.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+"""Miscellaneous support packages for {{project}}.
+"""
+import random
+import string
+import importlib
+
+from appenlight_client.exceptions import get_current_traceback
+
+
def generate_random_string(chars=10):
    """Return a random alphanumeric string of *chars* characters.

    Uses independent uniform draws over letters+digits, so any length is
    supported. (The previous ``random.sample`` over a doubled letter pool
    capped every character at two occurrences and raised ``ValueError``
    for chars > 114.)

    NOTE: not cryptographically secure -- use the ``secrets`` module for
    security-sensitive tokens.
    """
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(chars))
+
+
def to_integer_safe(input):
    """Convert *input* to ``int``; return ``None`` when the value is
    missing (TypeError) or not a valid integer literal (ValueError)."""
    try:
        return int(input)
    except (TypeError, ValueError,):
        return None
+
def print_traceback(log):
    """Log the currently handled exception through *log* at ERROR level.

    Captures the active traceback via appenlight_client (including hidden
    frames, skipping this helper's own frame) and logs both the exception
    text and the plaintext traceback.

    :param log: a ``logging.Logger``-like object
    """
    traceback = get_current_traceback(skip=1, show_hidden_frames=True,
                                      ignore_system_exceptions=True)
    exception_text = traceback.exception
    log.error(exception_text)
    log.error(traceback.plaintext)
    # drop the reference explicitly -- traceback objects create cycles
    del traceback
+
def get_callable(import_string):
    """Resolve a ``'dotted.module:attribute'`` string to the named object.

    :param import_string: e.g. ``'package.module:function_name'``
    :return: the attribute looked up on the imported module
    """
    import_module, indexer_callable = import_string.split(':')
    return getattr(importlib.import_module(import_module),
                   indexer_callable)
diff --git a/backend/src/appenlight/lib/api.py b/backend/src/appenlight/lib/api.py
new file mode 100644
index 0000000..5885215
--- /dev/null
+++ b/backend/src/appenlight/lib/api.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+import datetime
+import logging
+
+from pyramid.httpexceptions import HTTPForbidden, HTTPTooManyRequests
+
+from appenlight.models import Datastores
+from appenlight.models.services.config import ConfigService
+from appenlight.lib.redis_keys import REDIS_KEYS
+
+log = logging.getLogger(__name__)
+
+
def rate_limiting(request, resource, section, to_increment=1):
    """Increment the per-minute counter for *resource* in *section* and
    raise ``HTTPTooManyRequests`` when the configured limit is exceeded.

    The limit is read from the 'global' config section for *section*,
    defaulting to 1000 events per minute.

    :raises HTTPTooManyRequests: when the incremented count exceeds the limit
    """
    # bucket all increments of the same UTC minute under one redis key
    tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
    key = REDIS_KEYS['rate_limits'][section].format(tsample,
                                                    resource.resource_id)
    current_count = Datastores.redis.incr(key, to_increment)
    # keys self-expire after a day so stale buckets don't accumulate
    Datastores.redis.expire(key, 3600 * 24)
    config = ConfigService.by_key_and_section(section, 'global')
    limit = config.value if config else 1000
    if current_count > int(limit):
        log.info('RATE LIMITING: {}: {}, {}'.format(
            section, resource, current_count))
        abort_msg = 'Rate limits are in effect for this application'
        raise HTTPTooManyRequests(abort_msg,
                                  headers={'X-AppEnlight': abort_msg})
+
+
def check_cors(request, application, should_return=True):
    """
    Performs a check and validation if request comes from authorized domain for
    application, otherwise return 403

    :param application: must expose ``domains``, a newline-separated list
        of allowed domains
    :return: False when no Origin header, the response (with CORS headers
        added) on success, ``HTTPForbidden`` otherwise
    """
    origin_found = False
    origin = request.headers.get('Origin')
    if should_return:
        log.info('CORS for %s' % origin)
    if not origin:
        return False
    for domain in application.domains.split('\n'):
        # NOTE(review): substring match -- 'example.com' also matches
        # 'evil-example.com.attacker.net'. An exact host comparison would
        # be safer; left as-is to preserve behavior, confirm intent.
        if domain in origin:
            origin_found = True
    if origin_found:
        # echo the origin back and advertise the allowed methods/headers
        request.response.headers.add('Access-Control-Allow-Origin', origin)
        request.response.headers.add('XDomainRequestAllowed', '1')
        request.response.headers.add('Access-Control-Allow-Methods',
                                     'GET, POST, OPTIONS')
        request.response.headers.add('Access-Control-Allow-Headers',
                                     'Accept-Encoding, Accept-Language, '
                                     'Content-Type, '
                                     'Depth, User-Agent, X-File-Size, '
                                     'X-Requested-With, If-Modified-Since, '
                                     'X-File-Name, '
                                     'Cache-Control, Host, Pragma, Accept, '
                                     'Origin, Connection, '
                                     'Referer, Cookie, '
                                     'X-appenlight-public-api-key, '
                                     'x-appenlight-public-api-key')
        request.response.headers.add('Access-Control-Max-Age', '86400')
        return request.response
    else:
        return HTTPForbidden()
diff --git a/backend/src/appenlight/lib/cache_regions.py b/backend/src/appenlight/lib/cache_regions.py
new file mode 100644
index 0000000..0bdb241
--- /dev/null
+++ b/backend/src/appenlight/lib/cache_regions.py
@@ -0,0 +1,188 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+import copy
+import hashlib
+import inspect
+
+from dogpile.cache import make_region, compat
+
+regions = None
+
+
def key_mangler(key):
    """Namespace dogpile cache keys so they don't collide in shared redis."""
    return "appenlight:dogpile:{}".format(key)
+
+
def hashgen(namespace, fn, to_str=compat.string_type):
    """Return a function that generates a string
    key, based on a given function as well as
    arguments to the returned function itself.

    This is used by :meth:`.CacheRegion.cache_on_arguments`
    to generate a cache key from a decorated function.

    It can be replaced using the ``function_key_generator``
    argument passed to :func:`.make_region`.

    """

    if namespace is None:
        namespace = '%s:%s' % (fn.__module__, fn.__name__)
    else:
        namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)

    # inspect.getargspec() was removed in Python 3.11; getfullargspec is
    # the drop-in replacement and keeps the positional args in slot 0.
    args = inspect.getfullargspec(fn)
    has_self = args[0] and args[0][0] in ('self', 'cls')

    def generate_key(*args, **kw):
        # keyword arguments would make key ordering ambiguous
        if kw:
            raise ValueError(
                "dogpile.cache's default key creation "
                "function does not accept keyword arguments.")
        if has_self:
            # drop the bound instance/class argument from the key
            args = args[1:]

        # sha1 keeps keys short; cache keys are not security-sensitive
        return namespace + "|" + hashlib.sha1(
            " ".join(map(to_str, args)).encode('utf8')).hexdigest()

    return generate_key
+
+
class CacheRegions(object):
    """Preconfigured dogpile cache regions used across appenlight.

    Attribute names encode backend and TTL: ``redis_min_10`` is a
    redis-backed region expiring after 10 minutes, ``memory_sec_5`` an
    in-process region expiring after 5 seconds.
    """

    @staticmethod
    def _region(backend, expiration_time, config=None):
        """Build one region with the shared key generator/mangler."""
        # deepcopy so every region gets its own arguments dict
        kwargs = copy.deepcopy(config) if config else {}
        return make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            backend,
            expiration_time=expiration_time,
            **kwargs)

    def __init__(self, settings):
        # settings are handed straight to the redis backend
        config_redis = {"arguments": settings}
        mk = self._region

        self.redis_sec_1 = mk("dogpile.cache.redis", 1, config_redis)
        self.redis_sec_5 = mk("dogpile.cache.redis", 5, config_redis)
        self.redis_sec_30 = mk("dogpile.cache.redis", 30, config_redis)
        self.redis_min_1 = mk("dogpile.cache.redis", 60, config_redis)
        self.redis_min_5 = mk("dogpile.cache.redis", 300, config_redis)
        # BUG FIX: was 60 (copy/paste from redis_min_1); every sibling
        # region's TTL matches its name, so a 10-minute region gets 600
        self.redis_min_10 = mk("dogpile.cache.redis", 600, config_redis)
        self.redis_min_60 = mk("dogpile.cache.redis", 3600, config_redis)
        self.redis_day_1 = mk("dogpile.cache.redis", 86400, config_redis)
        self.redis_day_7 = mk("dogpile.cache.redis", 86400 * 7, config_redis)
        self.redis_day_30 = mk("dogpile.cache.redis", 86400 * 30,
                               config_redis)

        # NOTE(review): the original also passed the redis arguments to
        # this one memory region; preserved for compatibility -- confirm
        self.memory_day_1 = mk("dogpile.cache.memory", 86400, config_redis)
        self.memory_sec_1 = mk("dogpile.cache.memory", 1)
        self.memory_sec_5 = mk("dogpile.cache.memory", 5)
        self.memory_min_1 = mk("dogpile.cache.memory", 60)
        self.memory_min_5 = mk("dogpile.cache.memory", 300)
        self.memory_min_10 = mk("dogpile.cache.memory", 600)
        self.memory_min_60 = mk("dogpile.cache.memory", 3600)
+
+
def get_region(region):
    """Return the named cache region from the module-level ``regions``
    registry (populated at application startup)."""
    return getattr(regions, region)
diff --git a/backend/src/appenlight/lib/encryption.py b/backend/src/appenlight/lib/encryption.py
new file mode 100644
index 0000000..0f1697d
--- /dev/null
+++ b/backend/src/appenlight/lib/encryption.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+# this gets set on runtime
+from cryptography.fernet import Fernet
+
+ENCRYPTION_SECRET = None
+
+
def encrypt_fernet(value):
    """Encrypt *value* with the module-level ``ENCRYPTION_SECRET`` and
    return it in ``enc$fernet$<token>`` envelope form.

    :param value: a ``str``; values already carrying the envelope prefix
        are returned unchanged
    """
    # avoid double encryption
    # not sure if this is needed but it won't hurt too much to have this
    if value.startswith('enc$fernet$'):
        return value
    f = Fernet(ENCRYPTION_SECRET)
    return 'enc$fernet${}'.format(f.encrypt(value.encode('utf8')).decode('utf8'))
+
+
def decrypt_fernet(value):
    """Decrypt a value produced by :func:`encrypt_fernet`.

    Values without the ``enc$fernet$`` envelope are returned unchanged.
    This mirrors the prefix check in ``encrypt_fernet``; the previous
    part-count check wrongly attempted to Fernet-decrypt any string that
    merely contained two ``$`` characters.
    """
    if not value.startswith('enc$fernet$'):
        # not an encrypted value
        return value
    # maxsplit=2: the fernet token itself is urlsafe base64, never '$'
    parts = value.split('$', 2)
    f = Fernet(ENCRYPTION_SECRET)
    return f.decrypt(parts[2].encode('utf8')).decode('utf8')
+
+
def encrypt_dictionary_keys(_dict, exclude_keys=None):
    """Encrypt the *values* of *_dict* in place (despite the name) for
    every key not listed in *exclude_keys*.

    :return: the same, mutated dict
    """
    if not exclude_keys:
        exclude_keys = []
    keys = [k for k in _dict.keys() if k not in exclude_keys]
    for k in keys:
        _dict[k] = encrypt_fernet(_dict[k])
    return _dict
+
+
def decrypt_dictionary_keys(_dict, exclude_keys=None):
    """Decrypt the *values* of *_dict* in place (despite the name) for
    every key not listed in *exclude_keys*.

    :return: the same, mutated dict
    """
    if not exclude_keys:
        exclude_keys = []
    keys = [k for k in _dict.keys() if k not in exclude_keys]
    for k in keys:
        _dict[k] = decrypt_fernet(_dict[k])
    return _dict
diff --git a/backend/src/appenlight/lib/enums.py b/backend/src/appenlight/lib/enums.py
new file mode 100644
index 0000000..ae4e1eb
--- /dev/null
+++ b/backend/src/appenlight/lib/enums.py
@@ -0,0 +1,93 @@
+import collections
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+
class StupidEnum(object):
    """Minimal enum replacement: subclasses declare value constants as
    class attributes and can map a value back to its attribute name."""

    @classmethod
    def set_inverse(cls):
        # build {value: name} from the subclass' own public, non-callable
        # attributes; cached on the class as _inverse_values
        cls._inverse_values = dict(
            (y, x) for x, y in vars(cls).items() if
            not x.startswith('_') and not callable(y)
        )

    @classmethod
    def key_from_value(cls, value):
        """Return the attribute name for *value*, or None if unknown."""
        if not hasattr(cls, '_inverse_values'):
            cls.set_inverse()
        return cls._inverse_values.get(value)
+
+
class ReportType(StupidEnum):
    """Numeric report categories stored with each report."""
    unknown = 0
    error = 1
    not_found = 2
    slow = 3
+
+
class Language(StupidEnum):
    """Numeric ids for the client language a report originates from."""
    unknown = 0
    python = 1
    javascript = 2
    java = 3
    objectivec = 4
    swift = 5
    cpp = 6
    basic = 7
    csharp = 8
    php = 9
    perl = 10
    vb = 11
    vbnet = 12
    ruby = 13
    fsharp = 14
    actionscript = 15
    go = 16
    scala = 17
    haskell = 18
    erlang = 19
    haxe = 20
    scheme = 21
+
+
class LogLevel(StupidEnum):
    """Internal log severity scale (even steps leave room for additions)."""
    UNKNOWN = 0
    DEBUG = 2
    TRACE = 4
    INFO = 6
    WARNING = 8
    ERROR = 10
    CRITICAL = 12
    FATAL = 14
+
+
class LogLevelPython(StupidEnum):
    """Severity values matching the stdlib ``logging`` module levels."""
    CRITICAL = 50
    ERROR = 40
    WARNING = 30
    INFO = 20
    DEBUG = 10
    NOTSET = 0
+
+
class ParsedSentryEventType(StupidEnum):
    """Classification of an incoming sentry-protocol event."""
    ERROR_REPORT = 1
    LOG = 2
diff --git a/backend/src/appenlight/lib/ext_json.py b/backend/src/appenlight/lib/ext_json.py
new file mode 100644
index 0000000..19f7468
--- /dev/null
+++ b/backend/src/appenlight/lib/ext_json.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+"""
+ex-json borrowed from Marcin Kuzminski
+
+source: https://secure.rhodecode.org/ext-json
+
+"""
+import datetime
+import functools
+import decimal
+import imp
+
+__all__ = ['json', 'simplejson', 'stdlibjson']
+
+
def _is_aware(value):
    """
    Determines if a given datetime.time is aware.

    The logic is described in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo

    :return: True when *value* carries a usable tzinfo, False otherwise
    """
    return (value.tzinfo is not None
            and value.tzinfo.utcoffset(value) is not None)
+
+
def _obj_dump(obj):
    """
    Custom function for dumping objects to JSON, if obj has __json__ attribute
    or method defined it will be used for serialization

    :param obj:
    :raises NotImplementedError: for types with no known serialization,
        so callers can translate it into ``TypeError``
    """

    if isinstance(obj, complex):
        return [obj.real, obj.imag]
    # See "Date Time String Format" in the ECMA-262 specification.
    # some code borrowed from django 1.4
    # NOTE: datetime must be tested before date -- datetime is a subclass
    elif isinstance(obj, datetime.datetime):
        r = obj.isoformat()
        # if obj.microsecond:
        # r = r[:23] + r[26:]
        if r.endswith('+00:00'):
            r = r[:-6] + 'Z'
        return r
    elif isinstance(obj, datetime.date):
        return obj.isoformat()
    elif isinstance(obj, decimal.Decimal):
        return str(obj)
    elif isinstance(obj, datetime.time):
        if _is_aware(obj):
            raise ValueError("JSON can't represent timezone-aware times.")
        r = obj.isoformat()
        # truncate microseconds to milliseconds: 'HH:MM:SS.mmm'
        if obj.microsecond:
            r = r[:12]
        return r
    elif isinstance(obj, set):
        return list(obj)
    elif hasattr(obj, '__json__'):
        if callable(obj.__json__):
            return obj.__json__()
        else:
            return obj.__json__
    else:
        raise NotImplementedError
+
+
# Load private copies of the json libraries so the monkey-patching below
# (extended default encoders) stays local to this module instead of
# mutating the globally shared module objects.
# NOTE: the stdlib ``imp`` module used previously was removed in Python
# 3.12; importlib.util provides the equivalent machinery.
import importlib.util as _importlib_util


def _load_private_module(name):
    """Import *name* as a fresh, independent module object."""
    spec = _importlib_util.find_spec(name)
    if spec is None or spec.loader is None:
        raise ImportError(name)
    module = _importlib_util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module


# Import simplejson
try:
    # import simplejson initially
    _sj = _load_private_module('simplejson')


    def extended_encode(obj):
        """``default=`` hook: serialize via _obj_dump or raise TypeError."""
        try:
            return _obj_dump(obj)
        except NotImplementedError:
            pass
        raise TypeError("%r is not JSON serializable" % (obj,))


    # we handle decimals our own it makes unified behavior of json vs
    # simplejson
    sj_version = [int(x) for x in _sj.__version__.split('.')]
    major, minor = sj_version[0], sj_version[1]
    if major < 2 or (major == 2 and minor < 1):
        # simplejson < 2.1 doesnt support use_decimal
        _sj.dumps = functools.partial(
            _sj.dumps, default=extended_encode)
        _sj.dump = functools.partial(
            _sj.dump, default=extended_encode)
    else:
        _sj.dumps = functools.partial(
            _sj.dumps, default=extended_encode, use_decimal=False)
        _sj.dump = functools.partial(
            _sj.dump, default=extended_encode, use_decimal=False)
    simplejson = _sj

except ImportError:
    # no simplejson set it to None
    simplejson = None

try:
    # simplejson not found try out regular json module
    _json = _load_private_module('json')


    # extended JSON encoder for json
    class ExtendedEncoder(_json.JSONEncoder):
        def default(self, obj):
            try:
                return _obj_dump(obj)
            except NotImplementedError:
                pass
            raise TypeError("%r is not JSON serializable" % (obj,))


    # monkey-patch JSON encoder to use extended version
    _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
    _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)

except ImportError:
    # BUG FIX: this branch previously set ``json = None`` and the module
    # then read the still-undefined ``_json`` below, raising NameError
    _json = None

stdlibjson = _json

# set all available json modules
if simplejson:
    json = _sj
elif _json:
    json = _json
else:
    raise ImportError('Could not find any json modules')
diff --git a/backend/src/appenlight/lib/helpers.py b/backend/src/appenlight/lib/helpers.py
new file mode 100644
index 0000000..e48e7c6
--- /dev/null
+++ b/backend/src/appenlight/lib/helpers.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+"""
+Helper functions
+"""
+import copy
+import datetime
+
+from collections import namedtuple, OrderedDict
+
# i18n marker stub -- currently the identity function
_ = lambda x: x

# Preset time windows for the UI date filters; each entry maps a short
# code to its timedelta, display label and length in minutes.
time_deltas = OrderedDict()

time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1),
                     'label': '1 minute', 'minutes': 1}

time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5),
                     'label': '5 minutes', 'minutes': 5}
time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30),
                      'label': '30 minutes', 'minutes': 30}
time_deltas['1h'] = {'delta': datetime.timedelta(hours=1),
                     'label': '60 minutes', 'minutes': 60}
time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours',
                     'minutes': 60 * 4}
time_deltas['12h'] = {'delta': datetime.timedelta(hours=12),
                      'label': '12 hours', 'minutes': 60 * 12}
time_deltas['24h'] = {'delta': datetime.timedelta(hours=24),
                      'label': '24 hours', 'minutes': 60 * 24}
time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days',
                     'minutes': 60 * 24 * 3}
time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days',
                     'minutes': 60 * 24 * 7}
time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days',
                     'minutes': 60 * 24 * 14}
time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days',
                     'minutes': 60 * 24 * 31}
time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3),
                     'label': '3 months',
                     'minutes': 60 * 24 * 31 * 3}
time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6),
                     'label': '6 months',
                     'minutes': 60 * 24 * 31 * 6}
time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12),
                      'label': '12 months',
                      'minutes': 60 * 24 * 31 * 12}

# used in json representation
time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']})
                     for k, v in time_deltas.items()])
# a single pending flash message: text plus severity level
FlashMsg = namedtuple('FlashMsg', ['msg', 'level'])
+
+
def get_flash(request):
    """Return all pending session flash messages as ``FlashMsg`` tuples,
    without consuming them (uses ``peek_flash``), ordered
    error -> warning -> notice."""
    messages = []
    messages.extend(
        [FlashMsg(msg, 'error')
         for msg in request.session.peek_flash('error')])
    messages.extend([FlashMsg(msg, 'warning')
                     for msg in request.session.peek_flash('warning')])
    messages.extend(
        [FlashMsg(msg, 'notice') for msg in request.session.peek_flash()])
    return messages
+
+
def clear_flash(request):
    """Drop all queued flash messages (error, warning and default)."""
    request.session.pop_flash('error')
    request.session.pop_flash('warning')
    request.session.pop_flash()
+
+
def get_type_formatted_flash(request):
    """Return pending flash messages as JSON-friendly
    ``{'msg': ..., 'type': ...}`` dicts (does not consume the queue)."""
    return [{'msg': message.msg, 'type': message.level}
            for message in get_flash(request)]
+
+
def gen_pagination_headers(request, paginator):
    """Build pagination response headers for *paginator*.

    Emits x-total-count / x-current-page / x-items-per-page plus a 'link'
    header with first/last and (when distinct) prev/next URLs preserving
    the current query string.

    NOTE(review): the link format ('rel="x", <url>' joined with ';') is
    not RFC 5988 Link syntax; preserved because the frontend may parse
    this custom form -- confirm before normalizing.
    """
    headers = {
        'x-total-count': str(paginator.item_count),
        'x-current-page': str(paginator.page),
        'x-items-per-page': str(paginator.items_per_page)
    }
    params_dict = request.GET.dict_of_lists()
    last_page_params = copy.deepcopy(params_dict)
    last_page_params['page'] = paginator.last_page or 1
    first_page_params = copy.deepcopy(params_dict)
    # first page is represented by the absence of a ?page= parameter
    first_page_params.pop('page', None)
    next_page_params = copy.deepcopy(params_dict)
    next_page_params['page'] = paginator.next_page or paginator.last_page or 1
    prev_page_params = copy.deepcopy(params_dict)
    prev_page_params['page'] = paginator.previous_page or 1
    lp_url = request.current_route_url(_query=last_page_params)
    fp_url = request.current_route_url(_query=first_page_params)
    links = [
        'rel="last", <{}>'.format(lp_url),
        'rel="first", <{}>'.format(fp_url),
    ]
    # only emit prev/next when they differ from first/last respectively
    if first_page_params != prev_page_params:
        prev_url = request.current_route_url(_query=prev_page_params)
        links.append('rel="prev", <{}>'.format(prev_url))
    if last_page_params != next_page_params:
        next_url = request.current_route_url(_query=next_page_params)
        links.append('rel="next", <{}>'.format(next_url))
    headers['link'] = '; '.join(links)
    return headers
diff --git a/backend/src/appenlight/lib/jinja2_filters.py b/backend/src/appenlight/lib/jinja2_filters.py
new file mode 100644
index 0000000..82dc592
--- /dev/null
+++ b/backend/src/appenlight/lib/jinja2_filters.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+import re
+from appenlight.lib.ext_json import json
+from jinja2 import Markup, escape, evalcontextfilter
+
+_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
+
+
@evalcontextfilter
def nl2br(eval_ctx, value):
    """Jinja2 filter: split *value* into paragraphs on blank lines, wrap
    each in ``<p>...</p>`` and turn single newlines into ``<br/>``.

    NOTE(review): the HTML tag literals were garbled in the reviewed copy
    of this file; reconstructed to the conventional jinja2 nl2br recipe --
    confirm against upstream.
    """
    if eval_ctx.autoescape:
        # Markup('<br/>\n') keeps the inserted tags unescaped when the
        # final result is wrapped in Markup below
        result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br/>\n'))
                             for p in _paragraph_re.split(escape(value)))
    else:
        result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br/>\n')
                             for p in _paragraph_re.split(escape(value)))
    if eval_ctx.autoescape:
        result = Markup(result)
    return result
+
+
@evalcontextfilter
def toJSONUnsafe(eval_ctx, value):
    """Serialize *value* to JSON and escape characters that could break
    out of an HTML attribute or <script> context, returning the encoded
    payload wrapped in single quotes as Markup.

    (A duplicated ``'>'`` replacement was removed -- no behavior change.)
    """
    encoded = json.dumps(value).replace('&', '\\u0026') \
        .replace('<', '\\u003c') \
        .replace('>', '\\u003e') \
        .replace('"', '\\u0022') \
        .replace("'", '\\u0027') \
        .replace(r'\n', '/\\\n')
    # NOTE(review): the final replace rewrites literal backslash-n
    # sequences in the JSON text to '/\<newline>' -- intent unclear,
    # preserved as-is; confirm against the consuming javascript.
    return Markup("'%s'" % encoded)
diff --git a/backend/src/appenlight/lib/redis_keys.py b/backend/src/appenlight/lib/redis_keys.py
new file mode 100644
index 0000000..de0e2a3
--- /dev/null
+++ b/backend/src/appenlight/lib/redis_keys.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
# common namespace prefix shared by every appenlight redis key
BASE = 'appenlight:data:{}'

# Templates for redis keys; the remaining '{}' placeholders are filled at
# the call site (resource ids, report types, minute buckets, ...).
REDIS_KEYS = {
    'tasks': {
        'add_reports_lock': BASE.format('add_reports_lock:{}'),
        'add_logs_lock': BASE.format('add_logs_lock:{}'),
    },
    'counters': {
        'reports_per_minute': BASE.format('reports_per_minute:{}'),
        'reports_per_minute_per_app': BASE.format(
            'reports_per_minute_per_app:{}:{}'),
        'reports_per_type': BASE.format('reports_per_type:{}'),
        'logs_per_minute': BASE.format('logs_per_minute:{}'),
        'logs_per_minute_per_app': BASE.format(
            'logs_per_minute_per_app:{}:{}'),
        'metrics_per_minute': BASE.format('metrics_per_minute:{}'),
        'metrics_per_minute_per_app': BASE.format(
            'metrics_per_minute_per_app:{}:{}'),
        'report_group_occurences': BASE.format('report_group_occurences:{}'),
        'report_group_occurences_10th': BASE.format(
            'report_group_occurences_10th:{}'),
        'report_group_occurences_100th': BASE.format(
            'report_group_occurences_100th:{}'),
    },
    'rate_limits': {
        # NOTE(review): template says 'per_application_reports_limit'
        # while the dict key says '..._reports_rate_limit' -- probably a
        # typo in the template, but existing redis data may depend on it
        'per_application_reports_rate_limit': BASE.format(
            'per_application_reports_limit:{}:{}'),
        'per_application_logs_rate_limit': BASE.format(
            'per_application_logs_rate_limit:{}:{}'),
        'per_application_metrics_rate_limit': BASE.format(
            'per_application_metrics_rate_limit:{}:{}'),
    },
    'apps_that_had_reports': BASE.format('apps_that_had_reports'),
    'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'),
    'reports_to_notify_per_type_per_app': BASE.format(
        'reports_to_notify_per_type_per_app:{}:{}'),
    'seen_tag_list': BASE.format('seen_tag_list')
}
diff --git a/backend/src/appenlight/lib/request.py b/backend/src/appenlight/lib/request.py
new file mode 100644
index 0000000..cb91ea0
--- /dev/null
+++ b/backend/src/appenlight/lib/request.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+import appenlight.lib.helpers as helpers
+import json
+from pyramid.security import unauthenticated_userid
+from appenlight.models.user import User
+
+
class CSRFException(Exception):
    """Raised when a request fails CSRF token validation."""
    pass
+
+
class JSONException(Exception):
    """Raised when a request body cannot be parsed as JSON."""
    pass
+
+
def get_csrf_token(request):
    """Return (creating if needed) the session's CSRF token."""
    return request.session.get_csrf_token()
+
+
def safe_json_body(request):
    """
    Returns None if json body is missing or erroneous
    """
    try:
        return request.json_body
    except ValueError:
        return None
+
+
def unsafe_json_body(request):
    """
    Return the deserialized JSON request body.

    :raises JSONException: when the body cannot be deserialized; the
        original ``ValueError`` is chained as ``__cause__`` so the real
        parse error stays visible in tracebacks.
    """
    try:
        return request.json_body
    except ValueError as exc:
        raise JSONException('Incorrect JSON') from exc
+
+
def get_user(request):
    """``request.user`` factory: resolve the unauthenticated userid to a
    ``User`` row.

    Returns None for static-asset paths, missing/non-numeric ids and
    unknown users. On success also records 'id:username' in the WSGI
    environ for logging.
    """
    if not request.path_info.startswith('/static'):
        user_id = unauthenticated_userid(request)
        try:
            # userid may be None or a non-numeric principal -> no user
            user_id = int(user_id)
        except Exception:
            return None

        if user_id:
            user = User.by_id(user_id)
            if user:
                request.environ['appenlight.username'] = '%d:%s' % (
                    user_id, user.user_name)
                return user
            else:
                return None
    # requests under /static fall through and implicitly return None
+
+
def es_conn(request):
    """Return the application-wide elasticsearch connection stored on the
    pyramid registry."""
    return request.registry.es_conn
+
+
def add_flash_to_headers(request, clear=True):
    """
    Adds pending flash messages to response, if clear is true clears out the
    flash queue

    (Fixed: the ``clear`` flag was previously ignored and the queue was
    always cleared.)
    """
    flash_msgs = helpers.get_type_formatted_flash(request)
    request.response.headers['x-flash-messages'] = json.dumps(flash_msgs)
    if clear:
        helpers.clear_flash(request)
diff --git a/backend/src/appenlight/lib/rule.py b/backend/src/appenlight/lib/rule.py
new file mode 100644
index 0000000..a9de9fc
--- /dev/null
+++ b/backend/src/appenlight/lib/rule.py
@@ -0,0 +1,288 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.txt>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+import logging
+import operator
+
+log = logging.getLogger(__name__)
+
+
class RuleException(Exception):
    """Base class for every error raised by the rule engine."""
+
+
class KeyNotFoundException(RuleException):
    """Raised when a looked-up key is missing from the inspected structure."""
+
+
class UnknownTypeException(RuleException):
    """Raised when a field has no entry in the rule's type matrix."""
+
+
class BadConfigException(RuleException):
    """Raised when the rule configuration itself is invalid."""
+
+
class InvalidValueException(RuleException):
    """Raised when a value cannot be cast to its configured type."""
+
+
class RuleBase(object):
    """Shared rule behaviour: structure getters and value normalization."""

    @classmethod
    def default_dict_struct_getter(cls, struct, field_name):
        """
        returns a key from dictionary based on field_name, if the name contains
        `:` then it means additional nesting levels should be checked for the
        key so `a:b:c` means return struct['a']['b']['c']

        :param struct: dict to pull the value from
        :param field_name: colon-separated key path, e.g. ``a:b:c``
        :return: the value found at the path (may be ``None``)
        :raises KeyNotFoundException: when an intermediate key is missing
        """
        parts = field_name.split(':') if field_name else []
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = found.get(current_key)
            # NOTE(review): a present-but-falsy intermediate value (0, '', {})
            # also triggers this branch - confirm that is acceptable for the
            # structures rules are matched against
            if not found and parts:
                raise KeyNotFoundException('Key not found in structure')
        return found

    @classmethod
    def default_obj_struct_getter(cls, struct, field_name):
        """
        returns a key from instance based on field_name, if the name contains
        `:` then it means additional nesting levels should be checked for the
        key so `a:b:c` means return struct.a.b.c

        :param struct: object to pull the value from
        :param field_name: colon-separated attribute path, e.g. ``a:b:c``
        :return: the attribute found at the path (may be ``None``)
        :raises KeyNotFoundException: when an intermediate attribute is missing
        """
        parts = field_name.split(':')
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = getattr(found, current_key, None)
            # NOTE(review): same falsy-intermediate caveat as the dict getter
            if not found and parts:
                raise KeyNotFoundException('Key not found in structure')
        return found

    def normalized_type(self, field, value):
        """
        Converts text values from self.conf_value based on type_matrix below
        check_matrix defines what kind of checks we can perform on a field
        value based on field name

        :raises UnknownTypeException: when *field* has no type_matrix entry
        :raises InvalidValueException: when the cast fails
        """
        f_type = self.type_matrix.get(field)
        if f_type:
            cast_to = f_type['type']
        else:
            raise UnknownTypeException('Unknown type')

        if value is None:
            return None

        try:
            if cast_to == 'int':
                return int(value)
            elif cast_to == 'float':
                return float(value)
            elif cast_to == 'unicode':
                return str(value)
            # NOTE(review): an unrecognized cast name silently falls through
            # and returns None - confirm this is intended
        except ValueError as exc:
            raise InvalidValueException(exc)
+
+
class Rule(RuleBase):
    """A single (possibly nested) matching rule built from a config dict."""

    def __init__(self, config, type_matrix,
                 struct_getter=RuleBase.default_dict_struct_getter,
                 config_manipulator=None):
        """
        :param config: dict - contains rule configuration
            example::

                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "field": "__AND__",
                            "rules": [
                                {"op": "ge", "field": "occurences",
                                 "value": "10"},
                                {"op": "ge", "field": "priority",
                                 "value": "4"}
                            ]
                        },
                        {"op": "eq", "field": "http_status", "value": "500"}
                    ]
                }

        :param type_matrix: dict - contains map of type casts, e.g.
            ``{'http_status': {'type': 'int'}}``
        :param struct_getter: callable - used to grab the value of field from
            the structure passed to match() based on key; defaults to
            :meth:`RuleBase.default_dict_struct_getter`
        :param config_manipulator: callable - optional hook that may rewrite
            this rule's config/type_matrix in place before matching
        """
        self.type_matrix = type_matrix
        self.config = config
        self.struct_getter = struct_getter
        self.config_manipulator = config_manipulator
        if config_manipulator:
            config_manipulator(self)

    def subrule_check(self, rule_config, struct):
        """Build a child Rule from *rule_config* and match it against *struct*."""
        rule = Rule(rule_config, self.type_matrix,
                    config_manipulator=self.config_manipulator)
        return rule.match(struct)

    def match(self, struct):
        """
        Check if rule matched for this specific report
        First tries report value, then tests tags if not found, then finally
        report group
        """
        field_name = self.config.get('field')
        test_value = self.config.get('value')

        if not field_name:
            return False

        # composite nodes delegate to their subrules
        if field_name == '__AND__':
            rule = AND(self.config['rules'], self.type_matrix,
                       config_manipulator=self.config_manipulator)
            return rule.match(struct)
        elif field_name == '__OR__':
            rule = OR(self.config['rules'], self.type_matrix,
                      config_manipulator=self.config_manipulator)
            return rule.match(struct)

        if test_value is None:
            return False

        # normalize both sides to the configured type before comparing
        try:
            struct_value = self.normalized_type(field_name,
                                                self.struct_getter(struct,
                                                                   field_name))
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False

        try:
            test_value = self.normalized_type(field_name, test_value)
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False

        if self.config['op'] not in ('startswith', 'endswith', 'contains'):
            # generic comparison operators resolved from the operator module
            try:
                return getattr(operator,
                               self.config['op'])(struct_value, test_value)
            except TypeError:
                return False
        elif self.config['op'] == 'startswith':
            return struct_value.startswith(test_value)
        elif self.config['op'] == 'endswith':
            return struct_value.endswith(test_value)
        elif self.config['op'] == 'contains':
            return test_value in struct_value
        raise BadConfigException('Invalid configuration, '
                                 'unknown operator: {}'.format(self.config))

    def __repr__(self):
        # bugfix: the format template was an empty string (''.format(...)),
        # so repr() always produced '' - restore a meaningful representation
        return '<Rule {} {}>'.format(self.config.get('field'),
                                     self.config.get('value'))
+
+
class AND(Rule):
    """Composite rule matching only when every subrule matches."""

    def __init__(self, rules, *args, **kwargs):
        super(AND, self).__init__({}, *args, **kwargs)
        self.rules = rules

    def match(self, struct):
        """Return True when all subrules match *struct*."""
        # materialize all results first (no short-circuit) so every subrule
        # is evaluated, matching the original list-comprehension behaviour
        results = [self.subrule_check(rule_conf, struct)
                   for rule_conf in self.rules]
        return all(results)
+
+
class OR(Rule):
    """Composite rule matching when at least one subrule matches."""

    def __init__(self, rules, *args, **kwargs):
        super(OR, self).__init__({}, *args, **kwargs)
        self.rules = rules

    def match(self, struct):
        """Return True when any subrule matches *struct*."""
        # materialize all results first (no short-circuit) so every subrule
        # is evaluated, matching the original list-comprehension behaviour
        results = [self.subrule_check(rule_conf, struct)
                   for rule_conf in self.rules]
        return any(results)
+
+
class RuleService(object):
    """Factory helpers for building :class:`Rule` instances from config."""

    @staticmethod
    def rule_from_config(config, field_mappings, labels_dict,
                         manipulator_func=None):
        """
        Returns modified rule with manipulator function
        By default manipulator function replaces field id from labels_dict
        with current field id proper for the rule from fields_mappings

        because label X_X id might be pointing different value on next request
        when new term is returned from elasticsearch - this ensures things
        are kept 1:1 all the time

        :param config: rule configuration dict
        :param field_mappings: current label id -> {'agg': ..., 'key': ...}
        :param labels_dict: label id -> {'agg': ..., 'key': ...} mapping the
            rule was originally created against
        :param manipulator_func: optional override for the default rewriter
        """
        # invert labels_dict so we can look label ids up by (agg, key)
        rev_map = {}
        for k, v in labels_dict.items():
            rev_map[(v['agg'], v['key'],)] = k

        if manipulator_func is None:
            def label_rewriter_func(rule):
                # rewrites a single rule node in place; composite nodes
                # (__OR__/__AND__) are left untouched
                field = rule.config.get('field')
                if not field or rule.config['field'] in ['__OR__', '__AND__']:
                    return

                to_map = field_mappings.get(rule.config['field'])

                # we need to replace series field with _AE_NOT_FOUND_ to not match
                # accidentally some other field which happens to have the series
                # that was used when the alert was created
                if to_map:
                    to_replace = rev_map.get((to_map['agg'], to_map['key'],),
                                             '_AE_NOT_FOUND_')
                else:
                    to_replace = '_AE_NOT_FOUND_'

                rule.config['field'] = to_replace
                rule.type_matrix[to_replace] = {"type": 'float'}

            manipulator_func = label_rewriter_func

        return Rule(config, {}, config_manipulator=manipulator_func)
diff --git a/backend/src/appenlight/lib/social.py b/backend/src/appenlight/lib/social.py
new file mode 100644
index 0000000..b1c578c
--- /dev/null
+++ b/backend/src/appenlight/lib/social.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.txt>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+from ziggurat_foundations.models.services.external_identity import \
+ ExternalIdentityService
+from appenlight.models.external_identity import ExternalIdentity
+
+
def handle_social_data(request, user, social_data):
    """Link the external (social) identity described by *social_data* to *user*.

    An existing identity for the same external id/provider is deleted and
    recreated with fresh credentials. Returns ``False`` when the provider
    did not supply an external user id; otherwise returns ``None``.
    """
    # removed dead no-op assignment (`social_data = social_data`)
    update_identity = False

    extng_id = ExternalIdentityService.by_external_id_and_provider(
        social_data['user']['id'],
        social_data['credentials'].provider_name
    )

    # fix legacy accounts with wrong google ID
    if not extng_id and social_data['credentials'].provider_name == 'google':
        extng_id = ExternalIdentityService.by_external_id_and_provider(
            social_data['user']['email'],
            social_data['credentials'].provider_name
        )

    if extng_id:
        # stale row - drop it and recreate below with fresh credentials
        extng_id.delete()
        update_identity = True

    if not social_data['user']['id']:
        request.session.flash(
            'No external user id found? Perhaps permissions for '
            'authentication are set incorrectly', 'error')
        return False

    if not extng_id or update_identity:
        if not update_identity:
            request.session.flash('Your external identity is now '
                                  'connected with your account')
        ex_identity = ExternalIdentity()
        ex_identity.external_id = social_data['user']['id']
        ex_identity.external_user_name = social_data['user']['user_name']
        ex_identity.provider_name = social_data['credentials'].provider_name
        ex_identity.access_token = social_data['credentials'].token
        ex_identity.token_secret = social_data['credentials'].token_secret
        ex_identity.alt_token = social_data['credentials'].refresh_token
        user.external_identities.append(ex_identity)
    request.session.pop('zigg.social_auth', None)
diff --git a/backend/src/appenlight/lib/sqlalchemy_fields.py b/backend/src/appenlight/lib/sqlalchemy_fields.py
new file mode 100644
index 0000000..21405fb
--- /dev/null
+++ b/backend/src/appenlight/lib/sqlalchemy_fields.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.txt>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+import binascii
+import sqlalchemy.types as types
+
+import appenlight.lib.encryption as encryption
+
+
class BinaryHex(types.TypeDecorator):
    """Stores hex-encoded string values in a raw binary column."""

    impl = types.LargeBinary

    def process_bind_param(self, value, dialect):
        """Hex string -> raw bytes before writing to the database."""
        if value is None:
            return None
        return binascii.unhexlify(value)

    def process_result_value(self, value, dialect):
        """Raw bytes -> hex form after reading from the database."""
        if value is None:
            return None
        return binascii.hexlify(value)
+
+
class EncryptedUnicode(types.TypeDecorator):
    """Transparently encrypts/decrypts unicode columns with Fernet."""

    impl = types.Unicode

    def process_bind_param(self, value, dialect):
        """Encrypt non-empty values before writing to the database."""
        if value:
            return encryption.encrypt_fernet(value)
        return value

    def process_result_value(self, value, dialect):
        """Decrypt non-empty values after reading from the database."""
        if value:
            return encryption.decrypt_fernet(value)
        return value
diff --git a/backend/src/appenlight/lib/utils/__init__.py b/backend/src/appenlight/lib/utils/__init__.py
new file mode 100644
index 0000000..b873788
--- /dev/null
+++ b/backend/src/appenlight/lib/utils/__init__.py
@@ -0,0 +1,495 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.txt>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+"""
+Utility functions.
+"""
+import logging
+import requests
+import hashlib
+import json
+import copy
+import uuid
+import appenlight.lib.helpers as h
+from collections import namedtuple
+from datetime import timedelta, datetime, date
+from dogpile.cache.api import NO_VALUE
+from appenlight.models import Datastores
+from appenlight.validators import (LogSearchSchema,
+ TagListSchema,
+ accepted_search_params)
+from itsdangerous import TimestampSigner
+from ziggurat_foundations.permissions import ALL_PERMISSIONS
+from dateutil.relativedelta import relativedelta
+from dateutil.rrule import rrule, MONTHLY, DAILY
+
+log = logging.getLogger(__name__)
+
+
Stat = namedtuple('Stat', 'start_interval value')


def default_extractor(item):
    """Return the ``start_interval`` carried by *item*.

    Accepts either an object exposing a ``start_interval`` attribute or a
    mapping with a ``'start_interval'`` key.

    :param item: item to extract date from
    """
    try:
        return item.start_interval
    except AttributeError:
        return item['start_interval']
+
+
+# fast gap generator
+def gap_gen_default(start, step, itemiterator, end_time=None,
+ iv_extractor=None):
+ """ generates a list of time/value items based on step and itemiterator
+ if there are entries missing from iterator time/None will be returned
+ instead
+ :param start - datetime - what time should we start generating our values
+ :param step - timedelta - stepsize
+ :param itemiterator - iterable - we will check this iterable for values
+ corresponding to generated steps
+ :param end_time - datetime - when last step is >= end_time stop iterating
+ :param iv_extractor - extracts current step from iterable items
+ """
+
+ if not iv_extractor:
+ iv_extractor = default_extractor
+
+ next_step = start
+ minutes = step.total_seconds() / 60.0
+ while next_step.minute % minutes != 0:
+ next_step = next_step.replace(minute=next_step.minute - 1)
+ for item in itemiterator:
+ item_start_interval = iv_extractor(item)
+ # do we have a match for current time step in our data?
+ # no gen a new tuple with 0 values
+ while next_step < item_start_interval:
+ yield Stat(next_step, None)
+ next_step = next_step + step
+ if next_step == item_start_interval:
+ yield Stat(item_start_interval, item)
+ next_step = next_step + step
+ if end_time:
+ while next_step < end_time:
+ yield Stat(next_step, None)
+ next_step = next_step + step
+
+
class DateTimeEncoder(json.JSONEncoder):
    """ Simple datetime to ISO encoder for json serialization"""

    def default(self, obj):
        """Serialize date/datetime objects to their ISO 8601 string form."""
        # datetime is a subclass of date, so a single check covers both;
        # the previous duplicate `isinstance(obj, datetime)` branch was dead
        if isinstance(obj, date):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)
+
+
def cometd_request(secret, endpoint, payload, throw_exceptions=False,
                   servers=None):
    """Post *payload* to *endpoint* on every configured channelstream server.

    :param secret: secret used to sign the endpoint path
    :param endpoint: path posted to each server, e.g. ``/message``
    :param payload: JSON-serializable body (datetimes handled by
        DateTimeEncoder)
    :param throw_exceptions: re-raise connection errors instead of
        collecting partial results
    :param servers: list of ``{'secret': ..., 'server': ...}`` dicts
    :return: list of JSON-decoded responses (empty dict for failed servers)
    """
    responses = []
    if not servers:
        servers = []

    signer = TimestampSigner(secret)
    sig_for_server = signer.sign(endpoint)
    # NOTE(review): the per-server `secret` unpacked below shadows the
    # argument and is never used - the signature is computed once above from
    # the function argument; confirm whether per-server signing was intended
    for secret, server in [(s['secret'], s['server']) for s in servers]:
        response = {}
        secret_headers = {'x-channelstream-secret': sig_for_server,
                          'x-channelstream-endpoint': endpoint,
                          'Content-Type': 'application/json'}
        url = '%s%s' % (server, endpoint)
        try:
            response = requests.post(url,
                                     data=json.dumps(payload,
                                                     cls=DateTimeEncoder),
                                     headers=secret_headers,
                                     verify=False,
                                     timeout=2).json()
        except requests.exceptions.RequestException as e:
            # best-effort fan-out: failed servers contribute an empty dict
            if throw_exceptions:
                raise
        responses.append(response)
    return responses
+
+
def add_cors_headers(response):
    """Attach permissive CORS headers to *response* (any origin allowed)."""
    headers = response.headers
    headers.add('Access-Control-Allow-Origin', '*')
    headers.add('XDomainRequestAllowed', '1')
    headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
    # credentials deliberately not allowed:
    # response.headers.add('Access-Control-Allow-Credentials', 'true')
    headers.add('Access-Control-Allow-Headers',
                'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie')
    headers.add('Access-Control-Max-Age', '86400')
+
+
from sqlalchemy.sql import compiler
from psycopg2.extensions import adapt as sqlescape


# or use the appropriate escape function from your db driver

def compile_query(query):
    """Render a SQLAlchemy query to a literal SQL string with bound
    parameters inlined (debugging helper).

    NOTE(review): relies on psycopg2 escaping and ``dialect.encoding``
    (removed in newer SQLAlchemy releases) - never execute the returned
    string directly against untrusted input.
    """
    dialect = query.session.bind.dialect
    statement = query.statement
    comp = compiler.SQLCompiler(dialect, statement)
    comp.compile()
    enc = dialect.encoding
    params = {}
    for k, v in comp.params.items():
        if isinstance(v, str):
            v = v.encode(enc)
        params[k] = sqlescape(v)
    return (comp.string.encode(enc) % params).decode(enc)
+
+
def convert_es_type(input_data):
    """
    This might need to convert some text or other types to corresponding ES types
    """
    coerced = str(input_data)
    return coerced
+
+
ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch'])


def parse_proto(input_data):
    """Parse a version string like ``'0.5.1'`` into a :class:`ProtoVersion`.

    Missing components default to 0. Anything unparsable yields the
    sentinel ``ProtoVersion(99, 99, 99)``.
    """
    try:
        parts = [int(x) for x in input_data.split('.')]
        while len(parts) < 3:
            parts.append(0)
        return ProtoVersion(*parts)
    except Exception as e:
        # deliberately broad: any malformed client input maps to the sentinel;
        # use lazy %-style logging args instead of eager string formatting
        log.info('Unknown protocol version: %s', e)
        return ProtoVersion(99, 99, 99)
+
+
def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
                          ixtypes=None):
    """
    This function limits the search to 6 months by default so we don't have to
    query 300 elasticsearch indices for 20 years of historical data for example

    :param start_date: (datetime) lower bound for index dates
    :param end_date: (datetime) upper bound for index dates
    :param months_in_past: (int) fallback range when a bound is missing
    :param ixtypes: (list) index families to include, defaults to
        ['reports', 'metrics', 'logs']
    :return: list of index names that actually exist within the date range
    """

    # should be cached later
    def get_possible_names():
        return list(Datastores.es.aliases().keys())

    possible_names = get_possible_names()
    # build printf-style templates for each requested index family
    es_index_types = []
    if not ixtypes:
        ixtypes = ['reports', 'metrics', 'logs']
    for t in ixtypes:
        if t == 'reports':
            es_index_types.append('rcae_r_%s')
        elif t == 'logs':
            es_index_types.append('rcae_l_%s')
        elif t == 'metrics':
            es_index_types.append('rcae_m_%s')
        elif t == 'uptime':
            es_index_types.append('rcae_u_%s')
        elif t == 'slow_calls':
            es_index_types.append('rcae_sc_%s')

    # fill in whichever bound is missing using months_in_past
    if start_date:
        start_date = copy.copy(start_date)
    else:
        if not end_date:
            end_date = datetime.utcnow()
        start_date = end_date + relativedelta(months=months_in_past * -1)

    if not end_date:
        end_date = start_date + relativedelta(months=months_in_past)

    # one candidate per month, capped at 36 months
    index_dates = list(rrule(MONTHLY,
                             dtstart=start_date.date().replace(day=1),
                             until=end_date.date(),
                             count=36))
    index_names = []
    for ix_type in es_index_types:
        # keep only monthly indices that actually exist in ES
        to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates
                     if ix_type % d.strftime('%Y_%m') in possible_names]
        index_names.extend(to_extend)
        # also include existing daily indices within the range (max 366 days)
        for day in list(rrule(DAILY, dtstart=start_date.date(),
                              until=end_date.date(), count=366)):
            ix_name = ix_type % day.strftime('%Y_%m_%d')
            if ix_name in possible_names:
                index_names.append(ix_name)
    return index_names
+
+
def build_filter_settings_from_query_dict(
        request, params=None, override_app_ids=None,
        resource_permissions=None):
    """
    Builds list of normalized search terms for ES from query params
    ensuring application list is restricted to only applications user
    has access to

    :param request: pyramid request with an optional authenticated user
    :param params: (dictionary) raw query params
    :param override_app_ids: list of application id's to use instead of
        applications user normally has access to
    :param resource_permissions: permission names checked against the user's
        resources, defaults to ['view']
    """
    params = copy.deepcopy(params)
    applications = []
    if not resource_permissions:
        resource_permissions = ['view']

    if request.user:
        applications = request.user.resources_with_perms(
            resource_permissions, resource_types=['application'])

    # CRITICAL - this ensures our resultset is limited to only the ones
    # user has view permissions
    all_possible_app_ids = set([app.resource_id for app in applications])

    # if override is present we force permission for app to be present
    # this allows users to see dashboards and applications they would
    # normally not be able to

    if override_app_ids:
        all_possible_app_ids = set(override_app_ids)

    schema = LogSearchSchema().bind(resources=all_possible_app_ids)
    tag_schema = TagListSchema()
    filter_settings = schema.deserialize(params)
    tag_list = []
    for k, v in list(filter_settings.items()):
        if k in accepted_search_params:
            continue
        # anything that is not a canonical search param becomes a tag filter
        tag_list.append({"name": k, "value": v, "op": 'eq'})
        # remove the key from filter_settings
        filter_settings.pop(k, None)
    tags = tag_schema.deserialize(tag_list)
    filter_settings['tags'] = tags
    return filter_settings
+
+
def gen_uuid():
    """Return a freshly generated random UUID4 in canonical string form."""
    return '{}'.format(uuid.uuid4())
+
+
def gen_uuid4_sha_hex():
    """Return the SHA1 hex digest (40 chars) of a fresh random UUID4."""
    digest = hashlib.sha1(uuid.uuid4().bytes)
    return digest.hexdigest()
+
+
def permission_tuple_to_dict(data):
    """Flatten a ziggurat permission tuple into a plain JSON-friendly dict."""
    out = {
        "user_name": None,
        "perm_name": data.perm_name,
        "owner": data.owner,
        "type": data.type,
        "resource_name": None,
        "resource_type": None,
        "resource_id": None,
        "group_name": None,
        "group_id": None
    }
    if data.user:
        out["user_name"] = data.user.user_name
    # the ALL_PERMISSIONS sentinel is not serializable - use a marker string
    if data.perm_name == ALL_PERMISSIONS:
        out['perm_name'] = '__all_permissions__'
    if data.resource:
        out.update(resource_name=data.resource.resource_name,
                   resource_type=data.resource.resource_type,
                   resource_id=data.resource.resource_id)
    if data.group:
        out.update(group_name=data.group.group_name,
                   group_id=data.group.id)
    return out
+
+
def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
                       gap_gen=None, db_session=None, step_interval=None,
                       iv_extractor=None,
                       rerange=False, *args, **kwargs):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into daily buckets - if the stats_since and end time give a
    delta bigger than 24hours, then only "todays" data is computed on the fly

    :param request: (request) request object
    :param stats_since: (datetime) start date of buckets range
    :param end_time: (datetime) end date of buckets range - utcnow() if None
    :param fn: (callable) callable to use to populate buckets should have
        following signature:
        def get_data(request, since_when, until, *args, **kwargs):

    :param cache_key: (string) cache key that will be used to build bucket
        caches
    :param gap_gen: (callable) gap generator - should return step intervals
        to use with out `fn` callable
    :param db_session: (Session) sqlalchemy session
    :param step_interval: (timedelta) optional step interval if we want to
        override the default determined from total start/end time delta
    :param iv_extractor: (callable) used to get step intervals from data
        returned by `fn` callable
    :param rerange: (bool) handy if we want to change ranges from hours to
        days when cached data is missing - will shorten execution time if `fn`
        callable supports that and we are working with multiple rows - like metrics
    :param args:
    :param kwargs:

    :return: dict mapping interval start -> bucket data
    """
    if not end_time:
        end_time = datetime.utcnow().replace(second=0, microsecond=0)
    delta = end_time - stats_since
    # if smaller than 3 days we want to group by 5min else by 1h,
    # for 60 min group by min
    if not gap_gen:
        gap_gen = gap_gen_default
    if not iv_extractor:
        iv_extractor = default_extractor

    # do not use custom interval if total time range with new iv would exceed
    # end time
    if not step_interval or stats_since + step_interval >= end_time:
        if delta < h.time_deltas.get('12h')['delta']:
            step_interval = timedelta(seconds=60)
        elif delta < h.time_deltas.get('3d')['delta']:
            step_interval = timedelta(seconds=60 * 5)
        elif delta > h.time_deltas.get('2w')['delta']:
            step_interval = timedelta(days=1)
        else:
            step_interval = timedelta(minutes=60)

    if step_interval >= timedelta(minutes=60):
        log.info('cached_buckets:{}: adjusting start time '
                 'for hourly or daily intervals'.format(cache_key))
        stats_since = stats_since.replace(hour=0, minute=0)

    # all interval start points we expect to have data for
    ranges = [i.start_interval for i in list(gap_gen(stats_since,
                                                     step_interval, [],
                                                     end_time=end_time))]
    buckets = {}
    storage_key = 'buckets:' + cache_key + '{}|{}'
    # this means we basically cache per hour in 3-14 day intervals but i think
    # its fine at this point - will be faster than db access anyways

    if len(ranges) >= 1:
        last_ranges = [ranges[-1]]
    else:
        last_ranges = []
    if step_interval >= timedelta(minutes=60):
        for r in ranges:
            k = storage_key.format(step_interval.total_seconds(), r)
            value = request.registry.cache_regions.redis_day_30.get(k)
            # last buckets are never loaded from cache
            is_last_result = (
                r >= end_time - timedelta(hours=6) or r in last_ranges)
            if value is not NO_VALUE and not is_last_result:
                log.info("cached_buckets:{}: "
                         "loading range {} from cache".format(cache_key, r))
                buckets[r] = value
            else:
                log.info("cached_buckets:{}: "
                         "loading range {} from storage".format(cache_key, r))
                range_size = step_interval
                # optionally collapse hourly misses to whole-day fetches
                if (step_interval == timedelta(minutes=60) and
                        not is_last_result and rerange):
                    range_size = timedelta(days=1)
                    r = r.replace(hour=0, minute=0)
                    log.info("cached_buckets:{}: "
                             "loading collapsed "
                             "range {} {}".format(cache_key, r,
                                                  r + range_size))
                bucket_data = fn(
                    request, r, r + range_size, step_interval,
                    gap_gen, bucket_count=len(ranges), *args, **kwargs)
                # store each freshly computed bucket back in the cache
                for b in bucket_data:
                    b_iv = iv_extractor(b)
                    buckets[b_iv] = b
                    k2 = storage_key.format(
                        step_interval.total_seconds(), b_iv)
                    request.registry.cache_regions.redis_day_30.set(k2, b)
        log.info("cached_buckets:{}: saving cache".format(cache_key))
    else:
        # bucket count is 1 for short time ranges <= 24h from now
        bucket_data = fn(request, stats_since, end_time, step_interval,
                         gap_gen, bucket_count=1, *args, **kwargs)
        for b in bucket_data:
            buckets[iv_extractor(b)] = b
    return buckets
+
+
def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
                          db_session=None, *args, **kwargs):
    """ Takes "fn" that should return some data and tries to load the data
    dividing it into 2 buckets - cached "since_from" bucket and "today"
    bucket - then the data can be reduced into single value

    Data is cached if the stats_since and end time give a delta bigger
    than 24hours - then only 24h is computed on the fly

    :return: tuple of (older_data, todays_data); older_data is None when the
        whole range was computed fresh
    """
    if not end_time:
        end_time = datetime.utcnow().replace(second=0, microsecond=0)
    delta = end_time - stats_since

    if delta >= timedelta(minutes=60):
        log.info('cached_split_data:{}: adjusting start time '
                 'for hourly or daily intervals'.format(cache_key))
        stats_since = stats_since.replace(hour=0, minute=0)

    storage_key = 'buckets_split_data:' + cache_key + ':{}|{}'
    # "old" data covers everything before the start of today's partial day
    old_end_time = end_time.replace(hour=0, minute=0)

    final_storage_key = storage_key.format(delta.total_seconds(),
                                           old_end_time)
    older_data = None

    cdata = request.registry.cache_regions.redis_day_7.get(
        final_storage_key)

    if cdata:
        log.info("cached_split_data:{}: found old "
                 "bucket data".format(cache_key))
        older_data = cdata

    # range reaches further back than 24h and nothing cached - compute and
    # cache the historical part
    if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and
            not cdata):
        log.info("cached_split_data:{}: didn't find the "
                 "start bucket in cache so load older data".format(cache_key))
        recent_stats_since = old_end_time
        older_data = fn(request, stats_since, recent_stats_since,
                        db_session=db_session, *args, **kwargs)
        request.registry.cache_regions.redis_day_7.set(final_storage_key,
                                                       older_data)
    elif stats_since < end_time - h.time_deltas.get('24h')['delta']:
        recent_stats_since = old_end_time
    else:
        recent_stats_since = stats_since

    log.info("cached_split_data:{}: loading fresh "
             "data bucksts from last 24h ".format(cache_key))
    todays_data = fn(request, recent_stats_since, end_time,
                     db_session=db_session, *args, **kwargs)
    return older_data, todays_data
+
+
def in_batches(seq, size):
    """
    Splits an iterable into batches of specified size
    :param seq (sliceable sequence)
    :param size (integer) batch length
    """
    starts = range(0, len(seq), size)
    return (seq[start:start + size] for start in starts)
diff --git a/backend/src/appenlight/lib/utils/airbrake.py b/backend/src/appenlight/lib/utils/airbrake.py
new file mode 100644
index 0000000..82870f7
--- /dev/null
+++ b/backend/src/appenlight/lib/utils/airbrake.py
@@ -0,0 +1,147 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.txt>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+import logging
+import uuid
+
+from datetime import datetime
+
+log = logging.getLogger(__name__)
+
+
def parse_airbrake_xml(request):
    """Convert an Airbrake v2 XML error notice into App Enlight's report dict.

    Reads the parsed etree from ``request.context.airbrake_xml_etree`` and
    returns a dict with error metadata, request data, traceback lines and
    blacklisted values masked.
    """
    root = request.context.airbrake_xml_etree
    # NOTE(review): assumes an <error> element is always present - a notice
    # without one would raise AttributeError below; confirm upstream validation
    error = root.find('error')
    notifier = root.find('notifier')
    server_env = root.find('server-environment')
    request_data = root.find('request')
    user = root.find('current-user')
    if request_data is not None:
        cgi_data = request_data.find('cgi-data')
        if cgi_data is None:
            cgi_data = []

    error_dict = {
        'class_name': error.findtext('class') or '',
        'error': error.findtext('message') or '',
        "occurences": 1,
        "http_status": 500,
        "priority": 5,
        "server": 'unknown',
        'url': 'unknown', 'request': {}
    }
    if user is not None:
        error_dict['username'] = user.findtext('username') or \
                                 user.findtext('id')
    if notifier is not None:
        error_dict['client'] = notifier.findtext('name')

    if server_env is not None:
        error_dict["server"] = server_env.findtext('hostname', 'unknown')

    # only these non-HTTP_* environ keys are copied into the report
    whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME',
                         'CONTENT_TYPE', 'HTTP_REFERER']

    if request_data is not None:
        error_dict['url'] = request_data.findtext('url', 'unknown')
        component = request_data.findtext('component')
        action = request_data.findtext('action')
        if component and action:
            error_dict['view_name'] = '%s:%s' % (component, action)
        # translate each cgi-data var into the matching request sub-dict
        for node in cgi_data:
            key = node.get('key')
            if key.startswith('HTTP') or key in whitelist_environ:
                error_dict['request'][key] = node.text
            elif 'query_parameters' in key:
                error_dict['request']['GET'] = {}
                for x in node:
                    error_dict['request']['GET'][x.get('key')] = x.text
            elif 'request_parameters' in key:
                error_dict['request']['POST'] = {}
                for x in node:
                    error_dict['request']['POST'][x.get('key')] = x.text
            elif key.endswith('cookie'):
                error_dict['request']['COOKIE'] = {}
                for x in node:
                    error_dict['request']['COOKIE'][x.get('key')] = x.text
            elif key.endswith('request_id'):
                error_dict['request_id'] = node.text
            elif key.endswith('session'):
                error_dict['request']['SESSION'] = {}
                for x in node:
                    error_dict['request']['SESSION'][x.get('key')] = x.text
            else:
                if key in ['rack.session.options']:
                    # skip secret configs
                    continue
                try:
                    if len(node):
                        error_dict['request'][key] = dict(
                            [(x.get('key'), x.text,) for x in node])
                    else:
                        error_dict['request'][key] = node.text
                except Exception as e:
                    log.warning('Airbrake integration exception: %s' % e)

        error_dict['request'].pop('HTTP_COOKIE', '')

    # NOTE(review): REMOTE_ADDR/HTTP_USER_AGENT are stored inside
    # error_dict['request'] above, not at the top level, so these pops always
    # fall back to '' - confirm whether error_dict['request'].pop was intended
    error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '')
    error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '')
    if 'request_id' not in error_dict:
        error_dict['request_id'] = str(uuid.uuid4())
    if request.context.possibly_public:
        # set ip for reports that come from airbrake js client
        error_dict["timestamp"] = datetime.utcnow()
        if request.environ.get("HTTP_X_FORWARDED_FOR"):
            ip = request.environ.get("HTTP_X_FORWARDED_FOR", '')
            first_ip = ip.split(',')[0]
            remote_addr = first_ip.strip()
        else:
            remote_addr = (request.environ.get("HTTP_X_REAL_IP") or
                           request.environ.get('REMOTE_ADDR'))
        error_dict["ip"] = remote_addr

    # values under any of these key fragments are masked before storage
    blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
                 'session', 'test']

    lines = []
    # NOTE(review): assumes a <backtrace> element exists - iterating None
    # would raise TypeError; confirm upstream validation
    for l in error.find('backtrace'):
        lines.append({'file': l.get("file", ""),
                      'line': l.get("number", ""),
                      'fn': l.get("method", ""),
                      'module': l.get("module", ""),
                      'cline': l.get("method", ""),
                      'vars': {}})
    error_dict['traceback'] = list(reversed(lines))
    # filtering is not provided by airbrake
    keys_to_check = (
        error_dict['request'].get('COOKIE'),
        error_dict['request'].get('COOKIES'),
        error_dict['request'].get('POST'),
        error_dict['request'].get('SESSION'),
    )
    for source in [_f for _f in keys_to_check if _f]:
        for k in source.keys():
            for bad_key in blacklist:
                if bad_key in k.lower():
                    source[k] = '***'

    return error_dict
diff --git a/backend/src/appenlight/lib/utils/date_utils.py b/backend/src/appenlight/lib/utils/date_utils.py
new file mode 100644
index 0000000..f66eece
--- /dev/null
+++ b/backend/src/appenlight/lib/utils/date_utils.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+from datetime import tzinfo, timedelta, datetime
+from dateutil.relativedelta import relativedelta
+import logging
+
+log = logging.getLogger(__name__)
+
+
def to_relativedelta(time_delta):
    """Convert a :class:`datetime.timedelta` into a ``dateutil``
    :class:`~dateutil.relativedelta.relativedelta`.

    Maps the timedelta's already-normalized components (``days``,
    ``seconds``, ``microseconds``) straight across, which is exact for
    negative deltas as well.  The previous implementation truncated
    ``total_seconds()`` to an int and reused ``microseconds`` separately,
    which corrupted negative sub-second deltas (e.g. -1 microsecond came
    back as +999999 microseconds).
    """
    return relativedelta(days=time_delta.days,
                         seconds=time_delta.seconds,
                         microseconds=time_delta.microseconds)
+
+
def convert_date(date_str, return_utcnow_if_wrong=True,
                 normalize_future=False):
    """Parse *date_str* into a naive :class:`datetime.datetime`.

    Accepts an already-built datetime (tzinfo is stripped), ISO-8601-style
    strings with an optional ``Z`` marker and optional fractional seconds,
    and the legacy client format ``%Y-%m-%d %H:%M:%S,%f``.

    :param return_utcnow_if_wrong: return ``utcnow`` instead of ``None``
        when the value is missing or unparsable
    :param normalize_future: clamp dates more than 3 minutes in the
        future to ``utcnow``
    """
    utcnow = datetime.utcnow()
    if isinstance(date_str, datetime):
        # already a datetime - just drop the tz info
        return date_str.replace(tzinfo=None)
    if not date_str and return_utcnow_if_wrong:
        return utcnow

    date = None
    try:
        # cut everything from the first 'Z' marker onwards
        stripped = date_str.split('Z')[0]
        iso_format = ('%Y-%m-%dT%H:%M:%S.%f' if '.' in stripped
                      else '%Y-%m-%dT%H:%M:%S')
        # second format kept for bw compat with the old client
        for fmt in (iso_format, '%Y-%m-%d %H:%M:%S,%f'):
            try:
                date = datetime.strptime(stripped, fmt)
                break
            except ValueError:
                continue
        else:
            date = utcnow if return_utcnow_if_wrong else None
    except Exception:
        # non-string input or any other unexpected failure
        date = utcnow if return_utcnow_if_wrong else None

    if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
        log.warning('time %s in future + 3 min, normalizing' % date)
        return utcnow
    return date
diff --git a/backend/src/appenlight/lib/utils/sentry.py b/backend/src/appenlight/lib/utils/sentry.py
new file mode 100644
index 0000000..fea8bc7
--- /dev/null
+++ b/backend/src/appenlight/lib/utils/sentry.py
@@ -0,0 +1,301 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2016 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# App Enlight Enterprise Edition, including its added features, Support
+# services, and proprietary license terms, please see
+# https://rhodecode.com/licenses/
+
+from datetime import timedelta
+
+from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
+
# Standard attributes of a stdlib ``logging.LogRecord``; anything else
# attached to a record's extra payload is considered a user-supplied
# variable and is promoted to a tag by the parser below.
EXCLUDED_LOG_VARS = [
    'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
    'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs',
    'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated',
    'thread', 'threadName']

# Top-level keys of a sentry event payload that the parser consumes
# explicitly (including the legacy ``sentry.interfaces.*`` spellings);
# any key NOT in this list is treated as an unidentified extra value.
EXCLUDE_SENTRY_KEYS = [
    'csp',
    'culprit',
    'event_id',
    'exception',
    'extra',
    'level',
    'logentry',
    'logger',
    'message',
    'modules',
    'platform',
    'query',
    'release',
    'request',
    'sentry.interfaces.Csp', 'sentry.interfaces.Exception',
    'sentry.interfaces.Http', 'sentry.interfaces.Message',
    'sentry.interfaces.Query',
    'sentry.interfaces.Stacktrace',
    'sentry.interfaces.Template', 'sentry.interfaces.User',
    'sentry.interfaces.csp.Csp',
    'sentry.interfaces.exception.Exception',
    'sentry.interfaces.http.Http',
    'sentry.interfaces.message.Message',
    'sentry.interfaces.query.Query',
    'sentry.interfaces.stacktrace.Stacktrace',
    'sentry.interfaces.template.Template',
    'sentry.interfaces.user.User', 'server_name',
    'stacktrace',
    'tags',
    'template',
    'time_spent',
    'timestamp',
    'user']
+
+
def get_keys(list_of_keys, json_body):
    """Return the value of the first key from *list_of_keys* present in
    *json_body*, or ``None`` when none of them is present."""
    return next((json_body[key] for key in list_of_keys if key in json_body),
                None)
+
+
def get_logentry(json_body):
    """Extract the sentry ``logentry`` interface from the event payload.

    Checks the modern key and both legacy interface spellings; returns the
    first one present, or ``None``.
    """
    for key_name in ('logentry',
                     'sentry.interfaces.message.Message',
                     'sentry.interfaces.Message'):
        if key_name in json_body:
            return json_body[key_name]
    return None
+
+
def get_exception(json_body):
    """Extract exception info and its stacktrace from a sentry payload.

    :returns: tuple ``(parsed_exception, parsed_stacktrace)`` where
        ``parsed_exception`` holds the normalized ``type``/``value``/
        ``module`` fields (empty dict when no exception interface is
        present) and ``parsed_stacktrace`` is the frame list produced by
        :func:`get_stacktrace` (``{}`` when the exception has no frames).
    """
    parsed_exception = {}
    parsed_stacktrace = {}
    key_names = ['exception',
                 'sentry.interfaces.exception.Exception',
                 'sentry.interfaces.Exception'
                 ]
    exception = get_keys(key_names, json_body) or {}
    if exception:
        # payload may wrap a list of exceptions in a {'values': [...]} dict
        if isinstance(exception, dict):
            exception = exception['values'][0]
        else:
            exception = exception[0]

        parsed_exception['type'] = exception.get('type')
        parsed_exception['value'] = exception.get('value')
        parsed_exception['module'] = exception.get('module')
        parsed_stacktrace = get_stacktrace(exception) or {}
        # NOTE: previously ``parsed_exception`` was overwritten with the raw
        # ``exception`` dict right here, making the three normalized
        # assignments above dead code; the normalized dict is now returned
        # as intended (callers in this module only test it for truthiness).
    return parsed_exception, parsed_stacktrace
+
+
def get_stacktrace(json_body):
    """Normalize a sentry stacktrace interface into appenlight frame dicts.

    Returns a list of ``{cline, file, module, fn, line, vars}`` dicts; an
    empty list when the payload carries no stacktrace.
    """
    stacktrace = None
    for key_name in ('stacktrace',
                     'sentry.interfaces.stacktrace.Stacktrace',
                     'sentry.interfaces.Stacktrace'):
        if key_name in json_body:
            stacktrace = json_body[key_name]
            break
    if not stacktrace:
        return []
    return [{"cline": frame.get('context_line', ''),
             "file": frame.get('filename', ''),
             "module": frame.get('module', ''),
             "fn": frame.get('function', ''),
             "line": frame.get('lineno', ''),
             "vars": list(frame.get('vars', {}).items())}
            for frame in stacktrace['frames']]
+
+
def get_template(json_body):
    """Normalize a sentry template interface into appenlight frame dicts.

    :returns: list of ``{cline, file, fn, line, vars}`` dicts; empty list
        when the payload carries no template interface.

    Bug fix: the accumulator was initialized as a dict (``{}``) but used
    with ``.append``, so any payload that actually contained template
    frames raised ``AttributeError``.  It is now a list; the empty result
    changes from ``{}`` to ``[]``, both falsy for the truthiness checks
    used by the caller.
    """
    parsed_template = []
    template = None
    for key_name in ('template',
                     'sentry.interfaces.template.Template',
                     'sentry.interfaces.Template'):
        if key_name in json_body:
            template = json_body[key_name]
            break
    if template:
        for frame in template['frames']:
            parsed_template.append(
                {"cline": frame.get('context_line', ''),
                 "file": frame.get('filename', ''),
                 "fn": '',
                 "line": frame.get('lineno', ''),
                 "vars": []
                 }
            )

    return parsed_template
+
+
def get_request(json_body):
    """Normalize the sentry http interface.

    Header names are Title-Cased, every other key is lower-cased.
    Returns an empty dict when no http interface is present.
    """
    http = {}
    for key_name in ('request',
                     'sentry.interfaces.http.Http',
                     'sentry.interfaces.Http'):
        if key_name in json_body:
            http = json_body[key_name] or {}
            break
    parsed_http = {}
    for key, value in http.items():
        if key == 'headers':
            parsed_http['headers'] = {
                header.title(): header_value
                for header, header_value in http['headers'].items()}
        else:
            parsed_http[key.lower()] = value
    return parsed_http
+
+
def get_user(json_body):
    """Extract the sentry user interface as a normalized dict with
    ``id``/``username``/``email``/``ip_address`` keys.

    Returns an empty dict when the payload has no (truthy) user interface.
    """
    user = None
    for key_name in ('user',
                     'sentry.interfaces.user.User',
                     'sentry.interfaces.User'):
        if key_name in json_body:
            user = json_body[key_name]
            break
    if not user:
        return {}
    return {'id': user.get('id'),
            'username': user.get('username'),
            'email': user.get('email'),
            'ip_address': user.get('ip_address')}
+
+
def get_query(json_body):
    """Return the sentry query interface payload (modern or legacy key
    spelling), or ``None`` when absent."""
    for key_name in ('query',
                     'sentry.interfaces.query.Query',
                     'sentry.interfaces.Query'):
        if key_name in json_body:
            return json_body[key_name]
    return None
+
+
def parse_sentry_event(json_body):
    """Convert a raw sentry wire event into an appenlight event dict.

    :param json_body: decoded sentry event payload (dict)
    :returns: ``(event_dict, event_type)`` tuple where *event_type* is
        ``ParsedSentryEventType.LOG`` for plain log records and
        ``ParsedSentryEventType.ERROR_REPORT`` when the payload carries
        an exception, stacktrace or template interface.
    """
    request_id = json_body.get('event_id')

    # required
    message = json_body.get('message')
    log_timestamp = json_body.get('timestamp')
    level = json_body.get('level')
    if isinstance(level, int):
        # sentry may send numeric log levels - translate back to the name
        level = LogLevelPython.key_from_value(level)

    namespace = json_body.get('logger')
    language = json_body.get('platform')

    # optional
    server_name = json_body.get('server_name')
    culprit = json_body.get('culprit')
    release = json_body.get('release')

    # tags/extra may arrive as dicts or as already-flattened pair lists;
    # normalize both to lists of (key, value) pairs
    tags = json_body.get('tags', {})
    if hasattr(tags, 'items'):
        tags = list(tags.items())
    extra = json_body.get('extra', {})
    if hasattr(extra, 'items'):
        extra = list(extra.items())

    parsed_req = get_request(json_body)
    user = get_user(json_body)
    template = get_template(json_body)
    # NOTE(review): ``query`` is extracted but never used below - confirm
    # whether it should be attached to the event
    query = get_query(json_body)

    # other unidentified keys found
    other_keys = [(k, json_body[k]) for k in json_body.keys()
                  if k not in EXCLUDE_SENTRY_KEYS]

    # a logentry interface overrides the top-level message
    logentry = get_logentry(json_body)
    if logentry:
        message = logentry['message']

    exception, stacktrace = get_exception(json_body)

    # a stacktrace may also appear at the top level of the payload
    alt_stacktrace = get_stacktrace(json_body)
    event_type = None
    if not exception and not stacktrace and not alt_stacktrace and not template:
        event_type = ParsedSentryEventType.LOG

    # log-shaped event dict; replaced wholesale below for error reports
    event_dict = {
        'log_level': level,
        'message': message,
        'namespace': namespace,
        'request_id': request_id,
        'server': server_name,
        'date': log_timestamp,
        'tags': tags
    }
    # promote extra vars to tags, skipping stdlib LogRecord attributes
    event_dict['tags'].extend(
        [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS])

    # other keys can be various object types
    event_dict['tags'].extend([(k, v) for k, v in other_keys
                               if isinstance(v, str)])
    if culprit:
        event_dict['tags'].append(('sentry_culprit', culprit))
    if language:
        event_dict['tags'].append(('sentry_language', language))
    if release:
        event_dict['tags'].append(('sentry_release', release))

    if exception or stacktrace or alt_stacktrace or template:
        # error report: rebuild the dict in report shape instead
        event_type = ParsedSentryEventType.ERROR_REPORT
        event_dict = {
            'client': 'sentry',
            'error': message,
            'namespace': namespace,
            'request_id': request_id,
            'server': server_name,
            'start_time': log_timestamp,
            'end_time': None,
            'tags': tags,
            'extra': extra,
            'language': language,
            'view_name': json_body.get('culprit'),
            'http_status': None,
            'username': None,
            'url': parsed_req.get('url'),
            'ip': None,
            'user_agent': None,
            'request': None,
            'slow_calls': None,
            'request_stats': None,
            'traceback': None
        }

        event_dict['extra'].extend(other_keys)
        if release:
            event_dict['tags'].append(('sentry_release', release))
        event_dict['request'] = parsed_req
        if 'headers' in parsed_req:
            event_dict['user_agent'] = parsed_req['headers'].get('User-Agent')
        if 'env' in parsed_req:
            event_dict['ip'] = parsed_req['env'].get('REMOTE_ADDR')
        # time_spent is the request duration in milliseconds
        ts_ms = int(json_body.get('time_spent') or 0)
        if ts_ms > 0:
            # NOTE(review): assumes 'timestamp' was already converted to a
            # datetime upstream - a raw string here would raise; confirm
            event_dict['end_time'] = event_dict['start_time'] + \
                timedelta(milliseconds=ts_ms)
        if stacktrace or alt_stacktrace or template:
            event_dict['traceback'] = stacktrace or alt_stacktrace or template
        # drop placeholder keys the payload did not fill in
        for k in list(event_dict.keys()):
            if event_dict[k] is None:
                del event_dict[k]
        if user:
            # first non-empty identity field wins
            event_dict['username'] = user['username'] or user['id'] \
                or user['email']
    return event_dict, event_type